diff --git a/.e2e-token b/.e2e-token
index 57c6271..69527f7 100644
--- a/.e2e-token
+++ b/.e2e-token
@@ -1 +1 @@
-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhZG1pbiIsInR5cGUiOiJhY2Nlc3MiLCJpYXQiOjE3NTUyMDAyNzMsImV4cCI6MTc1NTIxNDY3M30.VfcV_zbhtSe50u1awNC4v2O8CU4PQ9AwhlcNeNn40cM
\ No newline at end of file
+eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhZG1pbiIsImV4cCI6MTc1NTMxMzA3NCwiaWF0IjoxNzU1MzA1ODc0LCJ0eXBlIjoiYWNjZXNzIn0.u0yIrs1ukrDZrl1DCYGQfyZYQizkl4RPfGJeHzXF6D4
diff --git a/app/api/admin.py b/app/api/admin.py
index fc0ef6e..bf7e274 100644
--- a/app/api/admin.py
+++ b/app/api/admin.py
@@ -90,14 +90,14 @@ class UserResponse(BaseModel):
id: int
username: str
email: str
- first_name: Optional[str]
- last_name: Optional[str]
- is_admin: bool
- is_active: bool
- is_approver: bool
- last_login: Optional[datetime]
- created_at: Optional[datetime]
- updated_at: Optional[datetime]
+ first_name: Optional[str] = None
+ last_name: Optional[str] = None
+ is_admin: bool = False
+ is_active: bool = True
+ is_approver: bool = False
+ last_login: Optional[datetime] = None
+ created_at: Optional[datetime] = None
+ updated_at: Optional[datetime] = None
model_config = ConfigDict(from_attributes=True)
diff --git a/app/api/billing.py b/app/api/billing.py
new file mode 100644
index 0000000..f0fc95c
--- /dev/null
+++ b/app/api/billing.py
@@ -0,0 +1,1607 @@
+"""
+Billing & Statements API endpoints
+"""
+from typing import List, Optional, Dict, Any, Set
+from datetime import datetime, timezone, date, timedelta
+import os
+import re
+from pathlib import Path
+import asyncio
+import logging
+import threading
+import time
+from enum import Enum
+
+from fastapi import APIRouter, Depends, HTTPException, status, Query, BackgroundTasks
+from fastapi import Path as PathParam
+from fastapi.responses import FileResponse
+from fastapi import WebSocket, WebSocketDisconnect
+from pydantic import BaseModel, ConfigDict, Field
+from sqlalchemy.orm import Session, joinedload
+from sqlalchemy.exc import SQLAlchemyError
+
+from app.database.base import get_db, SessionLocal
+from app.models.files import File
+from app.models.ledger import Ledger
+from app.models.rolodex import Rolodex
+from app.models.user import User
+from app.auth.security import get_current_user, verify_token
+from app.utils.responses import BulkOperationResponse, ErrorDetail
+from app.utils.logging import StructuredLogger
+from app.services.cache import cache_get_json, cache_set_json
+from app.models.billing import BillingBatch, BillingBatchFile
+
+
+router = APIRouter()
+
+# Initialize logger for billing operations
+billing_logger = StructuredLogger("billing_operations", "INFO")
+
+# Realtime WebSocket subscriber registry: batch_id -> set[WebSocket]
+_subscribers_by_batch: Dict[str, Set[WebSocket]] = {}
+_subscribers_lock = asyncio.Lock()
+
+
+async def _notify_progress_subscribers(progress: "BatchProgress") -> None:
+ """Broadcast latest progress to active subscribers of a batch."""
+ batch_id = progress.batch_id
+ message = {"type": "progress", "data": progress.model_dump()}
+ async with _subscribers_lock:
+ sockets = list(_subscribers_by_batch.get(batch_id, set()))
+ if not sockets:
+ return
+ dead: List[WebSocket] = []
+ for ws in sockets:
+ try:
+ await ws.send_json(message)
+ except Exception:
+ dead.append(ws)
+ if dead:
+ async with _subscribers_lock:
+ bucket = _subscribers_by_batch.get(batch_id)
+ if bucket:
+ for ws in dead:
+ bucket.discard(ws)
+ if not bucket:
+ _subscribers_by_batch.pop(batch_id, None)
+
+
+def _round(value: Optional[float]) -> float:
+ try:
+ return round(float(value or 0.0), 2)
+ except Exception:
+ return 0.0
+
+
+class StatementEntry(BaseModel):
+ id: int
+ date: Optional[date]
+ t_code: str
+ t_type: str
+ description: Optional[str] = None
+ quantity: float = 0.0
+ rate: float = 0.0
+ amount: float
+
+ model_config = ConfigDict(from_attributes=True)
+
+
+class StatementTotals(BaseModel):
+ charges_billed: float
+ charges_unbilled: float
+ charges_total: float
+ payments: float
+ trust_balance: float
+ current_balance: float
+
+
+class StatementResponse(BaseModel):
+ file_no: str
+ client_name: Optional[str] = None
+ as_of: str
+ totals: StatementTotals
+    unbilled_entries: List[StatementEntry]
+
+
+class BatchHistorySummary(BaseModel):
+ batch_id: str
+ status: str
+ total_files: int
+ successful_files: int
+ failed_files: int
+    started_at: Optional[str] = None
+    updated_at: Optional[str] = None
+ completed_at: Optional[str] = None
+ processing_time_seconds: Optional[float] = None
+
+
+@router.get("/statements/batch-list", response_model=List[str])
+async def list_active_batches(
+ current_user: User = Depends(get_current_user),
+):
+ """
+ List all currently active batch statement generation operations.
+
+ Returns batch IDs for operations that are currently pending or running.
+ Completed, failed, and cancelled operations are excluded.
+
+ **Returns:**
+ - List of active batch IDs that can be used with the progress endpoint
+
+ **Usage:**
+ Use this endpoint to discover active batch operations for progress monitoring.
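+
+    **Example response (illustrative):** `["batch_20240115_143022_0042"]`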
+ """
+    # progress_store is defined later in this module and is available at runtime
+    return await progress_store.list_active_batches()
+
+
+@router.get("/statements/batch-progress/{batch_id}", response_model=Dict[str, Any])
+async def get_batch_progress(
+ batch_id: str = PathParam(..., description="Batch operation identifier"),
+ current_user: User = Depends(get_current_user),
+):
+ """
+ Get real-time progress information for a batch statement generation operation.
+
+ Provides comprehensive progress tracking including:
+ - Overall batch status and completion percentage
+ - Individual file processing status and timing
+ - Current file being processed
+ - Estimated completion time based on processing rate
+ - Success/failure rates and error details
+
+ **Parameters:**
+ - **batch_id**: Unique identifier for the batch operation
+
+ **Returns:**
+ - Complete progress information including:
+ - Batch status (pending, running, completed, failed, cancelled)
+ - File counts (total, processed, successful, failed)
+ - Timing information and estimates
+ - Individual file details and results
+ - Error information if applicable
+
+ **Errors:**
+ - 404: Batch operation not found (may have expired or never existed)
+ """
+    progress = await progress_store.get_progress(batch_id)
+
+ if not progress:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Batch operation {batch_id} not found. It may have expired or never existed."
+ )
+
+ return progress.model_dump()
+
+
+@router.get("/statements/batch-history", response_model=List[BatchHistorySummary])
+async def list_batch_history(
+ status_filter: Optional[str] = Query(None, description="Status filter: pending|running|completed|failed|cancelled"),
+ sort: Optional[str] = Query("updated_desc", description="Sort: updated_desc|updated_asc|started_desc|started_asc|completed_desc|completed_asc"),
+ limit: int = Query(50, ge=1, le=200),
+ offset: int = Query(0, ge=0, le=10000),
+ start_date: Optional[str] = Query(None, description="ISO start bound (filters started_at)"),
+ end_date: Optional[str] = Query(None, description="ISO end bound (filters started_at)"),
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """List batch operations from persistent history with filters and pagination."""
+ q = db.query(BillingBatch)
+ if status_filter:
+ q = q.filter(BillingBatch.status == status_filter)
+
+    def _parse(dt: Optional[str]):
+        if not dt:
+            return None
+        try:
+            return datetime.fromisoformat(dt.replace('Z', '+00:00'))
+        except Exception:
+            return None
+
+ if start_date:
+ sd = _parse(start_date)
+ if sd:
+ q = q.filter(BillingBatch.started_at >= sd)
+ if end_date:
+ ed = _parse(end_date)
+ if ed:
+ q = q.filter(BillingBatch.started_at <= ed)
+
+ sort_map = {
+ "updated_desc": (BillingBatch.updated_at.desc(),),
+ "updated_asc": (BillingBatch.updated_at.asc(),),
+ "started_desc": (BillingBatch.started_at.desc(),),
+ "started_asc": (BillingBatch.started_at.asc(),),
+ "completed_desc": (BillingBatch.completed_at.desc(),),
+ "completed_asc": (BillingBatch.completed_at.asc(),),
+ }
+ q = q.order_by(*sort_map.get(sort or "updated_desc", sort_map["updated_desc"]))
+
+ rows = q.offset(offset).limit(limit).all()
+ items: List[BatchHistorySummary] = []
+ for r in rows:
+ items.append(BatchHistorySummary(
+ batch_id=r.batch_id,
+ status=r.status,
+ total_files=r.total_files,
+ successful_files=r.successful_files,
+ failed_files=r.failed_files,
+ started_at=r.started_at.isoformat() if r.started_at else None,
+ updated_at=r.updated_at.isoformat() if r.updated_at else None,
+ completed_at=r.completed_at.isoformat() if r.completed_at else None,
+ processing_time_seconds=r.processing_time_seconds,
+ ))
+ return items
+
+
+@router.get("/statements/{file_no}", response_model=StatementResponse)
+async def get_statement_snapshot(
+ file_no: str,
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """Return a computed statement snapshot for a file.
+
+ Includes totals (billed/unbilled charges, payments, trust balance, current balance)
+ and an itemized list of unbilled transactions.
+ """
+ file_obj = (
+ db.query(File)
+ .options(joinedload(File.owner))
+ .filter(File.file_no == file_no)
+ .first()
+ )
+
+ if not file_obj:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="File not found",
+ )
+
+ # Load relevant ledger entries once
+ entries: List[Ledger] = db.query(Ledger).filter(Ledger.file_no == file_no).all()
+
+ # Charges are debits: hourly (2), flat (3), disbursements (4)
+ CHARGE_TYPES = {"2", "3", "4"}
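+    # Illustrative example: a billed hourly fee of $100 (type 2, billed="Y"),
+    # an unbilled $50 disbursement (type 4), and a $30 payment (type 5) yield
+    # charges_billed=100, charges_unbilled=50, charges_total=150, payments=30,
+    # and current_balance=120.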
+
+ charges_billed = sum(e.amount for e in entries if e.t_type in CHARGE_TYPES and e.billed == "Y")
+ charges_unbilled = sum(e.amount for e in entries if e.t_type in CHARGE_TYPES and e.billed != "Y")
+ charges_total = charges_billed + charges_unbilled
+
+ # Payments/credits are type 5
+ payments_total = sum(e.amount for e in entries if e.t_type == "5")
+
+ # Trust balance is tracked on File (kept in sync by ledger endpoints)
+ trust_balance = file_obj.trust_bal or 0.0
+
+ # Current balance is total charges minus payments
+ current_balance = charges_total - payments_total
+
+ # Itemized unbilled entries (charges only)
+ unbilled_entries = [
+ StatementEntry(
+ id=e.id,
+ date=e.date,
+ t_code=e.t_code,
+ t_type=e.t_type,
+ description=e.note,
+ quantity=e.quantity or 0.0,
+ rate=e.rate or 0.0,
+ amount=e.amount,
+ )
+ for e in entries
+ if e.t_type in CHARGE_TYPES and e.billed != "Y"
+ ]
+
+ client_name = None
+ if file_obj.owner:
+ client_name = f"{file_obj.owner.first or ''} {file_obj.owner.last}".strip()
+
+ response = StatementResponse(
+ file_no=file_no,
+ client_name=client_name or None,
+ as_of=datetime.now(timezone.utc).isoformat(),
+ totals=StatementTotals(
+ charges_billed=_round(charges_billed),
+ charges_unbilled=_round(charges_unbilled),
+ charges_total=_round(charges_total),
+ payments=_round(payments_total),
+ trust_balance=_round(trust_balance),
+ current_balance=_round(current_balance),
+ ),
+ unbilled_entries=unbilled_entries,
+ )
+
+ return response
+
+
+class GenerateStatementRequest(BaseModel):
+ file_no: str
+ period: Optional[str] = None # Supports YYYY-MM for monthly; optional
+
+
+class GeneratedStatementMeta(BaseModel):
+ file_no: str
+ client_name: Optional[str] = None
+ as_of: str
+ period: Optional[str] = None
+ totals: StatementTotals
+ unbilled_count: int
+ export_path: str
+ filename: str
+ size: int
+ content_type: str = "text/html"
+
+
+class BatchGenerateStatementRequest(BaseModel):
+ file_numbers: List[str] = Field(..., description="List of file numbers to generate statements for", max_length=50)
+ period: Optional[str] = Field(None, description="Optional period filter in YYYY-MM format")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "file_numbers": ["ABC-123", "DEF-456", "GHI-789"],
+ "period": "2024-01"
+ }
+ }
+ )
+
+
+class BatchFileResult(BaseModel):
+ file_no: str
+ status: str # "success" or "failed"
+ message: Optional[str] = None
+ statement_meta: Optional[GeneratedStatementMeta] = None
+ error_details: Optional[str] = None
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "file_no": "ABC-123",
+ "status": "success",
+ "message": "Statement generated successfully",
+ "statement_meta": {
+ "file_no": "ABC-123",
+ "filename": "statement_ABC-123_20240115_143022.html",
+ "size": 2048
+ }
+ }
+ }
+ )
+
+
+class BatchGenerateStatementResponse(BaseModel):
+ batch_id: str = Field(..., description="Unique identifier for this batch operation")
+ total_files: int = Field(..., description="Total number of files requested")
+ successful: int = Field(..., description="Number of files processed successfully")
+ failed: int = Field(..., description="Number of files that failed processing")
+ success_rate: float = Field(..., description="Success rate as percentage")
+ started_at: str = Field(..., description="ISO timestamp when batch started")
+ completed_at: str = Field(..., description="ISO timestamp when batch completed")
+ processing_time_seconds: float = Field(..., description="Total processing time in seconds")
+ results: List[BatchFileResult] = Field(..., description="Individual file processing results")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "batch_id": "batch_20240115_143022_abc123",
+ "total_files": 3,
+ "successful": 2,
+ "failed": 1,
+ "success_rate": 66.67,
+ "started_at": "2024-01-15T14:30:22.123456+00:00",
+ "completed_at": "2024-01-15T14:30:27.654321+00:00",
+ "processing_time_seconds": 5.53,
+ "results": [
+ {
+ "file_no": "ABC-123",
+ "status": "success",
+ "message": "Statement generated successfully"
+ }
+ ]
+ }
+ }
+ )
+
+
+class BatchStatus(str, Enum):
+ """Enumeration of batch operation statuses."""
+ PENDING = "pending"
+ RUNNING = "running"
+ COMPLETED = "completed"
+ FAILED = "failed"
+ CANCELLED = "cancelled"
+
+
+class BatchProgressEntry(BaseModel):
+ """Progress information for a single file in a batch operation."""
+ file_no: str
+ status: str # "pending", "processing", "completed", "failed"
+ started_at: Optional[str] = None
+ completed_at: Optional[str] = None
+ error_message: Optional[str] = None
+ statement_meta: Optional[GeneratedStatementMeta] = None
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "file_no": "ABC-123",
+ "status": "completed",
+ "started_at": "2024-01-15T14:30:22.123456+00:00",
+ "completed_at": "2024-01-15T14:30:25.654321+00:00",
+ "statement_meta": {
+ "file_no": "ABC-123",
+ "filename": "statement_ABC-123_20240115_143022.html",
+ "size": 2048
+ }
+ }
+ }
+ )
+
+
+class BatchProgress(BaseModel):
+ """Comprehensive progress information for a batch operation."""
+ batch_id: str
+ status: BatchStatus
+ total_files: int
+ processed_files: int
+ successful_files: int
+ failed_files: int
+ current_file: Optional[str] = None
+ started_at: str
+ updated_at: str
+ completed_at: Optional[str] = None
+ estimated_completion: Optional[str] = None
+ processing_time_seconds: Optional[float] = None
+ success_rate: Optional[float] = None
+ files: List[BatchProgressEntry] = Field(default_factory=list)
+ error_message: Optional[str] = None
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "batch_id": "batch_20240115_143022_abc123",
+ "status": "running",
+ "total_files": 5,
+ "processed_files": 2,
+ "successful_files": 2,
+ "failed_files": 0,
+ "current_file": "ABC-123",
+ "started_at": "2024-01-15T14:30:22.123456+00:00",
+ "updated_at": "2024-01-15T14:30:24.789012+00:00",
+ "estimated_completion": "2024-01-15T14:30:30.000000+00:00",
+ "files": [
+ {
+ "file_no": "ABC-123",
+ "status": "processing",
+ "started_at": "2024-01-15T14:30:24.789012+00:00"
+ }
+ ]
+ }
+ }
+ )
+
+
+class BatchProgressStore:
+ """
+ Thread-safe progress store for batch operations with caching support.
+
+ Uses Redis for distributed caching when available, falls back to in-memory storage.
+ Includes automatic cleanup of old progress data.
+ """
+
+ def __init__(self):
+ self._lock = threading.RLock()
+ self._in_memory_store: Dict[str, BatchProgress] = {}
+ self._cleanup_interval = 3600 # 1 hour
+ self._retention_period = 86400 # 24 hours
+ self._last_cleanup = time.time()
+
+ def _should_cleanup(self) -> bool:
+ """Check if cleanup should be performed."""
+ return time.time() - self._last_cleanup > self._cleanup_interval
+
+ async def _cleanup_old_entries(self) -> None:
+ """Remove old progress entries based on retention policy."""
+ if not self._should_cleanup():
+ return
+
+ cutoff_time = datetime.now(timezone.utc) - timedelta(seconds=self._retention_period)
+ cutoff_str = cutoff_time.isoformat()
+
+ with self._lock:
+ # Clean up in-memory store
+ expired_keys = []
+ for batch_id, progress in self._in_memory_store.items():
+ if (progress.status in [BatchStatus.COMPLETED, BatchStatus.FAILED, BatchStatus.CANCELLED] and
+ progress.updated_at < cutoff_str):
+ expired_keys.append(batch_id)
+
+ for key in expired_keys:
+ del self._in_memory_store[key]
+
+ billing_logger.info(
+ "Cleaned up old batch progress entries",
+ cleaned_count=len(expired_keys),
+ cutoff_time=cutoff_str
+ )
+
+ self._last_cleanup = time.time()
+
+ async def get_progress(self, batch_id: str) -> Optional[BatchProgress]:
+ """Get progress information for a batch operation."""
+ await self._cleanup_old_entries()
+
+ # Try cache first
+ try:
+ cached_data = await cache_get_json("batch_progress", None, {"batch_id": batch_id})
+ if cached_data:
+ return BatchProgress.model_validate(cached_data)
+ except Exception as e:
+ billing_logger.debug(f"Cache get failed for batch {batch_id}: {str(e)}")
+
+ # Fall back to in-memory store
+ with self._lock:
+ return self._in_memory_store.get(batch_id)
+
+ async def set_progress(self, progress: BatchProgress) -> None:
+ """Store progress information for a batch operation."""
+ progress.updated_at = datetime.now(timezone.utc).isoformat()
+
+ # Store in cache with TTL
+ try:
+ await cache_set_json(
+ "batch_progress",
+ None,
+ {"batch_id": progress.batch_id},
+ progress.model_dump(),
+ self._retention_period
+ )
+ except Exception as e:
+ billing_logger.debug(f"Cache set failed for batch {progress.batch_id}: {str(e)}")
+
+ # Store in memory as backup
+ with self._lock:
+ self._in_memory_store[progress.batch_id] = progress
+ # Notify subscribers (best-effort)
+ try:
+ await _notify_progress_subscribers(progress)
+ except Exception:
+ pass
+
+ async def delete_progress(self, batch_id: str) -> None:
+ """Delete progress information for a batch operation."""
+ # Note: The current cache service doesn't have a delete function
+ # We'll rely on TTL expiration for cache cleanup
+ # Just remove from in-memory store
+ with self._lock:
+ self._in_memory_store.pop(batch_id, None)
+
+ async def list_active_batches(self) -> List[str]:
+ """List all active batch operations."""
+ await self._cleanup_old_entries()
+
+ active_batches = []
+
+ with self._lock:
+ for batch_id, progress in self._in_memory_store.items():
+ if progress.status in [BatchStatus.PENDING, BatchStatus.RUNNING]:
+ active_batches.append(batch_id)
+
+ return active_batches
+
+
+# Global progress store instance
+progress_store = BatchProgressStore()
+
+
+def _parse_period_month(period: Optional[str]) -> Optional[tuple[date, date]]:
+ """Parse period in the form YYYY-MM and return (start_date, end_date) inclusive.
+ Returns None when period is not provided or invalid.
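+
+    Example:
+        >>> _parse_period_month("2024-02")
+        (datetime.date(2024, 2, 1), datetime.date(2024, 2, 29))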
+ """
+ if not period:
+ return None
+ m = re.fullmatch(r"(\d{4})-(\d{2})", str(period).strip())
+ if not m:
+ return None
+ year = int(m.group(1))
+ month = int(m.group(2))
+ if month < 1 or month > 12:
+ return None
+ from calendar import monthrange
+ last_day = monthrange(year, month)[1]
+ return date(year, month, 1), date(year, month, last_day)
+
+
+def _render_statement_html(
+ *,
+ file_no: str,
+ client_name: Optional[str],
+ matter: Optional[str],
+ as_of_iso: str,
+ period: Optional[str],
+ totals: StatementTotals,
+ unbilled_entries: List[StatementEntry],
+) -> str:
+ """Create a simple, self-contained HTML statement string."""
+ # Rows for unbilled entries
+ def _fmt(val: Optional[float]) -> str:
+ try:
+ return f"{float(val or 0):.2f}"
+ except Exception:
+ return "0.00"
+
+ rows = []
+ for e in unbilled_entries:
+        rows.append(
+            f"<tr><td>{e.date.isoformat() if e.date else ''}</td><td>{e.t_code}</td>"
+            f"<td>{(e.description or '').replace('<', '&lt;').replace('>', '&gt;')}</td>"
+            f"<td>{_fmt(e.quantity)}</td><td>{_fmt(e.rate)}</td><td>{_fmt(e.amount)}</td></tr>"
+        )
+    rows_html = "\n".join(rows) if rows else "<tr><td colspan='6'>No unbilled entries</td></tr>"
+
+    period_html = f"<p>Period: {period}</p>" if period else ""
+
+    html = f"""<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<title>Statement {file_no}</title>
+</head>
+<body>
+<h1>Statement</h1>
+<p>File: {file_no}</p>
+<p>Client: {client_name or ''}</p>
+<p>Matter: {matter or ''}</p>
+<p>As of: {as_of_iso}</p>
+{period_html}
+<h2>Totals</h2>
+<table>
+<tr><th>Charges (billed)</th><td>${_fmt(totals.charges_billed)}</td></tr>
+<tr><th>Charges (unbilled)</th><td>${_fmt(totals.charges_unbilled)}</td></tr>
+<tr><th>Charges (total)</th><td>${_fmt(totals.charges_total)}</td></tr>
+<tr><th>Payments</th><td>${_fmt(totals.payments)}</td></tr>
+<tr><th>Trust balance</th><td>${_fmt(totals.trust_balance)}</td></tr>
+<tr><th>Current balance</th><td>${_fmt(totals.current_balance)}</td></tr>
+</table>
+<h2>Unbilled Entries</h2>
+<table>
+<thead>
+<tr>
+<th>Date</th>
+<th>Code</th>
+<th>Description</th>
+<th>Qty</th>
+<th>Rate</th>
+<th>Amount</th>
+</tr>
+</thead>
+<tbody>
+{rows_html}
+</tbody>
+</table>
+</body>
+</html>
+"""
+ return html
+
+
+def _generate_single_statement(
+ file_no: str,
+ period: Optional[str],
+ db: Session
+) -> GeneratedStatementMeta:
+ """
+ Internal helper to generate a statement for a single file.
+
+ Args:
+ file_no: File number to generate statement for
+ period: Optional period filter (YYYY-MM format)
+ db: Database session
+
+ Returns:
+ GeneratedStatementMeta with file metadata and export path
+
+ Raises:
+ HTTPException: If file not found or generation fails
+ """
+ file_obj = (
+ db.query(File)
+ .options(joinedload(File.owner))
+ .filter(File.file_no == file_no)
+ .first()
+ )
+
+ if not file_obj:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"File {file_no} not found",
+ )
+
+ # Optional period filtering (YYYY-MM)
+ date_range = _parse_period_month(period)
+ q = db.query(Ledger).filter(Ledger.file_no == file_no)
+ if date_range:
+ start_date, end_date = date_range
+ q = q.filter(Ledger.date >= start_date).filter(Ledger.date <= end_date)
+ entries: List[Ledger] = q.all()
+
+ CHARGE_TYPES = {"2", "3", "4"}
+ charges_billed = sum(e.amount for e in entries if e.t_type in CHARGE_TYPES and e.billed == "Y")
+ charges_unbilled = sum(e.amount for e in entries if e.t_type in CHARGE_TYPES and e.billed != "Y")
+ charges_total = charges_billed + charges_unbilled
+ payments_total = sum(e.amount for e in entries if e.t_type == "5")
+ trust_balance = file_obj.trust_bal or 0.0
+ current_balance = charges_total - payments_total
+
+ unbilled_entries = [
+ StatementEntry(
+ id=e.id,
+ date=e.date,
+ t_code=e.t_code,
+ t_type=e.t_type,
+ description=e.note,
+ quantity=e.quantity or 0.0,
+ rate=e.rate or 0.0,
+ amount=e.amount,
+ )
+ for e in entries
+ if e.t_type in CHARGE_TYPES and e.billed != "Y"
+ ]
+
+ client_name = None
+ if file_obj.owner:
+ client_name = f"{file_obj.owner.first or ''} {file_obj.owner.last}".strip()
+
+ as_of_iso = datetime.now(timezone.utc).isoformat()
+ totals_model = StatementTotals(
+ charges_billed=_round(charges_billed),
+ charges_unbilled=_round(charges_unbilled),
+ charges_total=_round(charges_total),
+ payments=_round(payments_total),
+ trust_balance=_round(trust_balance),
+ current_balance=_round(current_balance),
+ )
+
+ # Render HTML
+ html = _render_statement_html(
+ file_no=file_no,
+ client_name=client_name or None,
+ matter=file_obj.regarding,
+ as_of_iso=as_of_iso,
+ period=period,
+ totals=totals_model,
+ unbilled_entries=unbilled_entries,
+ )
+
+ # Ensure exports directory and write file
+ exports_dir = Path("exports")
+ try:
+ exports_dir.mkdir(exist_ok=True)
+ except Exception:
+        # Best-effort: if the directory cannot be created, surface an internal error
+ raise HTTPException(status_code=500, detail="Unable to create exports directory")
+
+ timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S_%f")
+ safe_file_no = str(file_no).replace("/", "_").replace("\\", "_")
+ filename = f"statement_{safe_file_no}_{timestamp}.html"
+ export_path = exports_dir / filename
+ html_bytes = html.encode("utf-8")
+ with open(export_path, "wb") as f:
+ f.write(html_bytes)
+
+ size = export_path.stat().st_size
+
+ return GeneratedStatementMeta(
+ file_no=file_no,
+ client_name=client_name or None,
+ as_of=as_of_iso,
+ period=period,
+ totals=totals_model,
+ unbilled_count=len(unbilled_entries),
+ export_path=str(export_path),
+ filename=filename,
+ size=size,
+ content_type="text/html",
+ )
+
+
+@router.post("/statements/generate", response_model=GeneratedStatementMeta)
+async def generate_statement(
+ payload: GenerateStatementRequest,
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """Generate a simple HTML statement and store it under exports/.
+
+ Returns metadata about the generated artifact.
+ """
+ return _generate_single_statement(payload.file_no, payload.period, db)
+
+
+async def _ws_authenticate(websocket: WebSocket) -> Optional[User]:
+ """Authenticate WebSocket via JWT token in query (?token=) or Authorization header."""
+ token = websocket.query_params.get("token")
+ if not token:
+ try:
+ auth_header = dict(websocket.headers).get("authorization") or ""
+ if auth_header.lower().startswith("bearer "):
+ token = auth_header.split(" ", 1)[1].strip()
+ except Exception:
+ token = None
+ if not token:
+ return None
+ username = verify_token(token)
+ if not username:
+ return None
+ db = SessionLocal()
+ try:
+ user = db.query(User).filter(User.username == username).first()
+ if not user or not user.is_active:
+ return None
+ return user
+ finally:
+ db.close()
+
+
+async def _ws_keepalive(ws: WebSocket, stop_event: asyncio.Event) -> None:
+ try:
+ while not stop_event.is_set():
+ await asyncio.sleep(25)
+ try:
+ await ws.send_json({"type": "ping", "ts": datetime.now(timezone.utc).isoformat()})
+ except Exception:
+ break
+ finally:
+ stop_event.set()
+
+
+@router.websocket("/statements/batch-progress/ws/{batch_id}")
+async def ws_batch_progress(websocket: WebSocket, batch_id: str):
+ """WebSocket: subscribe to real-time updates for a batch_id."""
+ user = await _ws_authenticate(websocket)
+ if not user:
+ await websocket.close(code=4401)
+ return
+ await websocket.accept()
+ # Register
+ async with _subscribers_lock:
+ bucket = _subscribers_by_batch.get(batch_id)
+ if not bucket:
+ bucket = set()
+ _subscribers_by_batch[batch_id] = bucket
+ bucket.add(websocket)
+ # Send initial snapshot
+ try:
+ snapshot = await progress_store.get_progress(batch_id)
+ await websocket.send_json({"type": "progress", "data": snapshot.model_dump() if snapshot else None})
+ except Exception:
+ pass
+ # Keepalive + receive loop
+ stop_event: asyncio.Event = asyncio.Event()
+ ka_task = asyncio.create_task(_ws_keepalive(websocket, stop_event))
+ try:
+ while not stop_event.is_set():
+ try:
+ msg = await websocket.receive_text()
+ except WebSocketDisconnect:
+ break
+ except Exception:
+ break
+ if isinstance(msg, str) and msg.strip() == "ping":
+ try:
+ await websocket.send_text("pong")
+ except Exception:
+ break
+ finally:
+ stop_event.set()
+ try:
+ ka_task.cancel()
+ except Exception:
+ pass
+ async with _subscribers_lock:
+ bucket = _subscribers_by_batch.get(batch_id)
+ if bucket and websocket in bucket:
+ bucket.discard(websocket)
+ if not bucket:
+ _subscribers_by_batch.pop(batch_id, None)
+
+@router.delete("/statements/batch-progress/{batch_id}")
+async def cancel_batch_operation(
+ batch_id: str = PathParam(..., description="Batch operation identifier to cancel"),
+ current_user: User = Depends(get_current_user),
+):
+ """
+ Cancel an active batch statement generation operation.
+
+ **Note:** This endpoint marks the batch as cancelled but does not interrupt
+ currently running file processing. Files already being processed will complete,
+ but pending files will be skipped.
+
+ **Parameters:**
+ - **batch_id**: Unique identifier for the batch operation to cancel
+
+ **Returns:**
+ - Success message confirming cancellation
+
+ **Errors:**
+ - 404: Batch operation not found
+ - 400: Batch operation cannot be cancelled (already completed/failed)
+ """
+ progress = await progress_store.get_progress(batch_id)
+
+ if not progress:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Batch operation {batch_id} not found"
+ )
+
+ if progress.status not in [BatchStatus.PENDING, BatchStatus.RUNNING]:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=f"Cannot cancel batch operation with status: {progress.status}"
+ )
+
+ # Mark as cancelled
+ progress.status = BatchStatus.CANCELLED
+ progress.completed_at = datetime.now(timezone.utc).isoformat()
+ progress.processing_time_seconds = (
+ datetime.fromisoformat(progress.completed_at.replace('Z', '+00:00')) -
+ datetime.fromisoformat(progress.started_at.replace('Z', '+00:00'))
+ ).total_seconds()
+
+ await progress_store.set_progress(progress)
+
+ billing_logger.info(
+ "Batch operation cancelled",
+ batch_id=batch_id,
+ user_id=getattr(current_user, "id", None),
+ processed_files=progress.processed_files,
+ total_files=progress.total_files
+ )
+
+ return {"message": f"Batch operation {batch_id} has been cancelled"}
+
+
+async def _calculate_estimated_completion(
+ progress: BatchProgress,
+ current_time: datetime
+) -> Optional[str]:
+ """Calculate estimated completion time based on current progress."""
+ if progress.processed_files == 0:
+ return None
+
+ start_time = datetime.fromisoformat(progress.started_at.replace('Z', '+00:00'))
+ elapsed_seconds = (current_time - start_time).total_seconds()
+
+ if elapsed_seconds <= 0:
+ return None
+
+ # Calculate average time per file
+ avg_time_per_file = elapsed_seconds / progress.processed_files
+ remaining_files = progress.total_files - progress.processed_files
+
+ if remaining_files <= 0:
+ return current_time.isoformat()
+
+ estimated_remaining_seconds = avg_time_per_file * remaining_files
+ estimated_completion = current_time + timedelta(seconds=estimated_remaining_seconds)
+
+ return estimated_completion.isoformat()
+
+
+@router.post("/statements/batch-generate", response_model=BatchGenerateStatementResponse)
+async def batch_generate_statements(
+ payload: BatchGenerateStatementRequest,
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """
+ Generate statements for multiple files simultaneously with real-time progress tracking and error handling.
+
+ Processes statements for up to 50 files at once. Individual file failures do not stop the batch operation.
+ Each file is processed independently with detailed error reporting and real-time progress updates.
+
+ **Parameters:**
+ - **file_numbers**: List of file numbers to generate statements for (max 50)
+ - **period**: Optional period filter in YYYY-MM format for all files
+
+ **Returns:**
+ - Detailed batch operation results including:
+ - Total files processed
+ - Success/failure counts and rates
+ - Individual file results with error details
+ - Processing time metrics
+ - Unique batch identifier for progress tracking
+
+ **Features:**
+ - Real-time progress tracking via `/statements/batch-progress/{batch_id}`
+ - Individual file error handling - failures don't stop other files
+ - Estimated completion time calculations
+ - Detailed error reporting per file
+ - Batch operation identification for audit trails
+ - Automatic cleanup of progress data after completion
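+
+    **Example request (illustrative):**
+    ```
+    POST /api/billing/statements/batch-generate
+    {"file_numbers": ["ABC-123", "DEF-456"], "period": "2024-01"}
+    ```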
+ """
+ # Validate request
+ if not payload.file_numbers:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="At least one file number must be provided"
+ )
+
+ if len(payload.file_numbers) > 50:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Maximum 50 files allowed per batch operation"
+ )
+
+ # Remove duplicates while preserving order
+ unique_file_numbers = list(dict.fromkeys(payload.file_numbers))
+
+ # Generate batch ID and timing
+ start_time = datetime.now(timezone.utc)
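+    # Note: str hash() is salted per process in Python, so the 4-digit suffix
+    # below is not reproducible across runs; uniqueness comes mainly from the
+    # timestamp component.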
+ batch_id = f"batch_{start_time.strftime('%Y%m%d_%H%M%S')}_{abs(hash(str(unique_file_numbers))) % 10000:04d}"
+
+ billing_logger.info(
+ "Starting batch statement generation",
+ batch_id=batch_id,
+ total_files=len(unique_file_numbers),
+ file_numbers=unique_file_numbers,
+ period=payload.period,
+ user_id=getattr(current_user, "id", None),
+ user_name=getattr(current_user, "username", None)
+ )
+
+ # Initialize progress tracking
+ progress = BatchProgress(
+ batch_id=batch_id,
+ status=BatchStatus.RUNNING,
+ total_files=len(unique_file_numbers),
+ processed_files=0,
+ successful_files=0,
+ failed_files=0,
+ started_at=start_time.isoformat(),
+ updated_at=start_time.isoformat(),
+ files=[
+ BatchProgressEntry(
+ file_no=file_no,
+ status="pending"
+ ) for file_no in unique_file_numbers
+ ]
+ )
+
+ # Store initial progress
+ await progress_store.set_progress(progress)
+
+ # Track results for final response
+ results: List[BatchFileResult] = []
+ successful = 0
+ failed = 0
+
+ try:
+ # Process each file
+ for idx, file_no in enumerate(unique_file_numbers):
+ current_time = datetime.now(timezone.utc)
+
+ # Check if operation was cancelled
+ current_progress = await progress_store.get_progress(batch_id)
+ if current_progress and current_progress.status == BatchStatus.CANCELLED:
+ billing_logger.info(
+ "Batch operation cancelled, skipping remaining files",
+ batch_id=batch_id,
+ file_no=file_no,
+ remaining_files=len(unique_file_numbers) - idx
+ )
+ break
+
+ # Update progress - mark current file as processing
+ progress.current_file = file_no
+ progress.files[idx].status = "processing"
+ progress.files[idx].started_at = current_time.isoformat()
+ progress.estimated_completion = await _calculate_estimated_completion(progress, current_time)
+ await progress_store.set_progress(progress)
+
+ billing_logger.info(
+ "Processing file statement",
+ batch_id=batch_id,
+ file_no=file_no,
+ progress=f"{idx + 1}/{len(unique_file_numbers)}",
+ progress_percent=round(((idx + 1) / len(unique_file_numbers)) * 100, 1)
+ )
+
+ try:
+ # Generate statement for this file
+ statement_meta = _generate_single_statement(file_no, payload.period, db)
+
+ # Success - update progress
+ completed_time = datetime.now(timezone.utc).isoformat()
+ progress.files[idx].status = "completed"
+ progress.files[idx].completed_at = completed_time
+ progress.files[idx].statement_meta = statement_meta
+ progress.processed_files += 1
+ progress.successful_files += 1
+ successful += 1
+
+ results.append(BatchFileResult(
+ file_no=file_no,
+ status="success",
+ message="Statement generated successfully",
+ statement_meta=statement_meta
+ ))
+
+ billing_logger.info(
+ "File statement generated successfully",
+ batch_id=batch_id,
+ file_no=file_no,
+ filename=statement_meta.filename,
+ size=statement_meta.size
+ )
+
+ except HTTPException as e:
+ # HTTP errors (e.g., file not found)
+ error_msg = e.detail
+ completed_time = datetime.now(timezone.utc).isoformat()
+ progress.files[idx].status = "failed"
+ progress.files[idx].completed_at = completed_time
+ progress.files[idx].error_message = error_msg
+ progress.processed_files += 1
+ progress.failed_files += 1
+ failed += 1
+
+ results.append(BatchFileResult(
+ file_no=file_no,
+ status="failed",
+ message=f"Generation failed: {error_msg}",
+ error_details=str(e.detail)
+ ))
+
+ billing_logger.warning(
+ "File statement generation failed (HTTP error)",
+ batch_id=batch_id,
+ file_no=file_no,
+ error=error_msg,
+ status_code=e.status_code
+ )
+
+ except SQLAlchemyError as e:
+ # Database errors
+ error_msg = f"Database error: {str(e)}"
+ completed_time = datetime.now(timezone.utc).isoformat()
+ progress.files[idx].status = "failed"
+ progress.files[idx].completed_at = completed_time
+ progress.files[idx].error_message = error_msg
+ progress.processed_files += 1
+ progress.failed_files += 1
+ failed += 1
+
+ results.append(BatchFileResult(
+ file_no=file_no,
+ status="failed",
+ message=f"Database error during generation",
+ error_details=error_msg
+ ))
+
+ billing_logger.error(
+ "File statement generation failed (database error)",
+ batch_id=batch_id,
+ file_no=file_no,
+ error=str(e)
+ )
+
+ except Exception as e:
+ # Any other unexpected errors
+ error_msg = f"Unexpected error: {str(e)}"
+ completed_time = datetime.now(timezone.utc).isoformat()
+ progress.files[idx].status = "failed"
+ progress.files[idx].completed_at = completed_time
+ progress.files[idx].error_message = error_msg
+ progress.processed_files += 1
+ progress.failed_files += 1
+ failed += 1
+
+ results.append(BatchFileResult(
+ file_no=file_no,
+ status="failed",
+ message="Unexpected error during generation",
+ error_details=error_msg
+ ))
+
+ billing_logger.error(
+ "File statement generation failed (unexpected error)",
+ batch_id=batch_id,
+ file_no=file_no,
+ error=str(e),
+ error_type=type(e).__name__
+ )
+
+ # Update progress after each file
+ await progress_store.set_progress(progress)
+
+ # Mark batch as completed
+ end_time = datetime.now(timezone.utc)
+ progress.status = BatchStatus.COMPLETED
+ progress.completed_at = end_time.isoformat()
+ progress.current_file = None
+ progress.processing_time_seconds = (end_time - start_time).total_seconds()
+ progress.success_rate = (successful / len(unique_file_numbers) * 100) if len(unique_file_numbers) > 0 else 0
+ progress.estimated_completion = None
+ await progress_store.set_progress(progress)
+
+ except Exception as e:
+ # Handle batch-level failures
+ end_time = datetime.now(timezone.utc)
+ progress.status = BatchStatus.FAILED
+ progress.completed_at = end_time.isoformat()
+ progress.error_message = f"Batch operation failed: {str(e)}"
+ progress.processing_time_seconds = (end_time - start_time).total_seconds()
+ await progress_store.set_progress(progress)
+
+ billing_logger.error(
+ "Batch statement generation failed",
+ batch_id=batch_id,
+ error=str(e),
+ error_type=type(e).__name__
+ )
+
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Batch operation failed: {str(e)}"
+ )
+
+ # Calculate final metrics
+ processing_time = (end_time - start_time).total_seconds()
+ total_files = len(unique_file_numbers)
+ success_rate = (successful / total_files * 100) if total_files > 0 else 0
+
+ billing_logger.info(
+ "Batch statement generation completed",
+ batch_id=batch_id,
+ total_files=total_files,
+ successful=successful,
+ failed=failed,
+ success_rate=success_rate,
+ processing_time_seconds=processing_time
+ )
+
+ # Persist batch summary and per-file results
+ try:
+        def _parse_iso(dt: Optional[str]):
+            if not dt:
+                return None
+            try:
+                return datetime.fromisoformat(dt.replace('Z', '+00:00'))
+            except Exception:
+                return None
+
+ batch_row = BillingBatch(
+ batch_id=batch_id,
+ status=str(progress.status),
+ total_files=total_files,
+ successful_files=successful,
+ failed_files=failed,
+ started_at=_parse_iso(progress.started_at),
+ updated_at=_parse_iso(progress.updated_at),
+ completed_at=_parse_iso(progress.completed_at),
+ processing_time_seconds=processing_time,
+ success_rate=success_rate,
+ error_message=progress.error_message,
+ )
+ db.add(batch_row)
+ for f in progress.files:
+ meta = getattr(f, 'statement_meta', None)
+ filename = None
+ size = None
+ if meta is not None:
+ try:
+ filename = getattr(meta, 'filename', None)
+ size = getattr(meta, 'size', None)
+ except Exception:
+ pass
+ if filename is None and isinstance(meta, dict):
+ filename = meta.get('filename')
+ size = meta.get('size')
+ db.add(BillingBatchFile(
+ batch_id=batch_id,
+ file_no=f.file_no,
+ status=str(f.status),
+ error_message=f.error_message,
+ filename=filename,
+ size=size,
+ started_at=_parse_iso(f.started_at),
+ completed_at=_parse_iso(f.completed_at),
+ ))
+ db.commit()
+ except Exception:
+ try:
+ db.rollback()
+ except Exception:
+ pass
+
+ return BatchGenerateStatementResponse(
+ batch_id=batch_id,
+ total_files=total_files,
+ successful=successful,
+ failed=failed,
+ success_rate=round(success_rate, 2),
+ started_at=start_time.isoformat(),
+ completed_at=end_time.isoformat(),
+ processing_time_seconds=round(processing_time, 2),
+ results=results
+ )
+
+
+class StatementFileMeta(BaseModel):
+ """Metadata for a generated statement file."""
+ filename: str = Field(..., description="The filename of the generated statement")
+ size: int = Field(..., description="File size in bytes")
+ created: str = Field(..., description="ISO timestamp when the file was created")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "filename": "statement_ABC-123_20240115_143022.html",
+ "size": 2048,
+ "created": "2024-01-15T14:30:22.123456+00:00"
+ }
+ }
+ )
+
+
+class DeleteStatementResponse(BaseModel):
+ """Response for successful statement deletion."""
+ message: str = Field(..., description="Success message")
+ filename: str = Field(..., description="Name of the deleted file")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "message": "Statement deleted successfully",
+ "filename": "statement_ABC-123_20240115_143022.html"
+ }
+ }
+ )
+
+
+@router.get("/statements/{file_no}/list", response_model=List[StatementFileMeta])
+async def list_generated_statements(
+ file_no: str = PathParam(..., description="File number to list statements for"),
+ period: Optional[str] = Query(
+ None,
+ description="Optional period filter in YYYY-MM format (e.g., '2024-01')",
+ pattern=r"^\d{4}-\d{2}$"
+ ),
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """List generated statement files for a specific file number.
+
+ Returns metadata for all generated statement HTML files, sorted by creation time (newest first).
+ Optionally filter by billing period using the period parameter.
+
+ **Parameters:**
+ - **file_no**: The file number to list statements for
+ - **period**: Optional filter for statements from a specific billing period (YYYY-MM format)
+
+ **Returns:**
+ - List of statement file metadata including filename, size, and creation timestamp
+
+ **Errors:**
+ - 404: File not found or no statements exist
+ """
+ # Ensure file exists
+ file_obj = db.query(File).filter(File.file_no == file_no).first()
+ if not file_obj:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="File not found",
+ )
+
+ exports_dir = Path("exports")
+ if not exports_dir.exists():
+ return []
+
+ safe_file_no = str(file_no).replace("/", "_").replace("\\", "_")
+ candidates = list(exports_dir.glob(f"statement_{safe_file_no}_*.html"))
+ if not candidates:
+ return []
+
+ # Optional filter by period by inspecting HTML content
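+    # (matches the literal "Period: YYYY-MM" text written by _render_statement_html)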
+ if period:
+ filtered: List[Path] = []
+ search_token = f"Period: {period}"
+ for path in candidates:
+ try:
+ with open(path, "r", encoding="utf-8") as f:
+ content = f.read()
+ if search_token in content:
+ filtered.append(path)
+ except Exception:
+ continue
+ candidates = filtered
+
+ # Sort newest first by modification time
+ candidates.sort(key=lambda p: p.stat().st_mtime, reverse=True)
+
+ result: List[StatementFileMeta] = []
+ for path in candidates:
+ try:
+ st = path.stat()
+ created_iso = datetime.fromtimestamp(st.st_mtime, timezone.utc).isoformat()
+ result.append(StatementFileMeta(filename=path.name, size=st.st_size, created=created_iso))
+ except FileNotFoundError:
+ continue
+ return result
+
+
+@router.delete("/statements/{file_no}/{filename}", response_model=DeleteStatementResponse)
+async def delete_generated_statement(
+ file_no: str = PathParam(..., description="File number that owns the statement"),
+ filename: str = PathParam(..., description="Name of the statement file to delete"),
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """Delete a specific generated statement file.
+
+    Securely deletes a statement HTML file; filename validation ensures the
+    statement belongs to the specified file_no.
+
+ **Parameters:**
+ - **file_no**: The file number that owns the statement
+ - **filename**: Name of the statement file to delete (must match expected naming pattern)
+
+ **Returns:**
+ - Success message and deleted filename
+
+ **Security:**
+ - Only allows deletion of files matching the expected naming pattern for the file_no
+ - Prevents cross-file statement deletion and path traversal attacks
+
+ **Errors:**
+ - 404: File not found, statement file not found, or security validation failed
+ - 500: File deletion failed
+ """
+ # Ensure file exists
+ file_obj = db.query(File).filter(File.file_no == file_no).first()
+ if not file_obj:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="File not found",
+ )
+
+ exports_dir = Path("exports")
+ if not exports_dir.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Statement not found")
+
+ # Security: ensure filename matches expected pattern for this file_no
+ safe_file_no = str(file_no).replace("/", "_").replace("\\", "_")
+ expected_prefix = f"statement_{safe_file_no}_"
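+    # e.g. for file_no "ABC-123" only names like
+    # "statement_ABC-123_20240115_143022_123456.html" pass this check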
+ if not filename.startswith(expected_prefix) or not filename.endswith(".html"):
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Statement not found",
+ )
+
+ statement_path = exports_dir / filename
+ if not statement_path.exists():
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Statement not found",
+ )
+
+ try:
+ statement_path.unlink()
+ return DeleteStatementResponse(
+ message="Statement deleted successfully",
+ filename=filename
+ )
+ except Exception as e:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Failed to delete statement: {str(e)}",
+ )
+
+
+@router.get("/statements/{file_no}/download", responses={
+ 200: {
+ "description": "Statement HTML file",
+ "content": {"text/html": {}},
+ "headers": {
+ "content-disposition": {
+ "description": "Attachment header with filename",
+ "schema": {"type": "string"}
+ }
+ }
+ },
+ 404: {"description": "File or statement not found"}
+})
+async def download_latest_statement(
+ file_no: str = PathParam(..., description="File number to download statement for"),
+ period: Optional[str] = Query(
+ None,
+ description="Optional period filter in YYYY-MM format (e.g., '2024-01')",
+ pattern=r"^\d{4}-\d{2}$"
+ ),
+ db: Session = Depends(get_db),
+ current_user: User = Depends(get_current_user),
+):
+ """Download the most recent generated statement HTML file for a file number.
+
+ Returns the newest statement file as an HTML attachment. Optionally filter to find
+ the newest statement from a specific billing period.
+
+ **Parameters:**
+ - **file_no**: The file number to download statement for
+ - **period**: Optional filter for statements from a specific billing period (YYYY-MM format)
+
+ **Returns:**
+ - HTML file as attachment with appropriate content-disposition header
+
+ **Errors:**
+ - 404: File not found, no statements exist, or no statements match period filter
+ """
+ # Ensure file exists
+ file_obj = db.query(File).filter(File.file_no == file_no).first()
+ if not file_obj:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="File not found",
+ )
+
+ exports_dir = Path("exports")
+ if not exports_dir.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No statements found")
+
+ safe_file_no = str(file_no).replace("/", "_").replace("\\", "_")
+ candidates = list(exports_dir.glob(f"statement_{safe_file_no}_*.html"))
+ if not candidates:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No statements found")
+
+ # Optional filter by period by inspecting HTML content
+ if period:
+ filtered = []
+ search_token = f"Period: {period}"
+ for path in candidates:
+ try:
+ with open(path, "r", encoding="utf-8") as f:
+ content = f.read()
+ if search_token in content:
+ filtered.append(path)
+ except Exception:
+ # Skip unreadable files
+ continue
+ candidates = filtered
+ if not candidates:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="No statements found for requested period",
+ )
+
+ # Choose latest by modification time
+ candidates.sort(key=lambda p: p.stat().st_mtime, reverse=True)
+ latest_path = candidates[0]
+
+ return FileResponse(
+ latest_path,
+ media_type="text/html",
+ filename=latest_path.name,
+ )
diff --git a/app/database/schema_updates.py b/app/database/schema_updates.py
index f3d60e8..ff10628 100644
--- a/app/database/schema_updates.py
+++ b/app/database/schema_updates.py
@@ -18,6 +18,32 @@ def ensure_schema_updates(engine: Engine) -> None:
"""Ensure missing columns are added for backward-compatible updates."""
# Map of table -> {column: SQL type}
updates: Dict[str, Dict[str, str]] = {
+ # Billing batch history (lightweight persistence)
+ "billing_batches": {
+ "id": "INTEGER",
+ "batch_id": "TEXT",
+ "status": "TEXT",
+ "total_files": "INTEGER",
+ "successful_files": "INTEGER",
+ "failed_files": "INTEGER",
+ "started_at": "DATETIME",
+ "updated_at": "DATETIME",
+ "completed_at": "DATETIME",
+ "processing_time_seconds": "FLOAT",
+ "success_rate": "FLOAT",
+ "error_message": "TEXT",
+ },
+ "billing_batch_files": {
+ "id": "INTEGER",
+ "batch_id": "TEXT",
+ "file_no": "TEXT",
+ "status": "TEXT",
+ "error_message": "TEXT",
+ "filename": "TEXT",
+ "size": "INTEGER",
+ "started_at": "DATETIME",
+ "completed_at": "DATETIME",
+ },
# Forms
"form_index": {
"keyword": "TEXT",
diff --git a/app/main.py b/app/main.py
index 825cc44..5a38322 100644
--- a/app/main.py
+++ b/app/main.py
@@ -83,6 +83,7 @@ from app.api.customers import router as customers_router
from app.api.files import router as files_router
from app.api.financial import router as financial_router
from app.api.documents import router as documents_router
+from app.api.billing import router as billing_router
from app.api.search import router as search_router
from app.api.admin import router as admin_router
from app.api.import_data import router as import_router
@@ -99,6 +100,7 @@ app.include_router(auth_router, prefix="/api/auth", tags=["authentication"])
app.include_router(customers_router, prefix="/api/customers", tags=["customers"])
app.include_router(files_router, prefix="/api/files", tags=["files"])
app.include_router(financial_router, prefix="/api/financial", tags=["financial"])
+app.include_router(billing_router, prefix="/api/billing", tags=["billing"])
app.include_router(documents_router, prefix="/api/documents", tags=["documents"])
app.include_router(search_router, prefix="/api/search", tags=["search"])
app.include_router(admin_router, prefix="/api/admin", tags=["admin"])
@@ -157,6 +159,13 @@ async def financial_page(request: Request):
"financial.html",
{"request": request, "title": "Financial/Ledger - " + settings.app_name}
)
+@app.get("/billing", response_class=HTMLResponse)
+async def billing_page(request: Request):
+ """Billing Statements page"""
+ return templates.TemplateResponse(
+ "billing.html",
+ {"request": request, "title": "Billing Statements - " + settings.app_name}
+ )
@app.get("/documents", response_class=HTMLResponse)
diff --git a/app/models/__init__.py b/app/models/__init__.py
index 6c7d839..e019e81 100644
--- a/app/models/__init__.py
+++ b/app/models/__init__.py
@@ -17,6 +17,7 @@ from .pensions import (
SeparationAgreement, LifeTable, NumberTable, PensionResult
)
from .templates import DocumentTemplate, DocumentTemplateVersion, TemplateKeyword
+from .billing import BillingBatch, BillingBatchFile
from .lookups import (
Employee, FileType, FileStatus, TransactionType, TransactionCode,
State, GroupLookup, Footer, PlanInfo, FormIndex, FormList,
@@ -32,5 +33,6 @@ __all__ = [
"SeparationAgreement", "LifeTable", "NumberTable", "PensionResult",
"Employee", "FileType", "FileStatus", "TransactionType", "TransactionCode",
"State", "GroupLookup", "Footer", "PlanInfo", "FormIndex", "FormList",
- "PrinterSetup", "SystemSetup", "FormKeyword", "TemplateKeyword"
+ "PrinterSetup", "SystemSetup", "FormKeyword", "TemplateKeyword",
+ "BillingBatch", "BillingBatchFile"
]
\ No newline at end of file
diff --git a/app/models/billing.py b/app/models/billing.py
new file mode 100644
index 0000000..3c5906b
--- /dev/null
+++ b/app/models/billing.py
@@ -0,0 +1,47 @@
+from sqlalchemy import Column, Integer, String, DateTime, Float, Text, Index
+from app.models.base import BaseModel
+
+
+class BillingBatch(BaseModel):
+ __tablename__ = "billing_batches"
+
+ id = Column(Integer, primary_key=True, index=True)
+ batch_id = Column(String(100), unique=True, nullable=False, index=True)
+ status = Column(String(32), nullable=False)
+ total_files = Column(Integer, nullable=False, default=0)
+ successful_files = Column(Integer, nullable=False, default=0)
+ failed_files = Column(Integer, nullable=False, default=0)
+ started_at = Column(DateTime(timezone=True), nullable=False)
+ updated_at = Column(DateTime(timezone=True))
+ completed_at = Column(DateTime(timezone=True))
+ processing_time_seconds = Column(Float)
+ success_rate = Column(Float)
+ error_message = Column(Text)
+
+ __table_args__ = (
+ Index("ix_billing_batches_started_at", "started_at"),
+ Index("ix_billing_batches_updated_at", "updated_at"),
+ Index("ix_billing_batches_completed_at", "completed_at"),
+ {},
+ )
+
+
+class BillingBatchFile(BaseModel):
+ __tablename__ = "billing_batch_files"
+
+ id = Column(Integer, primary_key=True, index=True)
+ batch_id = Column(String(100), nullable=False, index=True)
+ file_no = Column(String(50), nullable=False, index=True)
+ status = Column(String(32), nullable=False)
+ error_message = Column(Text)
+ filename = Column(String(255))
+ size = Column(Integer)
+ started_at = Column(DateTime(timezone=True))
+ completed_at = Column(DateTime(timezone=True))
+
+ __table_args__ = (
+ Index("ix_billing_batch_files_batch_file", "batch_id", "file_no"),
+ {},
+ )
diff --git a/app/models/user.py b/app/models/user.py
index f342e1a..e700e0f 100644
--- a/app/models/user.py
+++ b/app/models/user.py
@@ -22,9 +22,9 @@ class User(BaseModel):
full_name = Column(String(100)) # Keep for backward compatibility
# Authorization
- is_active = Column(Boolean, default=True)
- is_admin = Column(Boolean, default=False)
- is_approver = Column(Boolean, default=False)
+ is_active = Column(Boolean, default=True, nullable=False)
+ is_admin = Column(Boolean, default=False, nullable=False)
+ is_approver = Column(Boolean, default=False, nullable=False)
# User Preferences
theme_preference = Column(String(10), default='light') # 'light', 'dark'
diff --git a/docs/MISSING_FEATURES_TODO.md b/docs/MISSING_FEATURES_TODO.md
index 52c2453..4e0e21b 100644
--- a/docs/MISSING_FEATURES_TODO.md
+++ b/docs/MISSING_FEATURES_TODO.md
@@ -124,48 +124,48 @@ POST /api/documents/generate-batch
**Legacy Feature**: Specialized module for Qualified Domestic Relations Orders
-**Current Status**: ❌ Not implemented
+**Current Status**: ✅ **COMPLETED**
**Required Components**:
#### 2.1 QDRO Data Model
-- [ ] Create `QDRO` model
+- [x] Create `QDRO` model
- File number reference
- Version tracking
- Plan information (name, type, administrator)
- Participant details (employee, spouse/ex-spouse)
- Division methodology (percentage, dollar amount, etc.)
- Effective dates and conditions
-- [ ] Plan information database
-- [ ] QDRO version management
+- [x] Plan information database
+- [x] QDRO version management
#### 2.2 QDRO-Specific Forms
-- [ ] QDRO data entry interface
-- [ ] Plan information management
-- [ ] Participant role management
-- [ ] Division calculation tools
+- [x] QDRO data entry interface
+- [x] Plan information management
+- [x] Participant role management
+- [x] Division calculation tools
#### 2.3 QDRO Document Generation
-- [ ] QDRO-specific templates
-- [ ] Integration with document assembly system
-- [ ] Version control for QDRO revisions
-- [ ] Court approval tracking
+- [x] QDRO-specific templates
+- [x] Integration with document assembly system
+- [x] Version control for QDRO revisions
+- [x] Court approval tracking
**API Endpoints Needed**:
```
-POST /api/qdros
-GET /api/qdros/{file_no}
-PUT /api/qdros/{id}
-POST /api/qdros/{id}/generate-document
-GET /api/qdros/{id}/versions
-POST /api/plan-info
+✅ POST /api/qdros
+✅ GET /api/qdros/{file_no}
+✅ PUT /api/qdros/{id}
+✅ POST /api/qdros/{id}/generate-document
+✅ GET /api/qdros/{id}/versions
+✅ POST /api/plan-info
```
### 🔴 3. Advanced Billing & Statement Generation
**Legacy Feature**: Automated billing statement generation with trust account management
-**Current Status**: ⚠️ Partially implemented (basic transactions exist)
+**Current Status**: 🟡 **IN PROGRESS** (basic transactions exist, working on statement generation)
**Missing Components**:
diff --git a/nginx/nginx.conf b/nginx/nginx.conf
index bf5b9ff..f9fc09d 100644
--- a/nginx/nginx.conf
+++ b/nginx/nginx.conf
@@ -90,6 +90,10 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
+ # WebSocket support
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
}
# Main application
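Without `proxy_http_version 1.1` and the Upgrade/Connection headers, nginx silently downgrades the handshake and clients end up on the polling fallback. A quick end-to-end check, sketched with the third-party `websockets` package; the host, batch id, and token are placeholders:

```python
# Hedged smoke test using the third-party "websockets" package.
import asyncio
import json

import websockets  # pip install websockets

async def main() -> None:
    # Placeholders: substitute a real host, batch id, and JWT.
    url = "wss://example.com/api/billing/statements/batch-progress/ws/BATCH_ID?token=TOKEN"
    async with websockets.connect(url) as ws:
        await ws.send(json.dumps({"type": "ping"}))
        print(await ws.recv())  # a progress frame if the upgrade survived the proxy

asyncio.run(main())
```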
diff --git a/static/js/batch-progress.js b/static/js/batch-progress.js
new file mode 100644
index 0000000..72895b3
--- /dev/null
+++ b/static/js/batch-progress.js
@@ -0,0 +1,136 @@
+/**
+ * Batch Progress Realtime client.
+ * - Tries WebSocket first
+ * - Falls back to HTTP polling on failure
+ * - Auto heartbeats and reconnection with backoff
+ */
+(function(){
+ window.progress = window.progress || {};
+
+ function getAuthToken() {
+ try {
+ return (window.app && window.app.token) || localStorage.getItem('auth_token') || null;
+ } catch (_) {
+ return null;
+ }
+ }
+
+ function buildWsUrl(path) {
+ const loc = window.location;
+ const proto = loc.protocol === 'https:' ? 'wss:' : 'ws:';
+ const token = encodeURIComponent(getAuthToken() || '');
+ const sep = path.includes('?') ? '&' : '?';
+ return `${proto}//${loc.host}${path}${sep}token=${token}`;
+ }
+
+ function defaultOnUpdate(){/* no-op */}
+ function defaultOnError(){/* no-op */}
+
+ /**
+ * Subscribe to a batch progress stream.
+ * @param {string} batchId
+ * @param {(data: object|null) => void} onUpdate
+ * @param {(error: Error|string) => void} onError
+ * @param {number} pollIntervalMs
+ * @returns {() => void} unsubscribe function
+ */
+ function subscribe(batchId, onUpdate = defaultOnUpdate, onError = defaultOnError, pollIntervalMs = 2000) {
+ let ws = null;
+ let closed = false;
+ let pollTimer = null;
+ let backoffMs = 1000;
+
+ async function pollOnce() {
+ try {
+ const resp = await window.http.wrappedFetch(`/api/billing/statements/batch-progress/${encodeURIComponent(batchId)}`);
+ if (!resp.ok) {
+ const err = await window.http.toError(resp, 'Failed to fetch batch progress');
+ throw err;
+ }
+ const json = await resp.json();
+ onUpdate(json);
+ } catch (e) {
+ onError(e);
+ }
+ }
+
+ function startPolling() {
+ if (closed) return;
+ if (pollTimer) clearInterval(pollTimer);
+ pollTimer = setInterval(pollOnce, pollIntervalMs);
+ // immediate first fetch
+ pollOnce();
+ }
+
+ function stopPolling() {
+ if (pollTimer) {
+ clearInterval(pollTimer);
+ pollTimer = null;
+ }
+ }
+
+ function tryWebSocket() {
+ const url = buildWsUrl(`/api/billing/statements/batch-progress/ws/${encodeURIComponent(batchId)}`);
+ try {
+ ws = new WebSocket(url);
+ } catch (e) {
+ onError(e);
+ startPolling();
+ return;
+ }
+
+ let pingTimer = null;
+
+ ws.onopen = function() {
+ stopPolling();
+ backoffMs = 1000;
+            // send a heartbeat ping every 30s to keep the connection alive
+ pingTimer = setInterval(function(){
+ try { ws.send(JSON.stringify({type: 'ping'})); } catch (_) {}
+ }, 30000);
+ };
+
+ ws.onmessage = function(ev) {
+ try {
+ const msg = JSON.parse(ev.data);
+ if (msg && msg.type === 'progress') {
+ onUpdate(msg.data);
+ }
+ } catch (_) {
+ // ignore
+ }
+ };
+
+ ws.onerror = function(ev) {
+ onError(new Error('WebSocket error'));
+ };
+
+ ws.onclose = function() {
+ if (pingTimer) { clearInterval(pingTimer); pingTimer = null; }
+ if (closed) return;
+ // graceful fallback to polling and schedule reconnect with backoff
+ startPolling();
+ setTimeout(function(){
+ if (!closed) {
+ backoffMs = Math.min(backoffMs * 2, 30000);
+ tryWebSocket();
+ }
+ }, backoffMs);
+ };
+ }
+
+ // Kick off
+ tryWebSocket();
+
+ return function unsubscribe() {
+ closed = true;
+ stopPolling();
+ try { if (ws && ws.readyState <= 1) ws.close(); } catch(_) {}
+ ws = null;
+ };
+ }
+
+ window.progress.subscribe = subscribe;
+})();
+
+
diff --git a/static/js/main.js b/static/js/main.js
index e4324aa..0fccb31 100644
--- a/static/js/main.js
+++ b/static/js/main.js
@@ -16,6 +16,7 @@ const app = {
document.addEventListener('DOMContentLoaded', function() {
try { setupGlobalErrorHandlers(); } catch (_) {}
initializeApp();
+ try { initializeBatchProgressUI(); } catch (_) {}
});
// Theme Management (centralized)
@@ -123,6 +124,141 @@ async function initializeApp() {
console.log('Delphi Database System initialized');
}
+// Live Batch Progress (Admin Overview)
+function initializeBatchProgressUI() {
+ const listEl = document.getElementById('batchProgressList');
+ const emptyEl = document.getElementById('batchProgressEmpty');
+ const refreshBtn = document.getElementById('refreshBatchesBtn');
+ if (!listEl || !emptyEl) return;
+
+ const subscriptions = new Map();
+
+ function percent(progress) {
+ if (!progress || !progress.total_files) return 0;
+ const done = Number(progress.processed_files || 0);
+ const total = Number(progress.total_files || 0);
+ return Math.max(0, Math.min(100, Math.round((done / total) * 100)));
+ }
+
+ function renderRow(progress) {
+ const pid = progress.batch_id;
+ const pct = percent(progress);
+ const status = String(progress.status || '').toUpperCase();
+ const current = progress.current_file || '';
+ const success = progress.successful_files || 0;
+ const failed = progress.failed_files || 0;
+ const total = progress.total_files || 0;
+
+        return (
+`<div class="batch-progress-row" data-batch="${pid}">
+  <div class="flex items-center justify-between">
+    <span class="font-mono">${pid}</span>
+    <span class="badge">${status}</span>
+    <span class="text-sm">${success}/${total} ✓ • ${failed} ✕</span>
+    <button type="button" class="btn btn-sm" data-action="cancel">Cancel</button>
+  </div>
+  <div class="progress">
+    <div class="progress-bar" style="width: ${pct}%"></div>
+  </div>
+  <div class="text-xs">${pct}% ${current ? 'Current: ' + current : ''}</div>
+</div>`
+        );
+ }
+
+ async function fetchActiveBatches() {
+ const resp = await window.http.wrappedFetch('/api/billing/statements/batch-list');
+ if (!resp.ok) return [];
+ return await resp.json();
+ }
+
+ function updateEmptyState() {
+ const hasRows = listEl.children.length > 0;
+ emptyEl.style.display = hasRows ? 'none' : '';
+ }
+
+ function upsertRow(data) {
+ const pid = data && data.batch_id ? data.batch_id : null;
+ if (!pid) return;
+ let row = listEl.querySelector(`[data-batch="${pid}"]`);
+ const html = renderRow(data);
+ if (row) {
+ row.outerHTML = html;
+ } else {
+ const container = document.createElement('div');
+ container.innerHTML = html;
+ listEl.prepend(container.firstChild);
+ }
+ updateEmptyState();
+ }
+
+ async function cancelBatch(batchId) {
+ try {
+ const resp = await window.http.wrappedFetch(`/api/billing/statements/batch-progress/${encodeURIComponent(batchId)}`, { method: 'DELETE' });
+ if (!resp.ok) {
+ throw await window.http.toError(resp, 'Failed to cancel batch');
+ }
+ // Let stream update the row; no-op here
+ } catch (e) {
+ console.warn('Cancel failed', e);
+ try { alert(window.http.formatAlert(e, 'Cancel failed')); } catch (_) {}
+ }
+ }
+
+ function attachRowHandlers() {
+ listEl.addEventListener('click', function(ev){
+ const btn = ev.target.closest('[data-action="cancel"]');
+ if (!btn) return;
+ const row = ev.target.closest('[data-batch]');
+ if (!row) return;
+ const pid = row.getAttribute('data-batch');
+ cancelBatch(pid);
+ });
+ }
+
+ async function subscribeTo(pid) {
+ if (!window.progress || typeof window.progress.subscribe !== 'function') return;
+ if (subscriptions.has(pid)) return;
+ const unsub = window.progress.subscribe(pid, function(progress){
+ if (!progress) return;
+ upsertRow(progress);
+ const status = String(progress.status || '').toUpperCase();
+ if (status === 'COMPLETED' || status === 'FAILED' || status === 'CANCELLED') {
+ // Auto-unsubscribe once terminal
+ const fn = subscriptions.get(pid);
+ if (fn) { try { fn(); } catch (_) {} }
+ subscriptions.delete(pid);
+ }
+ }, function(err){
+ // Non-fatal; polling fallback is handled inside subscribe()
+ console.debug('progress stream issue', err && err.message ? err.message : err);
+ });
+ subscriptions.set(pid, unsub);
+ }
+
+ async function refresh() {
+ const batches = await fetchActiveBatches();
+ if (!Array.isArray(batches)) return;
+ if (batches.length === 0) updateEmptyState();
+ for (const pid of batches) {
+ subscribeTo(pid);
+ }
+ }
+
+ if (refreshBtn) {
+ refreshBtn.addEventListener('click', function(){ refresh(); });
+ }
+
+ attachRowHandlers();
+ refresh();
+}
+
// Form validation
function initializeFormValidation() {
// Native validation handling
diff --git a/templates/admin.html b/templates/admin.html
index 1b99d7d..8327466 100644
--- a/templates/admin.html
+++ b/templates/admin.html
@@ -160,6 +160,22 @@
+                <!-- Live Batch Progress (rendered by initializeBatchProgressUI in main.js) -->
+                <div class="card mb-6">
+                    <div class="card-header flex items-center justify-between">
+                        <h3 class="card-title">Live Batch Progress</h3>
+                        <button id="refreshBatchesBtn" type="button" class="btn btn-sm">
+                            Refresh
+                        </button>
+                    </div>
+                    <div class="card-body">
+                        <div id="batchProgressList"></div>
+                        <div id="batchProgressEmpty" class="text-muted">No active batches</div>
+                    </div>
+                </div>
@@ -1083,7 +1099,17 @@
At least one of Email or Webhook URL must be provided. Secret is only updated if a new value is entered.
@@ -1325,6 +1351,9 @@ function showCreateNotificationRouteModal() {
document.getElementById('routeScope').disabled = false;
document.getElementById('routeIdentifier').readOnly = false;
document.getElementById('qdroRouteForm').reset();
+ // Reset secret field to password type
+ document.getElementById('routeWebhookSecret').type = 'password';
+ document.getElementById('webhook-secret-eye').className = 'fas fa-eye text-sm';
openModal('qdroRouteModal');
}
@@ -1348,6 +1377,9 @@ function editNotificationRoute(scope, identifier) {
document.getElementById('routeWebhookUrl').value = link ? link.getAttribute('href') : '';
document.getElementById('routeWebhookSecret').value = '';
}
+ // Reset secret field to password type
+ document.getElementById('routeWebhookSecret').type = 'password';
+ document.getElementById('webhook-secret-eye').className = 'fas fa-eye text-sm';
openModal('qdroRouteModal');
}
@@ -1416,6 +1448,30 @@ async function deleteNotificationRoute(scope, identifier) {
}
}
+// Webhook Secret UI helpers
+function toggleWebhookSecretVisibility() {
+ const input = document.getElementById('routeWebhookSecret');
+ const eye = document.getElementById('webhook-secret-eye');
+ if (input.type === 'password') {
+ input.type = 'text';
+ eye.className = 'fas fa-eye-slash text-sm';
+ } else {
+ input.type = 'password';
+ eye.className = 'fas fa-eye text-sm';
+ }
+}
+
+function generateRandomSecret() {
+ // Generate a secure random string (base64url-safe)
+ const array = new Uint8Array(32); // 32 bytes = 256 bits
+ crypto.getRandomValues(array);
+ const secret = Array.from(array, byte =>
+ 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'[byte % 64]
+ ).join('');
+ document.getElementById('routeWebhookSecret').value = secret;
+ showAlert('Random secret generated', 'success');
+}
+
// Basic HTML escaping helpers
function escapeHtml(str) {
if (str == null) return '';
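A 256-bit secret like the one `generateRandomSecret()` produces is only useful if the receiving end verifies it; the conventional scheme, and presumably the intent here, is an HMAC-SHA256 signature over the raw request body. A receiver-side sketch; the signature header name is an assumption:

```python
# Hedged receiver-side check; the signature header name is an assumption.
import hashlib
import hmac

def verify_webhook(raw_body: bytes, secret: str, signature: str) -> bool:
    expected = hmac.new(secret.encode(), raw_body, hashlib.sha256).hexdigest()
    # compare_digest is constant-time, avoiding a timing side channel.
    return hmac.compare_digest(expected, signature)
```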
diff --git a/templates/base.html b/templates/base.html
index 2bdd727..a3fcb85 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -43,6 +43,10 @@
Ledger
+                <a href="/billing" class="nav-link">
+                    Billing
+                </a>
Documents
@@ -113,6 +117,10 @@
Ledger
+                    <a href="/billing" class="nav-link">
+                        Billing
+                    </a>
Documents
@@ -403,6 +411,7 @@
+    <script src="/static/js/batch-progress.js"></script>
{% block extra_scripts %}{% endblock %}
diff --git a/templates/billing.html b/templates/billing.html
new file mode 100644
index 0000000..6c25a6a
--- /dev/null
+++ b/templates/billing.html
@@ -0,0 +1,353 @@
+{% extends "base.html" %}
+
+{% block title %}Billing Statements - Delphi Database{% endblock %}
+
+{% block content %}
+<div class="container mx-auto px-4 py-6">
+    <h1 class="page-title mb-6">Billing Statements</h1>
+
+    <!-- Batch History -->
+    <div class="card mb-6">
+        <div class="card-header">
+            <h2 class="card-title">Batch History</h2>
+        </div>
+        <div class="table-responsive">
+            <table class="table">
+                <thead>
+                    <tr>
+                        <th>Batch ID</th>
+                        <th>Status</th>
+                        <th>Files</th>
+                        <th>Started</th>
+                        <th>Updated</th>
+                        <th>Completed</th>
+                        <th>Actions</th>
+                    </tr>
+                </thead>
+                <tbody id="batchHistoryBody"><!-- populated by the page script --></tbody>
+            </table>
+            <div id="batchHistoryEmpty" class="text-muted text-center py-4">No batches found</div>
+        </div>
+    </div>
+
+    <!-- Batch Details -->
+    <div class="card">
+        <div class="card-header">
+            <h2 class="card-title">Batch Details</h2>
+        </div>
+        <div class="table-responsive">
+            <table class="table">
+                <thead>
+                    <tr>
+                        <th>File No</th>
+                        <th>Status</th>
+                        <th>Message</th>
+                        <th>Actions</th>
+                    </tr>
+                </thead>
+                <tbody id="batchDetailBody"><!-- populated by the page script --></tbody>
+            </table>
+        </div>
+    </div>
+</div>
+{% endblock %}
+
+{% block extra_scripts %}
+
+{% endblock %}
+
+
diff --git a/tests/test_billing_statements_api.py b/tests/test_billing_statements_api.py
new file mode 100644
index 0000000..31aee2c
--- /dev/null
+++ b/tests/test_billing_statements_api.py
@@ -0,0 +1,1511 @@
+import os
+from datetime import date
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+# Ensure required env vars for app import/config
+os.environ.setdefault("SECRET_KEY", "x" * 32)
+os.environ.setdefault("DATABASE_URL", "sqlite:////tmp/delphi_test.sqlite")
+
+from app.main import app # noqa: E402
+from app.auth.security import get_current_user # noqa: E402
+
+
+@pytest.fixture(scope="module")
+def client():
+ # Override auth to bypass JWT for these tests
+ class _User:
+ def __init__(self):
+ self.id = "test"
+ self.username = "tester"
+ self.is_admin = True
+ self.is_active = True
+
+ app.dependency_overrides[get_current_user] = lambda: _User()
+
+ try:
+ yield TestClient(app)
+ finally:
+ app.dependency_overrides.pop(get_current_user, None)
+
+
+def _create_customer(client: TestClient) -> str:
+ from uuid import uuid4
+ customer_id = f"BILL-CUST-{uuid4().hex[:8]}"
+ payload = {"id": customer_id, "last": "Billing", "email": "billing@example.com"}
+ resp = client.post("/api/customers/", json=payload)
+ assert resp.status_code == 200
+ return customer_id
+
+
+def _create_file(client: TestClient, owner_id: str) -> str:
+ from uuid import uuid4
+ file_no = f"B-{uuid4().hex[:6]}"
+ payload = {
+ "file_no": file_no,
+ "id": owner_id,
+ "regarding": "Billing matter",
+ "empl_num": "E01",
+ "file_type": "CIVIL",
+ "opened": date.today().isoformat(),
+ "status": "ACTIVE",
+ "rate_per_hour": 150.0,
+ "memo": "Created by pytest",
+ }
+ resp = client.post("/api/files/", json=payload)
+ assert resp.status_code == 200
+ return file_no
+
+
+def test_statement_empty_account(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.get(f"/api/billing/statements/{file_no}")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["file_no"] == file_no
+ assert data["totals"]["charges_billed"] == 0
+ assert data["totals"]["charges_unbilled"] == 0
+ assert data["totals"]["charges_total"] == 0
+ assert data["totals"]["payments"] == 0
+ assert data["totals"]["current_balance"] == 0
+ assert isinstance(data["unbilled_entries"], list) and len(data["unbilled_entries"]) == 0
+
+
+def test_statement_with_mixed_entries_and_rounding(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Time entry unbilled: 1.25h * 150 = 187.5
+ payload_time = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "TIME",
+ "t_type": "2",
+ "empl_num": "E01",
+ "quantity": 1.25,
+ "rate": 150.0,
+ "amount": 187.5,
+ "billed": "N",
+ "note": "Work 1",
+ }
+ resp = client.post("/api/financial/ledger/", json=payload_time)
+ assert resp.status_code == 200
+
+ # Flat fee billed: 300
+ payload_flat = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "FLAT",
+ "t_type": "3",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 300.0,
+ "billed": "Y",
+ "note": "Flat fee",
+ }
+ resp = client.post("/api/financial/ledger/", json=payload_flat)
+ assert resp.status_code == 200
+
+ # Disbursement unbilled: 49.995 (rounds to 50.00)
+ payload_disb = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "MISC",
+ "t_type": "4",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 49.995,
+ "billed": "N",
+ "note": "Expense",
+ }
+ resp = client.post("/api/financial/ledger/", json=payload_disb)
+ assert resp.status_code == 200
+
+ # Payment: 100
+ payload_payment = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "PMT",
+ "t_type": "5",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 100.0,
+ "billed": "Y",
+ "note": "Payment",
+ }
+ resp = client.post("/api/financial/ledger/", json=payload_payment)
+ assert resp.status_code == 200
+
+ # Snapshot
+ resp = client.get(f"/api/billing/statements/{file_no}")
+ assert resp.status_code == 200
+ data = resp.json()
+
+ # charges_billed = 300
+ assert data["totals"]["charges_billed"] == 300.0
+ # charges_unbilled = 187.5 + 49.995 ~= 237.50
+ assert data["totals"]["charges_unbilled"] == 237.5
+ # charges_total = 537.5
+ assert data["totals"]["charges_total"] == 537.5
+ # payments = 100
+ assert data["totals"]["payments"] == 100.0
+ # current_balance = 437.5
+ assert data["totals"]["current_balance"] == 437.5
+
+ # Unbilled entries include two items
+ unbilled = data["unbilled_entries"]
+ assert len(unbilled) == 2
+ codes = {e["t_code"] for e in unbilled}
+ assert "TIME" in codes and "MISC" in codes
+
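These totals imply each amount is rounded half-up to two decimals (49.995 becomes 50.00, so 187.5 + 50.00 = 237.5). A sketch of a rounding helper consistent with the assertions; the actual `_round` in `app/api/billing.py` may be implemented differently:

```python
# Sketch consistent with the expected totals; the real helper may differ.
from decimal import ROUND_HALF_UP, Decimal
from typing import Optional

def round_amount(value: Optional[float]) -> float:
    if value is None:
        return 0.0
    # Going through str() keeps the literal 49.995 intact, so
    # half-up quantization yields 50.00 rather than 49.99.
    return float(Decimal(str(value)).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
```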
+
+def test_generate_statement_missing_file_returns_404(client: TestClient):
+ resp = client.post(
+ "/api/billing/statements/generate",
+ json={"file_no": "NOFILE-123"},
+ )
+ assert resp.status_code == 404
+
+
+def test_generate_statement_empty_ledger(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.post(
+ "/api/billing/statements/generate",
+ json={"file_no": file_no},
+ )
+ assert resp.status_code == 200, resp.text
+ body = resp.json()
+ assert body["file_no"] == file_no
+ assert body["unbilled_count"] == 0
+ assert body["totals"]["charges_total"] == 0
+ assert body["totals"]["payments"] == 0
+ # Verify file saved
+ path = body["export_path"]
+ assert isinstance(path, str) and path.endswith(".html")
+ assert os.path.exists(path)
+ # Read and check minimal content
+ with open(path, "r", encoding="utf-8") as f:
+ html = f.read()
+ assert "Statement" in html and file_no in html
+
+
+def test_generate_statement_populated(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Populate entries similar to snapshot test
+ # Time entry unbilled: 1.25h * 150 = 187.5
+ payload_time = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "TIME",
+ "t_type": "2",
+ "empl_num": "E01",
+ "quantity": 1.25,
+ "rate": 150.0,
+ "amount": 187.5,
+ "billed": "N",
+ "note": "Work 1",
+ }
+ assert client.post("/api/financial/ledger/", json=payload_time).status_code == 200
+
+ # Flat fee billed: 300
+ payload_flat = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "FLAT",
+ "t_type": "3",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 300.0,
+ "billed": "Y",
+ "note": "Flat fee",
+ }
+ assert client.post("/api/financial/ledger/", json=payload_flat).status_code == 200
+
+ # Disbursement unbilled: 49.995 (rounds to 50.00)
+ payload_disb = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "MISC",
+ "t_type": "4",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 49.995,
+ "billed": "N",
+ "note": "Expense",
+ }
+ assert client.post("/api/financial/ledger/", json=payload_disb).status_code == 200
+
+ # Payment: 100
+ payload_payment = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "PMT",
+ "t_type": "5",
+ "empl_num": "E01",
+ "quantity": 0.0,
+ "rate": 0.0,
+ "amount": 100.0,
+ "billed": "Y",
+ "note": "Payment",
+ }
+ assert client.post("/api/financial/ledger/", json=payload_payment).status_code == 200
+
+ # Generate
+ resp = client.post(
+ "/api/billing/statements/generate",
+ json={"file_no": file_no},
+ )
+ assert resp.status_code == 200, resp.text
+ data = resp.json()
+ assert data["file_no"] == file_no
+ assert data["unbilled_count"] == 2
+ assert data["totals"]["charges_billed"] == 300.0
+ assert data["totals"]["charges_unbilled"] == 237.5
+ assert data["totals"]["charges_total"] == 537.5
+ assert data["totals"]["payments"] == 100.0
+ assert data["totals"]["current_balance"] == 437.5
+ # Verify saved file exists
+ assert os.path.exists(data["export_path"]) and data["filename"].endswith(".html")
+
+
+def test_list_statements_empty(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert isinstance(data, list) and len(data) == 0
+
+
+def test_list_statements_with_generated_files(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate two statements
+ resp1 = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp1.status_code == 200
+ file1 = resp1.json()
+
+ # Small delay to ensure different timestamps
+ import time
+ time.sleep(1.1) # Ensure different seconds in filename
+
+ resp2 = client.post("/api/billing/statements/generate", json={"file_no": file_no, "period": "2024-01"})
+ assert resp2.status_code == 200
+ file2 = resp2.json()
+
+ # List all statements
+ resp = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp.status_code == 200
+ data = resp.json()
+
+ assert len(data) == 2
+
+ # Check structure
+ for item in data:
+ assert "filename" in item
+ assert "size" in item
+ assert "created" in item
+ assert item["size"] > 0
+
+ # Should be sorted newest first (file2 should be first)
+ filenames = [item["filename"] for item in data]
+ assert file2["filename"] in filenames
+ assert file1["filename"] in filenames
+
+
+def test_list_statements_with_period_filter(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate statements with different periods
+ resp1 = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp1.status_code == 200
+
+ resp2 = client.post("/api/billing/statements/generate", json={"file_no": file_no, "period": "2024-01"})
+ assert resp2.status_code == 200
+
+ # Filter by period
+ resp = client.get(f"/api/billing/statements/{file_no}/list?period=2024-01")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert len(data) == 1
+ assert data[0]["filename"] == resp2.json()["filename"]
+
+
+def test_list_statements_file_not_found(client: TestClient):
+ resp = client.get("/api/billing/statements/NONEXISTENT/list")
+ assert resp.status_code == 404
+ assert "File not found" in resp.json()["error"]["message"]
+
+
+def test_download_statement_latest(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate a statement
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen.status_code == 200
+ gen_data = resp_gen.json()
+
+ # Download latest
+ resp = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp.status_code == 200
+ assert resp.headers["content-type"] == "text/html; charset=utf-8"
+ assert "content-disposition" in resp.headers
+ assert gen_data["filename"] in resp.headers["content-disposition"]
+
+ # Verify HTML content
+ content = resp.content.decode("utf-8")
+ assert "Statement" in content
+ assert file_no in content
+
+
+def test_download_statement_with_period_filter(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate statements with different periods
+ resp1 = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp1.status_code == 200
+
+ resp2 = client.post("/api/billing/statements/generate", json={"file_no": file_no, "period": "2024-01"})
+ assert resp2.status_code == 200
+ gen_data2 = resp2.json()
+
+ # Download with period filter
+ resp = client.get(f"/api/billing/statements/{file_no}/download?period=2024-01")
+ assert resp.status_code == 200
+ assert gen_data2["filename"] in resp.headers["content-disposition"]
+
+
+def test_download_statement_no_files(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp.status_code == 404
+ assert "No statements found" in resp.json()["error"]["message"]
+
+
+def test_download_statement_no_matching_period(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate statement without period
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen.status_code == 200
+
+ # Try to download with different period
+ resp = client.get(f"/api/billing/statements/{file_no}/download?period=2024-01")
+ assert resp.status_code == 404
+ assert "No statements found for requested period" in resp.json()["error"]["message"]
+
+
+def test_download_statement_file_not_found(client: TestClient):
+ resp = client.get("/api/billing/statements/NONEXISTENT/download")
+ assert resp.status_code == 404
+ assert "File not found" in resp.json()["error"]["message"]
+
+
+def test_delete_statement_success(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate a statement
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen.status_code == 200
+ gen_data = resp_gen.json()
+ filename = gen_data["filename"]
+
+ # Verify file exists
+ assert os.path.exists(gen_data["export_path"])
+
+ # Delete statement
+ resp = client.delete(f"/api/billing/statements/{file_no}/{filename}")
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["message"] == "Statement deleted successfully"
+ assert data["filename"] == filename
+
+ # Verify file is gone
+ assert not os.path.exists(gen_data["export_path"])
+
+
+def test_delete_statement_file_not_found(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.delete(f"/api/billing/statements/{file_no}/nonexistent.html")
+ assert resp.status_code == 404
+ assert "Statement not found" in resp.json()["error"]["message"]
+
+
+def test_delete_statement_invalid_file_no(client: TestClient):
+ resp = client.delete("/api/billing/statements/NONEXISTENT/test.html")
+ assert resp.status_code == 404
+ assert "File not found" in resp.json()["error"]["message"]
+
+
+def test_delete_statement_security_invalid_filename(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Try to delete a file that doesn't match the expected pattern
+ resp = client.delete(f"/api/billing/statements/{file_no}/malicious_file.html")
+ assert resp.status_code == 404
+ assert "Statement not found" in resp.json()["error"]["message"]
+
+
+def test_delete_statement_security_wrong_file_no(client: TestClient):
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ # Generate statement for file_no1
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no1})
+ assert resp_gen.status_code == 200
+ gen_data = resp_gen.json()
+ filename = gen_data["filename"]
+
+ # Try to delete file_no1's statement using file_no2
+ resp = client.delete(f"/api/billing/statements/{file_no2}/{filename}")
+ assert resp.status_code == 404
+ assert "Statement not found" in resp.json()["error"]["message"]
+
+ # Verify original file still exists
+ assert os.path.exists(gen_data["export_path"])
+
+
+def test_delete_statement_security_path_traversal(client: TestClient):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Try path traversal attack
+ resp = client.delete(f"/api/billing/statements/{file_no}/../../../etc/passwd")
+    assert resp.status_code == 404
+    # The message differs from the other 404 cases because the traversal
+    # segments are rejected during routing rather than by the endpoint.
+
+
+# Integration Tests - Complete Workflow
+
+
+def test_complete_billing_workflow_single_statement(client: TestClient):
+ """Test complete workflow: generate -> list -> download -> delete"""
+ # Setup
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # 1. Generate statement
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen.status_code == 200
+ gen_data = resp_gen.json()
+ filename = gen_data["filename"]
+
+ # 2. List statements - should show 1 item
+ resp_list = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list.status_code == 200
+ list_data = resp_list.json()
+ assert len(list_data) == 1
+ assert list_data[0]["filename"] == filename
+ assert list_data[0]["size"] > 0
+
+ # 3. Download statement - should return HTML
+ resp_download = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp_download.status_code == 200
+ assert resp_download.headers["content-type"] == "text/html; charset=utf-8"
+ assert filename in resp_download.headers["content-disposition"]
+ content = resp_download.content.decode("utf-8")
+ assert "Statement" in content
+ assert file_no in content
+
+ # 4. Delete statement
+ resp_delete = client.delete(f"/api/billing/statements/{file_no}/{filename}")
+ assert resp_delete.status_code == 200
+ delete_data = resp_delete.json()
+ assert delete_data["message"] == "Statement deleted successfully"
+ assert delete_data["filename"] == filename
+
+ # 5. Verify deletion - list should be empty
+ resp_list_after = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list_after.status_code == 200
+ assert len(resp_list_after.json()) == 0
+
+ # 6. Download should fail after deletion
+ resp_download_after = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp_download_after.status_code == 404
+
+
+def test_complete_billing_workflow_multiple_statements_with_periods(client: TestClient):
+ """Test workflow with multiple statements and period filtering"""
+ # Setup
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # 1. Generate statements with different periods
+ resp_gen1 = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen1.status_code == 200
+ gen_data1 = resp_gen1.json()
+
+ import time
+ time.sleep(1.1) # Ensure different timestamps
+
+ resp_gen2 = client.post("/api/billing/statements/generate", json={"file_no": file_no, "period": "2024-01"})
+ assert resp_gen2.status_code == 200
+ gen_data2 = resp_gen2.json()
+
+ time.sleep(1.1)
+
+ resp_gen3 = client.post("/api/billing/statements/generate", json={"file_no": file_no, "period": "2024-02"})
+ assert resp_gen3.status_code == 200
+ gen_data3 = resp_gen3.json()
+
+ # 2. List all statements - should show 3 items, newest first
+ resp_list_all = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list_all.status_code == 200
+ list_all_data = resp_list_all.json()
+ assert len(list_all_data) == 3
+
+ # Verify sorting (newest first) - gen3 should be first
+ filenames = [item["filename"] for item in list_all_data]
+ assert gen_data3["filename"] == filenames[0]
+
+ # 3. List with period filter - should show only 2024-01
+ resp_list_filtered = client.get(f"/api/billing/statements/{file_no}/list?period=2024-01")
+ assert resp_list_filtered.status_code == 200
+ list_filtered_data = resp_list_filtered.json()
+ assert len(list_filtered_data) == 1
+ assert list_filtered_data[0]["filename"] == gen_data2["filename"]
+
+ # 4. Download with period filter - should get 2024-01 statement
+ resp_download_filtered = client.get(f"/api/billing/statements/{file_no}/download?period=2024-01")
+ assert resp_download_filtered.status_code == 200
+ assert gen_data2["filename"] in resp_download_filtered.headers["content-disposition"]
+
+ # 5. Download without filter - should get newest (gen3)
+ resp_download_latest = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp_download_latest.status_code == 200
+ assert gen_data3["filename"] in resp_download_latest.headers["content-disposition"]
+
+ # 6. Delete middle statement (gen2)
+ resp_delete = client.delete(f"/api/billing/statements/{file_no}/{gen_data2['filename']}")
+ assert resp_delete.status_code == 200
+
+ # 7. Verify partial deletion
+ resp_list_after_delete = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list_after_delete.status_code == 200
+ list_after_data = resp_list_after_delete.json()
+ assert len(list_after_data) == 2
+ remaining_filenames = [item["filename"] for item in list_after_data]
+ assert gen_data1["filename"] in remaining_filenames
+ assert gen_data3["filename"] in remaining_filenames
+ assert gen_data2["filename"] not in remaining_filenames
+
+ # 8. Period filter should return nothing for 2024-01
+ resp_list_filtered_after = client.get(f"/api/billing/statements/{file_no}/list?period=2024-01")
+ assert resp_list_filtered_after.status_code == 200
+ assert len(resp_list_filtered_after.json()) == 0
+
+ # 9. Download with 2024-01 filter should fail
+ resp_download_filtered_after = client.get(f"/api/billing/statements/{file_no}/download?period=2024-01")
+ assert resp_download_filtered_after.status_code == 404
+
+
+def test_complete_billing_workflow_with_ledger_data(client: TestClient):
+ """Test workflow with actual ledger data to verify statement content"""
+ # Setup
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Add ledger entries
+ time_entry = {
+ "file_no": file_no,
+ "date": date.today().isoformat(),
+ "t_code": "TIME",
+ "t_type": "2",
+ "empl_num": "E01",
+ "quantity": 2.5,
+ "rate": 200.0,
+ "amount": 500.0,
+ "billed": "N",
+ "note": "Legal research",
+ }
+ resp_ledger = client.post("/api/financial/ledger/", json=time_entry)
+ assert resp_ledger.status_code == 200
+
+ # Generate statement
+ resp_gen = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_gen.status_code == 200
+ gen_data = resp_gen.json()
+
+ # Verify statement metadata includes ledger data
+ assert gen_data["totals"]["charges_unbilled"] == 500.0
+ assert gen_data["unbilled_count"] == 1
+
+ # Download and verify content
+ resp_download = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp_download.status_code == 200
+ content = resp_download.content.decode("utf-8")
+
+ # Verify statement contains ledger data
+ assert "Legal research" in content
+ assert "500.00" in content
+ assert "2.5" in content # quantity
+ assert "200.00" in content # rate
+
+ # List statements and verify metadata
+ resp_list = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list.status_code == 200
+ list_data = resp_list.json()
+ assert len(list_data) == 1
+ assert list_data[0]["size"] > 1000 # Statement with data should be larger
+
+
+def test_cross_file_security_integration(client: TestClient):
+ """Test that the complete workflow respects file security boundaries"""
+ # Setup two different files
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ # Generate statements for both files
+ resp_gen1 = client.post("/api/billing/statements/generate", json={"file_no": file_no1})
+ assert resp_gen1.status_code == 200
+ gen_data1 = resp_gen1.json()
+
+ resp_gen2 = client.post("/api/billing/statements/generate", json={"file_no": file_no2})
+ assert resp_gen2.status_code == 200
+ gen_data2 = resp_gen2.json()
+
+ # 1. List statements for file1 should only show file1's statements
+ resp_list1 = client.get(f"/api/billing/statements/{file_no1}/list")
+ assert resp_list1.status_code == 200
+ list1_data = resp_list1.json()
+ assert len(list1_data) == 1
+ assert list1_data[0]["filename"] == gen_data1["filename"]
+
+ # 2. List statements for file2 should only show file2's statements
+ resp_list2 = client.get(f"/api/billing/statements/{file_no2}/list")
+ assert resp_list2.status_code == 200
+ list2_data = resp_list2.json()
+ assert len(list2_data) == 1
+ assert list2_data[0]["filename"] == gen_data2["filename"]
+
+ # 3. Download for file1 should only get file1's statement
+ resp_download1 = client.get(f"/api/billing/statements/{file_no1}/download")
+ assert resp_download1.status_code == 200
+ assert gen_data1["filename"] in resp_download1.headers["content-disposition"]
+
+ # 4. Try to delete file1's statement using file2's endpoint - should fail
+ resp_delete_cross = client.delete(f"/api/billing/statements/{file_no2}/{gen_data1['filename']}")
+ assert resp_delete_cross.status_code == 404
+
+ # 5. Verify file1's statement still exists
+ resp_list1_after = client.get(f"/api/billing/statements/{file_no1}/list")
+ assert resp_list1_after.status_code == 200
+ assert len(resp_list1_after.json()) == 1
+
+ # 6. Proper deletion should work
+ resp_delete_proper = client.delete(f"/api/billing/statements/{file_no1}/{gen_data1['filename']}")
+ assert resp_delete_proper.status_code == 200
+
+ # 7. Verify file1 list is now empty but file2 is unaffected
+ resp_list1_final = client.get(f"/api/billing/statements/{file_no1}/list")
+ assert resp_list1_final.status_code == 200
+ assert len(resp_list1_final.json()) == 0
+
+ resp_list2_final = client.get(f"/api/billing/statements/{file_no2}/list")
+ assert resp_list2_final.status_code == 200
+ assert len(resp_list2_final.json()) == 1
+
+
+# Batch Statement Generation Tests
+
+
+def test_batch_generate_statements_empty_list(client: TestClient):
+ """Test batch generation with empty file list"""
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": []})
+ assert resp.status_code == 400
+ assert "At least one file number must be provided" in resp.json()["error"]["message"]
+
+
+def test_batch_generate_statements_too_many_files(client: TestClient):
+ """Test batch generation with too many files"""
+ file_numbers = [f"FILE-{i}" for i in range(51)] # 51 files exceeds limit
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 422 # Pydantic validation error
+ assert "max_length" in resp.text.lower() or "validation" in resp.text.lower()
+
+
+def test_batch_generate_statements_single_file_success(client: TestClient):
+ """Test batch generation with a single existing file"""
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": [file_no]})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 1
+ assert data["successful"] == 1
+ assert data["failed"] == 0
+ assert data["success_rate"] == 100.0
+ assert "batch_id" in data
+ assert data["batch_id"].startswith("batch_")
+ assert len(data["results"]) == 1
+
+ result = data["results"][0]
+ assert result["file_no"] == file_no
+ assert result["status"] == "success"
+ assert result["statement_meta"] is not None
+ assert result["statement_meta"]["file_no"] == file_no
+ assert result["statement_meta"]["filename"].endswith(".html")
+
+
+def test_batch_generate_statements_multiple_files_success(client: TestClient):
+ """Test batch generation with multiple existing files"""
+ # Create test data
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ owner_id3 = _create_customer(client)
+ file_no3 = _create_file(client, owner_id3)
+
+ file_numbers = [file_no1, file_no2, file_no3]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 3
+ assert data["successful"] == 3
+ assert data["failed"] == 0
+ assert data["success_rate"] == 100.0
+ assert len(data["results"]) == 3
+
+ # Check all files were processed successfully
+ file_nos_in_results = [r["file_no"] for r in data["results"]]
+ assert set(file_nos_in_results) == set(file_numbers)
+
+ for result in data["results"]:
+ assert result["status"] == "success"
+ assert result["statement_meta"] is not None
+ assert result["statement_meta"]["filename"].endswith(".html")
+
+
+def test_batch_generate_statements_mixed_success_failure(client: TestClient):
+ """Test batch generation with mix of existing and non-existing files"""
+ # Create one existing file
+ owner_id = _create_customer(client)
+ existing_file = _create_file(client, owner_id)
+
+ # Mix of existing and non-existing files
+ file_numbers = [existing_file, "NONEXISTENT-1", "NONEXISTENT-2"]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 3
+ assert data["successful"] == 1
+ assert data["failed"] == 2
+ assert data["success_rate"] == 33.33
+ assert len(data["results"]) == 3
+
+ # Check results
+ results_by_file = {r["file_no"]: r for r in data["results"]}
+
+ # Existing file should succeed
+ assert results_by_file[existing_file]["status"] == "success"
+ assert results_by_file[existing_file]["statement_meta"] is not None
+
+ # Non-existing files should fail
+ assert results_by_file["NONEXISTENT-1"]["status"] == "failed"
+ assert "not found" in results_by_file["NONEXISTENT-1"]["message"].lower()
+ assert results_by_file["NONEXISTENT-1"]["error_details"] is not None
+
+ assert results_by_file["NONEXISTENT-2"]["status"] == "failed"
+ assert "not found" in results_by_file["NONEXISTENT-2"]["message"].lower()
+
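The 33.33 above pins down how `success_rate` is derived: it behaves like a percentage rounded to two decimals, e.g.:

```python
# Behavior implied by the asserted values (100.0, 50.0, 33.33).
def success_rate(successful: int, total: int) -> float:
    return round(successful / total * 100, 2) if total else 0.0

assert success_rate(1, 3) == 33.33
```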
+
+def test_batch_generate_statements_all_failures(client: TestClient):
+ """Test batch generation where all files fail"""
+ file_numbers = ["NONEXISTENT-1", "NONEXISTENT-2", "NONEXISTENT-3"]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 3
+ assert data["successful"] == 0
+ assert data["failed"] == 3
+ assert data["success_rate"] == 0.0
+ assert len(data["results"]) == 3
+
+ # All should be failures
+ for result in data["results"]:
+ assert result["status"] == "failed"
+ assert "not found" in result["message"].lower()
+ assert result["error_details"] is not None
+
+
+def test_batch_generate_statements_with_period(client: TestClient):
+ """Test batch generation with period filter"""
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ file_numbers = [file_no1, file_no2]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers,
+ "period": "2024-01"
+ })
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 2
+ assert data["successful"] == 2
+ assert data["failed"] == 0
+
+ # Check that period was applied to all statements
+ for result in data["results"]:
+ assert result["status"] == "success"
+ assert result["statement_meta"]["period"] == "2024-01"
+
+
+def test_batch_generate_statements_deduplicates_files(client: TestClient):
+ """Test that batch generation removes duplicate file numbers"""
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Pass same file number multiple times
+ file_numbers = [file_no, file_no, file_no]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ # Should only process once, not three times
+ assert data["total_files"] == 1
+ assert data["successful"] == 1
+ assert data["failed"] == 0
+ assert len(data["results"]) == 1
+ assert data["results"][0]["file_no"] == file_no
+
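The endpoint evidently de-duplicates before processing; an order-preserving pass like the following would satisfy the test (a sketch, not the actual implementation):

```python
# Order-preserving dedup; a sketch of what the endpoint presumably does.
def dedupe(file_numbers: list[str]) -> list[str]:
    return list(dict.fromkeys(file_numbers))

assert dedupe(["B-1", "B-1", "B-2"]) == ["B-1", "B-2"]
```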
+
+def test_batch_generate_statements_timing_and_metadata(client: TestClient):
+ """Test that batch operation includes proper timing and metadata"""
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": [file_no]})
+ assert resp.status_code == 200
+
+ data = resp.json()
+
+ # Check timing fields
+ assert "started_at" in data
+ assert "completed_at" in data
+ assert "processing_time_seconds" in data
+ assert isinstance(data["processing_time_seconds"], (int, float))
+ assert data["processing_time_seconds"] >= 0
+
+ # Check batch ID format
+ assert "batch_id" in data
+ batch_id = data["batch_id"]
+ assert batch_id.startswith("batch_")
+ assert len(batch_id.split("_")) == 4 # batch_YYYYMMDD_HHMMSS_hash
+
+ # Parse timestamps
+ from datetime import datetime
+ started_at = datetime.fromisoformat(data["started_at"].replace('Z', '+00:00'))
+ completed_at = datetime.fromisoformat(data["completed_at"].replace('Z', '+00:00'))
+ assert completed_at >= started_at
+
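The four-part check implies ids of the form `batch_YYYYMMDD_HHMMSS_<hash>`. One generator that satisfies it, as a guess at the real one:

```python
# A guess at the id generator implied by the format assertions above.
from datetime import datetime, timezone
from uuid import uuid4

def new_batch_id() -> str:
    stamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
    return f"batch_{stamp}_{uuid4().hex[:8]}"

assert len(new_batch_id().split("_")) == 4
```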
+
+def test_batch_generate_statements_with_ledger_data(client: TestClient):
+ """Test batch generation with files containing ledger data"""
+ # Create files with some ledger data
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ # Add ledger entry to first file
+ time_entry = {
+ "file_no": file_no1,
+ "date": date.today().isoformat(),
+ "t_code": "TIME",
+ "t_type": "2",
+ "empl_num": "E01",
+ "quantity": 1.5,
+ "rate": 200.0,
+ "amount": 300.0,
+ "billed": "N",
+ "note": "Legal work",
+ }
+ assert client.post("/api/financial/ledger/", json=time_entry).status_code == 200
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+ # file_no2 has no ledger entries
+
+ file_numbers = [file_no1, file_no2]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["successful"] == 2
+ assert data["failed"] == 0
+
+ # Check that file with ledger data has unbilled entries
+ results_by_file = {r["file_no"]: r for r in data["results"]}
+
+ file1_result = results_by_file[file_no1]
+ assert file1_result["statement_meta"]["unbilled_count"] == 1
+ assert file1_result["statement_meta"]["totals"]["charges_unbilled"] == 300.0
+
+ file2_result = results_by_file[file_no2]
+ assert file2_result["statement_meta"]["unbilled_count"] == 0
+ assert file2_result["statement_meta"]["totals"]["charges_unbilled"] == 0.0
+
+
+def test_batch_generate_statements_file_system_verification(client: TestClient):
+ """Test that batch generation actually creates files on disk"""
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ file_numbers = [file_no1, file_no2]
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["successful"] == 2
+
+ # Verify files exist on disk
+ for result in data["results"]:
+ if result["status"] == "success":
+ export_path = result["statement_meta"]["export_path"]
+ assert os.path.exists(export_path)
+
+ # Check file content
+ with open(export_path, "r", encoding="utf-8") as f:
+ content = f.read()
+ assert "Statement" in content
+ assert result["file_no"] in content
+
+
+def test_batch_generate_statements_performance_within_limits(client: TestClient):
+ """Test that batch generation performs reasonably with multiple files"""
+ # Create several files
+ file_numbers = []
+ for i in range(10): # Test with 10 files
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+ file_numbers.append(file_no)
+
+ import time
+ start_time = time.time()
+
+ resp = client.post("/api/billing/statements/batch-generate", json={"file_numbers": file_numbers})
+
+ end_time = time.time()
+ actual_processing_time = end_time - start_time
+
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert data["total_files"] == 10
+ assert data["successful"] == 10
+ assert data["failed"] == 0
+
+ # Processing should be reasonably fast (less than 30 seconds for 10 files)
+ assert actual_processing_time < 30
+
+ # Reported processing time should be close to actual
+ reported_time = data["processing_time_seconds"]
+ assert abs(reported_time - actual_processing_time) < 5 # Within 5 seconds tolerance
+
+
+# Integration Tests - Batch + Existing Endpoints
+
+
+def test_batch_generate_then_list_and_download(client: TestClient):
+ """Test complete workflow: batch generate -> list -> download"""
+ # Create test files
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ # Batch generate
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no1, file_no2]
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ assert batch_data["successful"] == 2
+
+ # List statements for each file
+ for result in batch_data["results"]:
+ file_no = result["file_no"]
+ filename = result["statement_meta"]["filename"]
+
+ # List should show the generated statement
+ resp_list = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list.status_code == 200
+ list_data = resp_list.json()
+ assert len(list_data) >= 1
+ filenames = [item["filename"] for item in list_data]
+ assert filename in filenames
+
+ # Download should work
+ resp_download = client.get(f"/api/billing/statements/{file_no}/download")
+ assert resp_download.status_code == 200
+ assert filename in resp_download.headers["content-disposition"]
+
+
+def test_batch_generate_with_existing_statements(client: TestClient):
+ """Test batch generation when files already have statements"""
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ # Generate individual statement first
+ resp_single = client.post("/api/billing/statements/generate", json={"file_no": file_no})
+ assert resp_single.status_code == 200
+ single_filename = resp_single.json()["filename"]
+
+ # Small delay to ensure different microsecond timestamps
+ import time
+ time.sleep(0.001)
+
+ # Generate via batch
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no]
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ assert batch_data["successful"] == 1
+ batch_filename = batch_data["results"][0]["statement_meta"]["filename"]
+
+ # Should have two different files (due to microsecond timestamps)
+ assert single_filename != batch_filename
+
+ # List should show both
+ resp_list = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list.status_code == 200
+ list_data = resp_list.json()
+ assert len(list_data) == 2
+ filenames = [item["filename"] for item in list_data]
+ assert single_filename in filenames
+ assert batch_filename in filenames
+
+
+# Progress Polling Tests
+
+
+def test_batch_progress_polling_successful_operation(client: TestClient):
+ """Test progress polling for a successful batch operation"""
+ # Create test files
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ file_numbers = [file_no1, file_no2]
+
+ # Start batch operation
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ batch_id = batch_data["batch_id"]
+
+ # Check progress (should be completed after synchronous operation)
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+
+ progress_data = resp_progress.json()
+ assert progress_data["batch_id"] == batch_id
+ assert progress_data["status"] == "completed"
+ assert progress_data["total_files"] == 2
+ assert progress_data["processed_files"] == 2
+ assert progress_data["successful_files"] == 2
+ assert progress_data["failed_files"] == 0
+ assert progress_data["success_rate"] == 100.0
+ assert progress_data["started_at"] is not None
+ assert progress_data["completed_at"] is not None
+ assert progress_data["processing_time_seconds"] is not None
+ assert len(progress_data["files"]) == 2
+
+ # Check individual file statuses
+ for file_progress in progress_data["files"]:
+ assert file_progress["file_no"] in file_numbers
+ assert file_progress["status"] == "completed"
+ assert file_progress["started_at"] is not None
+ assert file_progress["completed_at"] is not None
+ assert file_progress["statement_meta"] is not None
+
+
+def test_batch_progress_polling_mixed_results(client: TestClient):
+ """Test progress polling for batch operation with mixed success/failure"""
+ # Create one existing file
+ owner_id = _create_customer(client)
+ existing_file = _create_file(client, owner_id)
+
+ # Mix of existing and non-existing files
+ file_numbers = [existing_file, "NONEXISTENT-1"]
+
+ # Start batch operation
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ batch_id = batch_data["batch_id"]
+
+ # Check progress
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+
+ progress_data = resp_progress.json()
+ assert progress_data["batch_id"] == batch_id
+ assert progress_data["status"] == "completed"
+ assert progress_data["total_files"] == 2
+ assert progress_data["processed_files"] == 2
+ assert progress_data["successful_files"] == 1
+ assert progress_data["failed_files"] == 1
+ assert progress_data["success_rate"] == 50.0
+
+ # Check individual file statuses
+ files_by_no = {f["file_no"]: f for f in progress_data["files"]}
+
+ # Existing file should be successful
+ assert files_by_no[existing_file]["status"] == "completed"
+ assert files_by_no[existing_file]["statement_meta"] is not None
+
+ # Non-existing file should be failed
+ assert files_by_no["NONEXISTENT-1"]["status"] == "failed"
+ assert files_by_no["NONEXISTENT-1"]["error_message"] is not None
+ assert "not found" in files_by_no["NONEXISTENT-1"]["error_message"].lower()
+
+
+def test_batch_progress_polling_nonexistent_batch(client: TestClient):
+ """Test progress polling for non-existent batch ID"""
+ resp = client.get("/api/billing/statements/batch-progress/nonexistent-batch-id")
+ assert resp.status_code == 404
+ assert "not found" in resp.json()["error"]["message"].lower()
+
+
+def test_list_active_batches_empty(client: TestClient):
+ """Test listing active batches when none exist"""
+ resp = client.get("/api/billing/statements/batch-list")
+ assert resp.status_code == 200
+ assert isinstance(resp.json(), list)
+ # Note: May contain active batches from other tests, so we just check it's a list
+
+
+def test_batch_cancellation_nonexistent(client: TestClient):
+ """Test cancelling a non-existent batch operation"""
+ resp = client.delete("/api/billing/statements/batch-progress/nonexistent-batch-id")
+ assert resp.status_code == 404
+ assert "not found" in resp.json()["error"]["message"].lower()
+
+
+def test_batch_cancellation_already_completed(client: TestClient):
+ """Test cancelling an already completed batch operation"""
+ # Create and complete a batch operation
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no]
+ })
+ assert resp_batch.status_code == 200
+ batch_id = resp_batch.json()["batch_id"]
+
+ # Try to cancel completed batch
+ resp_cancel = client.delete(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_cancel.status_code == 400
+ assert "cannot cancel" in resp_cancel.json()["error"]["message"].lower()
+
+
+def test_progress_store_cleanup_mechanism(client: TestClient):
+ """Test that progress store cleanup works correctly"""
+ # This is more of an integration test to ensure the cleanup mechanism works
+ # We can't easily test the time-based cleanup without mocking time
+
+ # Create a batch operation
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no]
+ })
+ assert resp_batch.status_code == 200
+ batch_id = resp_batch.json()["batch_id"]
+
+ # Verify we can still get progress
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+
+ # The cleanup mechanism runs automatically in the background
+ # In a real test environment, we could mock the retention period
+ # For now, we just verify the basic functionality works
+
+
+def test_batch_progress_timing_metadata(client: TestClient):
+ """Test that batch progress includes proper timing metadata"""
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no]
+ })
+ assert resp_batch.status_code == 200
+ batch_id = resp_batch.json()["batch_id"]
+
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+ progress_data = resp_progress.json()
+
+ # Check timing fields exist and are valid
+ assert "started_at" in progress_data
+ assert "updated_at" in progress_data
+ assert "completed_at" in progress_data
+ assert "processing_time_seconds" in progress_data
+
+    # Parse timestamps to confirm they are valid ISO-8601. fromisoformat()
+    # only accepts a trailing 'Z' from Python 3.11 onward, hence the
+    # replace(); a shared helper is sketched after this test
+    from datetime import datetime
+ started_at = datetime.fromisoformat(progress_data["started_at"].replace('Z', '+00:00'))
+ updated_at = datetime.fromisoformat(progress_data["updated_at"].replace('Z', '+00:00'))
+ completed_at = datetime.fromisoformat(progress_data["completed_at"].replace('Z', '+00:00'))
+
+ # Logical order check
+ assert updated_at >= started_at
+ assert completed_at >= started_at
+
+ # Processing time should be reasonable
+ assert isinstance(progress_data["processing_time_seconds"], (int, float))
+ assert progress_data["processing_time_seconds"] >= 0
+
+
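+# A small helper the timestamp checks above could share: parse an ISO-8601
+# string that may carry a trailing 'Z'. This is an editorial sketch, not part
+# of the original suite.
+def _parse_iso_utc(value: str):
+    """Parse ISO-8601, tolerating 'Z' on interpreters older than Python 3.11."""
+    from datetime import datetime
+    return datetime.fromisoformat(value.replace('Z', '+00:00'))
+
+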
+def test_batch_progress_estimated_completion_logic(client: TestClient):
+    """Test that finished batches report no estimated completion time"""
+    # For a completed batch, estimated_completion should be None
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": [file_no]
+ })
+ assert resp_batch.status_code == 200
+ batch_id = resp_batch.json()["batch_id"]
+
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+ progress_data = resp_progress.json()
+
+    # For completed operations, estimated completion should be None; one
+    # plausible in-flight estimator is sketched after this test
+ assert progress_data["estimated_completion"] is None
+ assert progress_data["current_file"] is None
+
+
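+# For context: one plausible way a tracker could derive estimated_completion
+# while files are still in flight, extrapolating from the average per-file
+# time. This is an illustrative formula, not necessarily what the billing API
+# implements.
+def _estimate_completion(started_at, processed: int, total: int):
+    """Return a projected finish time, or None when done or no data yet."""
+    from datetime import datetime, timezone
+    remaining = total - processed
+    if remaining <= 0 or processed == 0:
+        return None  # finished, or nothing to extrapolate from yet
+    now = datetime.now(timezone.utc)
+    avg_per_file = (now - started_at) / processed  # timedelta / int -> timedelta
+    return now + avg_per_file * remaining
+
+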
+def test_batch_progress_file_level_details(client: TestClient):
+ """Test that file-level progress details are accurate"""
+ # Create files with ledger data for more detailed testing
+ owner_id1 = _create_customer(client)
+ file_no1 = _create_file(client, owner_id1)
+
+ # Add ledger entry to first file
+ time_entry = {
+ "file_no": file_no1,
+ "date": date.today().isoformat(),
+ "t_code": "TIME",
+ "t_type": "2",
+ "empl_num": "E01",
+ "quantity": 2.0,
+ "rate": 175.0,
+ "amount": 350.0,
+ "billed": "N",
+ "note": "Progress test work",
+ }
+ assert client.post("/api/financial/ledger/", json=time_entry).status_code == 200
+
+ owner_id2 = _create_customer(client)
+ file_no2 = _create_file(client, owner_id2)
+
+ file_numbers = [file_no1, file_no2]
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers
+ })
+ assert resp_batch.status_code == 200
+ batch_id = resp_batch.json()["batch_id"]
+
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+ progress_data = resp_progress.json()
+
+ # Check file-level details
+ assert len(progress_data["files"]) == 2
+
+    for file_progress in progress_data["files"]:
+        assert file_progress["file_no"] in file_numbers
+        assert file_progress["status"] == "completed"
+        assert file_progress["started_at"] is not None
+        assert file_progress["completed_at"] is not None
+
+        # Every file is completed (asserted above), so statement metadata must
+        # be present; the full shape is sketched after this test
+        assert file_progress["statement_meta"] is not None
+        assert file_progress["statement_meta"]["file_no"] == file_progress["file_no"]
+        assert file_progress["statement_meta"]["filename"].endswith(".html")
+        assert file_progress["statement_meta"]["size"] > 0
+
+        # The file with ledger data should report its unbilled entry
+        if file_progress["file_no"] == file_no1:
+            assert file_progress["statement_meta"]["unbilled_count"] == 1
+            assert file_progress["statement_meta"]["totals"]["charges_unbilled"] == 350.0
+
+
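+from typing import Dict, TypedDict  # used only by the shape sketch below
+
+
+# An editorial sketch of the statement_meta shape implied by the assertions in
+# this file. Only fields asserted somewhere in these tests are listed; the
+# real payload may carry more.
+class _StatementMetaShape(TypedDict, total=False):
+    file_no: str
+    filename: str              # asserted to end with ".html"
+    size: int                  # bytes; asserted > 0 for generated statements
+    period: str                # e.g. "2024-01" when passed to batch-generate
+    unbilled_count: int
+    totals: Dict[str, float]   # includes "charges_unbilled"
+
+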
+def test_batch_progress_api_integration_workflow(client: TestClient):
+ """Test complete workflow integration: batch start -> progress polling -> completion"""
+ # Create multiple files for a more comprehensive test
+ file_numbers = []
+    for _ in range(3):
+ owner_id = _create_customer(client)
+ file_no = _create_file(client, owner_id)
+ file_numbers.append(file_no)
+
+ # Start batch operation
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers,
+ "period": "2024-01"
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ batch_id = batch_data["batch_id"]
+
+ # 1. Check initial response has batch_id
+ assert batch_id.startswith("batch_")
+ assert batch_data["total_files"] == 3
+
+    # 2. Poll progress; the batch runs synchronously here, so it is already
+    #    completed (a real async client would poll as sketched after this test)
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+ progress_data = resp_progress.json()
+
+ # 3. Verify progress data matches batch response data
+ assert progress_data["batch_id"] == batch_id
+ assert progress_data["total_files"] == batch_data["total_files"]
+ assert progress_data["successful_files"] == batch_data["successful"]
+ assert progress_data["failed_files"] == batch_data["failed"]
+ assert progress_data["success_rate"] == batch_data["success_rate"]
+
+ # 4. Verify all individual files were processed
+ assert len(progress_data["files"]) == 3
+ for file_progress in progress_data["files"]:
+ assert file_progress["file_no"] in file_numbers
+ assert file_progress["status"] == "completed"
+
+        # Completed files carry statement metadata; verify the period was applied
+        assert file_progress["statement_meta"] is not None
+        assert file_progress["statement_meta"]["period"] == "2024-01"
+
+ # 5. Verify generated statements exist and can be listed
+ for file_no in file_numbers:
+ resp_list = client.get(f"/api/billing/statements/{file_no}/list")
+ assert resp_list.status_code == 200
+ statements = resp_list.json()
+ assert len(statements) >= 1
+
+ # Find the statement from our batch
+ batch_statement = None
+ for stmt in statements:
+ if any(f["statement_meta"]["filename"] == stmt["filename"]
+ for f in progress_data["files"]
+ if f["file_no"] == file_no and f["statement_meta"]):
+ batch_statement = stmt
+ break
+
+ assert batch_statement is not None
+ assert batch_statement["size"] > 0
+
+
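+# In these tests the batch completes synchronously, but a real client would
+# poll. A minimal polling loop, sketched under the assumption that a
+# long-running batch eventually reaches a terminal status ("failed" and
+# "cancelled" are assumed terminal names alongside "completed"):
+def _poll_until_done(client: TestClient, batch_id: str, timeout: float = 30.0) -> dict:
+    """Poll batch progress until a terminal status or until the timeout lapses."""
+    import time
+    deadline = time.monotonic() + timeout
+    while time.monotonic() < deadline:
+        resp = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+        assert resp.status_code == 200
+        data = resp.json()
+        if data["status"] in ("completed", "failed", "cancelled"):
+            return data
+        time.sleep(0.2)
+    raise TimeoutError(f"batch {batch_id} did not finish within {timeout}s")
+
+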
+def test_batch_progress_error_handling_and_recovery(client: TestClient):
+ """Test error handling in progress tracking with mixed file results"""
+ # Mix of valid and invalid files to test error handling
+ owner_id = _create_customer(client)
+ valid_file = _create_file(client, owner_id)
+
+ file_numbers = [valid_file, "INVALID-1", "INVALID-2", valid_file] # Include duplicate
+
+ resp_batch = client.post("/api/billing/statements/batch-generate", json={
+ "file_numbers": file_numbers
+ })
+ assert resp_batch.status_code == 200
+ batch_data = resp_batch.json()
+ batch_id = batch_data["batch_id"]
+
+    # Duplicate file numbers should be collapsed (see the dedup sketch after
+    # this test)
+    assert batch_data["total_files"] == 3  # valid_file, INVALID-1, INVALID-2
+
+ resp_progress = client.get(f"/api/billing/statements/batch-progress/{batch_id}")
+ assert resp_progress.status_code == 200
+ progress_data = resp_progress.json()
+
+ # Check overall batch status
+ assert progress_data["status"] == "completed"
+ assert progress_data["total_files"] == 3
+ assert progress_data["processed_files"] == 3
+ assert progress_data["successful_files"] == 1
+ assert progress_data["failed_files"] == 2
+
+ # Check individual file results
+ files_by_no = {f["file_no"]: f for f in progress_data["files"]}
+
+ # Valid file should succeed
+ assert files_by_no[valid_file]["status"] == "completed"
+ assert files_by_no[valid_file]["statement_meta"] is not None
+ assert files_by_no[valid_file]["error_message"] is None
+
+ # Invalid files should fail with error messages
+ for invalid_file in ["INVALID-1", "INVALID-2"]:
+ assert files_by_no[invalid_file]["status"] == "failed"
+ assert files_by_no[invalid_file]["statement_meta"] is None
+ assert files_by_no[invalid_file]["error_message"] is not None
+ assert "not found" in files_by_no[invalid_file]["error_message"].lower()
+
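+
+
+# The deduplication asserted above (3 unique files out of 4 submitted) matches
+# the common order-preserving idiom below. This illustrates the expected
+# semantics only; it is not necessarily how the endpoint implements it.
+def test_dedup_idiom_matches_batch_semantics():
+    submitted = ["FILE-A", "INVALID-1", "INVALID-2", "FILE-A"]
+    unique = list(dict.fromkeys(submitted))  # dict keys preserve first-seen order
+    assert unique == ["FILE-A", "INVALID-1", "INVALID-2"]
+    assert len(unique) == 3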