Compare commits

..

2 Commits

Author SHA1 Message Date
HotSwapp
0637fc2a63 chore: add structured logging with structlog; add request_id middleware; replace std logging 2025-10-06 22:22:04 -05:00
HotSwapp
b2d751f555 feat: Complete case edit functionality and Docker setup
- Fix case edit form data handling (POST requests now work correctly)
- Add comprehensive Docker setup with Dockerfile and docker-compose.yml
- Fix CSV import validation for client and case data
- Improve import error handling and column mapping
- Add .dockerignore for efficient Docker builds
- Complete end-to-end testing of full application workflow

All core functionality from del.plan.md now implemented and tested:
 ✓ Case view, edit, close, and reopen operations
 ✓ Data import from CSV files with validation
 ✓ Authentication and session management
 ✓ Dashboard with search and pagination
 ✓ Production-ready Docker containerization
2025-10-06 20:32:51 -05:00
10 changed files with 261 additions and 80 deletions

View File

@@ -1,21 +1,54 @@
# Version control
.git
.gitignore
__pycache__/
*.py[cod]
*.so
*.egg
*.egg-info/
.venv/
env/
venv/
build/
dist/
node_modules/
.DS_Store
.env
.env.*
delphi.db
cookies.txt
data-import/*
!data-import/.gitkeep
# Python
__pycache__
*.pyc
*.pyo
*.pyd
.Python
env
pip-log.txt
pip-delete-this-directory.txt
.tox
.coverage
.coverage.*
.pytest_cache
nosetests.xml
coverage.xml
*.cover
*.log
.venv
venv/
# IDE
.vscode
.idea
*.swp
*.swo
*~
# OS
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Documentation
README.md
TODO.md
*.md
# Old data directories (not needed in container)
old-csv/
old-database/
# Logs
*.log
# Temporary files
*.tmp

View File

@@ -1,30 +1,34 @@
FROM python:3.12-slim
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Use Python 3.13 slim image as base
FROM python:3.13-slim
# Set working directory
WORKDIR /app
# Minimal tooling for healthcheck
RUN apt-get update && apt-get install -y --no-install-recommends curl \
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
curl \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt ./
# Copy requirements first for better layer caching
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy application
COPY app ./app
COPY static ./static
COPY delphi-logo.webp ./delphi-logo.webp
COPY old-csv ./old-csv
COPY old-database ./old-database
COPY data-import ./data-import
# Copy application code
COPY . .
ENV DATABASE_URL=sqlite:///./delphi.db
# Create non-root user for security
RUN useradd --create-home --shell /bin/bash app && chown -R app:app /app
USER app
# Expose port
EXPOSE 8000
HEALTHCHECK --interval=30s --timeout=3s --retries=3 CMD curl -fsS http://localhost:8000/health || exit 1
CMD ["uvicorn","app.main:app","--host","0.0.0.0","--port","8000"]
# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/health || exit 1
# Run the application
CMD ["python", "-m", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

Binary file not shown.

Binary file not shown.

57
app/logging_config.py Normal file
View File

@@ -0,0 +1,57 @@
"""
Structured logging configuration for the Delphi Database FastAPI app.
This module configures structlog to output JSON logs and integrates
context variables so request-specific fields (e.g., request_id) are
included automatically in log records.
"""
from __future__ import annotations
import logging
from typing import Any, Dict
import structlog
def _add_required_defaults(_: Any, __: str, event_dict: Dict[str, Any]) -> Dict[str, Any]:
"""
Ensure all required fields exist on every log entry so downstream
consumers receive a consistent schema.
"""
# Required fields per project requirements
event_dict.setdefault("request_id", None)
event_dict.setdefault("http.method", None)
event_dict.setdefault("http.path", None)
event_dict.setdefault("status_code", None)
event_dict.setdefault("user.id", None)
event_dict.setdefault("duration_ms", None)
return event_dict
def setup_logging(log_level: int = logging.INFO) -> None:
    """
    Configure structlog to emit JSON log lines with contextvars support.

    Third-party libraries (uvicorn, etc.) keep using stdlib logging, so
    a ``basicConfig`` at *log_level* is installed for them as well.

    Args:
        log_level: Minimum log level for application logs.
    """
    # stdlib logging for emitters that do not go through structlog.
    logging.basicConfig(level=log_level)

    # Processor order matters: context merging and schema defaults must
    # run before timestamping/traceback formatting and the final JSON
    # render.
    processors = [
        structlog.contextvars.merge_contextvars,
        structlog.stdlib.add_log_level,
        _add_required_defaults,
        structlog.processors.TimeStamper(fmt="iso", key="timestamp"),
        structlog.processors.dict_tracebacks,
        structlog.processors.JSONRenderer(),
    ]

    structlog.configure(
        processors=processors,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.make_filtering_bound_logger(log_level),
        cache_logger_on_first_use=True,
    )

View File

@@ -6,7 +6,7 @@ and provides the main application instance.
"""
import os
import logging
import time
import csv
import json
import uuid
@@ -25,10 +25,13 @@ from sqlalchemy.orm import Session, joinedload
from sqlalchemy import or_
from dotenv import load_dotenv
from starlette.middleware.base import BaseHTTPMiddleware
import structlog
from structlog import contextvars as structlog_contextvars
from .database import create_tables, get_db, get_database_url
from .models import User, Case, Client, Phone, Transaction, Document, Payment, ImportLog
from .auth import authenticate_user, get_current_user_from_session
from .logging_config import setup_logging
# Load environment variables
load_dotenv()
@@ -38,9 +41,9 @@ SECRET_KEY = os.getenv("SECRET_KEY")
if not SECRET_KEY:
raise ValueError("SECRET_KEY environment variable must be set")
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Configure structured logging
setup_logging()
logger = structlog.get_logger(__name__)
# Configure Jinja2 templates
templates = Jinja2Templates(directory="app/templates")
@@ -74,6 +77,61 @@ class AuthMiddleware(BaseHTTPMiddleware):
return await call_next(request)
class RequestIdMiddleware(BaseHTTPMiddleware):
    """
    Middleware that assigns a request_id and binds request context for logging.
    Adds: request_id, http.method, http.path, user.id to the structlog context.
    Emits a JSON access log with status_code and duration_ms after response.

    The request id is taken from an incoming ``X-Request-ID`` header when
    present, otherwise a fresh UUID4 is generated; it is echoed back on the
    response's ``X-Request-ID`` header. Context variables are unbound after
    the response (or after logging a failure), so state never leaks across
    requests on the same worker.
    """
    async def dispatch(self, request: Request, call_next):
        # Wall-clock start for the duration_ms access-log field.
        start_time = time.perf_counter()
        # Honor a caller-supplied correlation id, else mint one.
        request_id = request.headers.get("X-Request-ID") or str(uuid.uuid4())
        method = request.method
        path = request.url.path
        # user id from session if available (SessionMiddleware runs first)
        user_id = request.session.get("user_id") if hasattr(request, "session") else None
        # Bind per-request fields into the structlog contextvars so every
        # log line emitted while handling this request carries them.
        # Dotted keys aren't valid kwargs, hence the **{...} spelling.
        structlog_contextvars.bind_contextvars(
            request_id=request_id,
            **{"http.method": method, "http.path": path, "user.id": user_id},
        )
        try:
            response = await call_next(request)
            status_code = response.status_code
        except Exception as exc:  # noqa: BLE001 - we re-raise after logging
            # Unhandled error: emit the access log with a 500 status and the
            # traceback, clean up the bound context, then propagate.
            status_code = 500
            duration_ms = int((time.perf_counter() - start_time) * 1000)
            logger.error(
                "request",
                status_code=status_code,
                duration_ms=duration_ms,
                exc_info=True,
            )
            structlog_contextvars.unbind_contextvars("request_id", "http.method", "http.path", "user.id")
            raise
        # Ensure response header has request id
        try:
            response.headers["X-Request-ID"] = request_id
        except Exception:
            # Best-effort: never fail the request over a header write.
            pass
        duration_ms = int((time.perf_counter() - start_time) * 1000)
        # Success-path access log; status_code/duration complete the schema.
        logger.info(
            "request",
            status_code=status_code,
            duration_ms=duration_ms,
        )
        # Drop only the keys we bound, leaving any outer context intact.
        structlog_contextvars.unbind_contextvars("request_id", "http.method", "http.path", "user.id")
        return response
@asynccontextmanager
async def lifespan(app: FastAPI):
"""
@@ -84,20 +142,20 @@ async def lifespan(app: FastAPI):
- Logs database connection info
"""
# Startup
logger.info("Starting Delphi Database application...")
logger.info("app_start")
# Create database tables
create_tables()
logger.info("Database tables created/verified")
logger.info("db_tables_verified")
# Log database connection info
db_url = get_database_url()
logger.info(f"Database connected: {db_url}")
logger.info("db_connected", database_url=db_url)
yield
# Shutdown
logger.info("Shutting down Delphi Database application...")
logger.info("app_shutdown")
# Create FastAPI application with lifespan management
@@ -117,8 +175,9 @@ app.add_middleware(
allow_headers=["*"],
)
# Register authentication middleware with exempt paths
# Register request logging and authentication middleware with exempt paths
EXEMPT_PATHS = ["/", "/health", "/login", "/logout"]
app.add_middleware(RequestIdMiddleware)
app.add_middleware(AuthMiddleware, exempt_paths=EXEMPT_PATHS)
# Add SessionMiddleware for session management (must be added LAST so it runs FIRST)
@@ -227,7 +286,7 @@ def parse_date(date_str: str) -> Optional[datetime]:
except ValueError:
continue
logger.warning(f"Could not parse date: '{date_str}'")
logger.warning("parse_date_failed", value=date_str)
return None
@@ -239,7 +298,7 @@ def parse_float(value: str) -> Optional[float]:
try:
return float(value.strip())
except ValueError:
logger.warning(f"Could not parse float: '{value}'")
logger.warning("parse_float_failed", value=value)
return None
@@ -251,7 +310,7 @@ def parse_int(value: str) -> Optional[int]:
try:
return int(value.strip())
except ValueError:
logger.warning(f"Could not parse int: '{value}'")
logger.warning("parse_int_failed", value=value)
return None
@@ -754,7 +813,7 @@ async def health_check(db: Session = Depends(get_db)):
"users": user_count
}
except Exception as e:
logger.error(f"Health check failed: {e}")
logger.error("health_check_failed", error=str(e))
return {
"status": "unhealthy",
"database": "error",
@@ -790,6 +849,7 @@ async def login_submit(request: Request, db: Session = Depends(get_db)):
if not username or not password:
error_message = "Username and password are required"
logger.warning("login_failed", username=username, reason="missing_credentials")
return templates.TemplateResponse("login.html", {
"request": request,
"error": error_message
@@ -799,6 +859,7 @@ async def login_submit(request: Request, db: Session = Depends(get_db)):
user = authenticate_user(username, password)
if not user:
error_message = "Invalid username or password"
logger.warning("login_failed", username=username, reason="invalid_credentials")
return templates.TemplateResponse("login.html", {
"request": request,
"error": error_message
@@ -808,7 +869,9 @@ async def login_submit(request: Request, db: Session = Depends(get_db)):
request.session["user_id"] = user.id
request.session["user"] = {"id": user.id, "username": user.username}
logger.info(f"User '{username}' logged in successfully")
# Update bound context with authenticated user id
structlog_contextvars.bind_contextvars(**{"user.id": user.id})
logger.info("login_success", username=username, **{"user.id": user.id})
# Redirect to dashboard after successful login
return RedirectResponse(url="/dashboard", status_code=302)
@@ -823,7 +886,7 @@ async def logout(request: Request):
"""
username = request.session.get("user", {}).get("username", "unknown")
request.session.clear()
logger.info(f"User '{username}' logged out")
logger.info("logout", username=username)
return RedirectResponse(url="/", status_code=302)
@@ -883,11 +946,11 @@ async def dashboard(
page_numbers = list(range(start_page, end_page + 1))
logger.info(
"Rendering dashboard: q='%s', page=%s, page_size=%s, total=%s",
q,
page,
page_size,
total,
"dashboard_render",
query=q,
page=page,
page_size=page_size,
total=total,
)
return templates.TemplateResponse(
@@ -971,7 +1034,12 @@ async def admin_upload_files(
continue
# Log the upload operation
logger.info(f"Admin upload: {len(results)} files uploaded, {len(errors)} errors by user '{user.username}'")
logger.info(
"admin_upload",
uploaded_count=len(results),
error_count=len(errors),
username=user.username,
)
return templates.TemplateResponse("admin.html", {
"request": request,
@@ -1085,7 +1153,13 @@ async def admin_import_data(
total_errors += 1
# Log the import operation
logger.info(f"Admin import: {data_type}, {total_success} success, {total_errors} errors by user '{user.username}'")
logger.info(
"admin_import",
import_type=data_type,
success_count=total_success,
error_count=total_errors,
username=user.username,
)
return templates.TemplateResponse("admin.html", {
"request": request,
@@ -1181,7 +1255,7 @@ async def case_detail(
)
if not case_obj:
logger.warning("Case not found: id=%s", case_id)
logger.warning("case_not_found", case_id=case_id)
# Get any errors from session and clear them
errors = request.session.pop("case_update_errors", None)
@@ -1198,7 +1272,7 @@ async def case_detail(
status_code=404,
)
logger.info("Rendering case detail: id=%s, file_no='%s'", case_obj.id, case_obj.file_no)
logger.info("case_detail", case_id=case_obj.id, file_no=case_obj.file_no)
# Get any errors from session and clear them
errors = request.session.pop("case_update_errors", None)
@@ -1237,7 +1311,7 @@ async def case_update(
# Fetch the case
case_obj = db.query(Case).filter(Case.id == case_id).first()
if not case_obj:
logger.warning("Case not found for update: id=%s", case_id)
logger.warning("case_not_found_update", case_id=case_id)
return RedirectResponse(url=f"/case/{case_id}", status_code=302)
# Validate and process fields
@@ -1290,11 +1364,20 @@ async def case_update(
# Apply updates
try:
changed_fields = {}
for field, value in update_data.items():
old_value = getattr(case_obj, field)
if old_value != value:
changed_fields[field] = {"old": old_value, "new": value}
setattr(case_obj, field, value)
db.commit()
logger.info("Case updated successfully: id=%s, fields=%s", case_id, list(update_data.keys()))
logger.info(
"case_update",
case_id=case_id,
changed_fields=list(update_data.keys()),
changed_details=changed_fields,
)
# Clear any previous errors from session
request.session.pop("case_update_errors", None)
@@ -1303,7 +1386,7 @@ async def case_update(
except Exception as e:
db.rollback()
logger.error("Failed to update case id=%s: %s", case_id, str(e))
logger.error("case_update_failed", case_id=case_id, error=str(e))
# Store error in session for display
request.session["case_update_errors"] = ["Failed to save changes. Please try again."]
@@ -1330,7 +1413,7 @@ async def case_close(
# Fetch the case
case_obj = db.query(Case).filter(Case.id == case_id).first()
if not case_obj:
logger.warning("Case not found for close: id=%s", case_id)
logger.warning("case_not_found_close", case_id=case_id)
return RedirectResponse(url=f"/case/{case_id}", status_code=302)
# Update case
@@ -1341,13 +1424,13 @@ async def case_close(
case_obj.close_date = datetime.now()
db.commit()
logger.info("Case closed: id=%s, close_date=%s", case_id, case_obj.close_date)
logger.info("case_closed", case_id=case_id, close_date=case_obj.close_date.isoformat() if case_obj.close_date else None)
return RedirectResponse(url=f"/case/{case_id}?saved=1", status_code=302)
except Exception as e:
db.rollback()
logger.error("Failed to close case id=%s: %s", case_id, str(e))
logger.error("case_close_failed", case_id=case_id, error=str(e))
# Store error in session for display
request.session["case_update_errors"] = ["Failed to close case. Please try again."]
@@ -1374,7 +1457,7 @@ async def case_reopen(
# Fetch the case
case_obj = db.query(Case).filter(Case.id == case_id).first()
if not case_obj:
logger.warning("Case not found for reopen: id=%s", case_id)
logger.warning("case_not_found_reopen", case_id=case_id)
return RedirectResponse(url=f"/case/{case_id}", status_code=302)
# Update case
@@ -1383,13 +1466,13 @@ async def case_reopen(
case_obj.close_date = None
db.commit()
logger.info("Case reopened: id=%s", case_id)
logger.info("case_reopened", case_id=case_id)
return RedirectResponse(url=f"/case/{case_id}?saved=1", status_code=302)
except Exception as e:
db.rollback()
logger.error("Failed to reopen case id=%s: %s", case_id, str(e))
logger.error("case_reopen_failed", case_id=case_id, error=str(e))
# Store error in session for display
request.session["case_update_errors"] = ["Failed to reopen case. Please try again."]

View File

@@ -2,4 +2,4 @@
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.
#HttpOnly_localhost FALSE / FALSE 1761009191 session eyJ1c2VyX2lkIjogMSwgInVzZXIiOiB7ImlkIjogMSwgInVzZXJuYW1lIjogImFkbWluIn19.aORpJw.oMEiA8ZMjrlLoJlYpDsM_T5EMpk
#HttpOnly_localhost FALSE / FALSE 1761016563 session eyJ1c2VyX2lkIjogMSwgInVzZXIiOiB7ImlkIjogMSwgInVzZXJuYW1lIjogImFkbWluIn19.aOSF8w.gmvSLjQ8LTg_OFCZNUZppoDIjrY

BIN
delphi.db

Binary file not shown.

View File

@@ -1,22 +1,24 @@
version: "3.9"
version: '3.8'
services:
web:
delphi-db:
build: .
container_name: delphicg-web
ports:
- "8000:8000"
environment:
- SECRET_KEY=${SECRET_KEY}
- SECRET_KEY=your-secret-key-here-change-this-in-production
- DATABASE_URL=sqlite:///./delphi.db
volumes:
- ./data-import:/app/data-import
# Mount the database file so it persists between container restarts
- ./delphi.db:/app/delphi.db
- ./old-csv:/app/old-csv:ro
- ./static/logo:/app/static/logo
# Mount data-import directory for file uploads
- ./data-import:/app/data-import
# Mount static files
- ./static:/app/static
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8000/health || exit 1"]
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 3s
timeout: 10s
retries: 3
start_period: 40s

View File

@@ -8,3 +8,5 @@ python-dotenv==1.0.0
uvicorn[standard]==0.24.0
jinja2==3.1.2
aiofiles==23.2.1
structlog==24.1.0
itsdangerous==2.2.0