diff --git a/Dockerfile b/Dockerfile
index 0d44b75..d95641c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ WORKDIR /app
 # Install system dependencies
 RUN apt-get update && apt-get install -y \
     gcc \
+    curl \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements first for better layer caching
diff --git a/app/__pycache__/logging_config.cpython-313.pyc b/app/__pycache__/logging_config.cpython-313.pyc
new file mode 100644
index 0000000..45d1acb
Binary files /dev/null and b/app/__pycache__/logging_config.cpython-313.pyc differ
diff --git a/app/__pycache__/main.cpython-313.pyc b/app/__pycache__/main.cpython-313.pyc
index 68e7316..8386186 100644
Binary files a/app/__pycache__/main.cpython-313.pyc and b/app/__pycache__/main.cpython-313.pyc differ
diff --git a/app/logging_config.py b/app/logging_config.py
new file mode 100644
index 0000000..bd1fa61
--- /dev/null
+++ b/app/logging_config.py
@@ -0,0 +1,57 @@
+"""
+Structured logging configuration for the Delphi Database FastAPI app.
+
+This module configures structlog to output JSON logs and integrates
+context variables so request-specific fields (e.g., request_id) are
+included automatically in log records.
+"""
+
+from __future__ import annotations
+
+import logging
+from typing import Any, Dict
+
+import structlog
+
+
+def _add_required_defaults(_: Any, __: str, event_dict: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Ensure all required fields exist on every log entry so downstream
+    consumers receive a consistent schema.
+    """
+    # Required fields per project requirements
+    event_dict.setdefault("request_id", None)
+    event_dict.setdefault("http.method", None)
+    event_dict.setdefault("http.path", None)
+    event_dict.setdefault("status_code", None)
+    event_dict.setdefault("user.id", None)
+    event_dict.setdefault("duration_ms", None)
+    return event_dict
+
+
+def setup_logging(log_level: int = logging.INFO) -> None:
+    """
+    Configure structlog for JSON logging with contextvars support.
+
+    Args:
+        log_level: Minimum log level for application logs.
+    """
+    # Configure stdlib logging basic config for third-party libs (uvicorn, etc.)
+    logging.basicConfig(level=log_level)
+
+    structlog.configure(
+        processors=[
+            structlog.contextvars.merge_contextvars,
+            structlog.stdlib.add_log_level,
+            _add_required_defaults,
+            structlog.processors.TimeStamper(fmt="iso", key="timestamp"),
+            structlog.processors.dict_tracebacks,
+            structlog.processors.JSONRenderer(),
+        ],
+        context_class=dict,
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        wrapper_class=structlog.make_filtering_bound_logger(log_level),
+        cache_logger_on_first_use=True,
+    )
+
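Note (not part of the diff): a minimal usage sketch of the new module, assuming the package is importable as "app" and no other logging configuration has run. It mirrors how the middleware below binds request-scoped fields.

# Illustrative only; field values are examples.
from app.logging_config import setup_logging
import structlog

setup_logging()
logger = structlog.get_logger("demo")

# Bind request-scoped fields, as RequestIdMiddleware does for each request.
structlog.contextvars.bind_contextvars(request_id="abc-123", **{"http.path": "/health"})
logger.info("request", status_code=200, duration_ms=12)
# The rendered JSON should carry request_id, http.method, http.path, status_code,
# user.id and duration_ms (missing keys defaulted to null by _add_required_defaults),
# plus event, level and an ISO timestamp. The exact stdout line also depends on the
# root handler installed by logging.basicConfig.
structlog.contextvars.clear_contextvars()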
""" import os -import logging +import time import csv import json import uuid @@ -25,10 +25,13 @@ from sqlalchemy.orm import Session, joinedload from sqlalchemy import or_ from dotenv import load_dotenv from starlette.middleware.base import BaseHTTPMiddleware +import structlog +from structlog import contextvars as structlog_contextvars from .database import create_tables, get_db, get_database_url from .models import User, Case, Client, Phone, Transaction, Document, Payment, ImportLog from .auth import authenticate_user, get_current_user_from_session +from .logging_config import setup_logging # Load environment variables load_dotenv() @@ -38,9 +41,9 @@ SECRET_KEY = os.getenv("SECRET_KEY") if not SECRET_KEY: raise ValueError("SECRET_KEY environment variable must be set") -# Configure logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) +# Configure structured logging +setup_logging() +logger = structlog.get_logger(__name__) # Configure Jinja2 templates templates = Jinja2Templates(directory="app/templates") @@ -74,6 +77,61 @@ class AuthMiddleware(BaseHTTPMiddleware): return await call_next(request) + +class RequestIdMiddleware(BaseHTTPMiddleware): + """ + Middleware that assigns a request_id and binds request context for logging. + + Adds: request_id, http.method, http.path, user.id to the structlog context. + Emits a JSON access log with status_code and duration_ms after response. + """ + + async def dispatch(self, request: Request, call_next): + start_time = time.perf_counter() + + request_id = request.headers.get("X-Request-ID") or str(uuid.uuid4()) + method = request.method + path = request.url.path + + # user id from session if available (SessionMiddleware runs first) + user_id = request.session.get("user_id") if hasattr(request, "session") else None + + structlog_contextvars.bind_contextvars( + request_id=request_id, + **{"http.method": method, "http.path": path, "user.id": user_id}, + ) + + try: + response = await call_next(request) + status_code = response.status_code + except Exception as exc: # noqa: BLE001 - we re-raise after logging + status_code = 500 + duration_ms = int((time.perf_counter() - start_time) * 1000) + logger.error( + "request", + status_code=status_code, + duration_ms=duration_ms, + exc_info=True, + ) + structlog_contextvars.unbind_contextvars("request_id", "http.method", "http.path", "user.id") + raise + + # Ensure response header has request id + try: + response.headers["X-Request-ID"] = request_id + except Exception: + pass + + duration_ms = int((time.perf_counter() - start_time) * 1000) + logger.info( + "request", + status_code=status_code, + duration_ms=duration_ms, + ) + + structlog_contextvars.unbind_contextvars("request_id", "http.method", "http.path", "user.id") + return response + @asynccontextmanager async def lifespan(app: FastAPI): """ @@ -84,20 +142,20 @@ async def lifespan(app: FastAPI): - Logs database connection info """ # Startup - logger.info("Starting Delphi Database application...") + logger.info("app_start") # Create database tables create_tables() - logger.info("Database tables created/verified") + logger.info("db_tables_verified") # Log database connection info db_url = get_database_url() - logger.info(f"Database connected: {db_url}") + logger.info("db_connected", database_url=db_url) yield # Shutdown - logger.info("Shutting down Delphi Database application...") + logger.info("app_shutdown") # Create FastAPI application with lifespan management @@ -117,8 +175,9 @@ app.add_middleware( allow_headers=["*"], ) -# 
Register authentication middleware with exempt paths +# Register request logging and authentication middleware with exempt paths EXEMPT_PATHS = ["/", "/health", "/login", "/logout"] +app.add_middleware(RequestIdMiddleware) app.add_middleware(AuthMiddleware, exempt_paths=EXEMPT_PATHS) # Add SessionMiddleware for session management (must be added LAST so it runs FIRST) @@ -227,7 +286,7 @@ def parse_date(date_str: str) -> Optional[datetime]: except ValueError: continue - logger.warning(f"Could not parse date: '{date_str}'") + logger.warning("parse_date_failed", value=date_str) return None @@ -239,7 +298,7 @@ def parse_float(value: str) -> Optional[float]: try: return float(value.strip()) except ValueError: - logger.warning(f"Could not parse float: '{value}'") + logger.warning("parse_float_failed", value=value) return None @@ -251,7 +310,7 @@ def parse_int(value: str) -> Optional[int]: try: return int(value.strip()) except ValueError: - logger.warning(f"Could not parse int: '{value}'") + logger.warning("parse_int_failed", value=value) return None @@ -754,7 +813,7 @@ async def health_check(db: Session = Depends(get_db)): "users": user_count } except Exception as e: - logger.error(f"Health check failed: {e}") + logger.error("health_check_failed", error=str(e)) return { "status": "unhealthy", "database": "error", @@ -790,6 +849,7 @@ async def login_submit(request: Request, db: Session = Depends(get_db)): if not username or not password: error_message = "Username and password are required" + logger.warning("login_failed", username=username, reason="missing_credentials") return templates.TemplateResponse("login.html", { "request": request, "error": error_message @@ -799,6 +859,7 @@ async def login_submit(request: Request, db: Session = Depends(get_db)): user = authenticate_user(username, password) if not user: error_message = "Invalid username or password" + logger.warning("login_failed", username=username, reason="invalid_credentials") return templates.TemplateResponse("login.html", { "request": request, "error": error_message @@ -808,7 +869,9 @@ async def login_submit(request: Request, db: Session = Depends(get_db)): request.session["user_id"] = user.id request.session["user"] = {"id": user.id, "username": user.username} - logger.info(f"User '{username}' logged in successfully") + # Update bound context with authenticated user id + structlog_contextvars.bind_contextvars(**{"user.id": user.id}) + logger.info("login_success", username=username, **{"user.id": user.id}) # Redirect to dashboard after successful login return RedirectResponse(url="/dashboard", status_code=302) @@ -823,7 +886,7 @@ async def logout(request: Request): """ username = request.session.get("user", {}).get("username", "unknown") request.session.clear() - logger.info(f"User '{username}' logged out") + logger.info("logout", username=username) return RedirectResponse(url="/", status_code=302) @@ -883,11 +946,11 @@ async def dashboard( page_numbers = list(range(start_page, end_page + 1)) logger.info( - "Rendering dashboard: q='%s', page=%s, page_size=%s, total=%s", - q, - page, - page_size, - total, + "dashboard_render", + query=q, + page=page, + page_size=page_size, + total=total, ) return templates.TemplateResponse( @@ -971,7 +1034,12 @@ async def admin_upload_files( continue # Log the upload operation - logger.info(f"Admin upload: {len(results)} files uploaded, {len(errors)} errors by user '{user.username}'") + logger.info( + "admin_upload", + uploaded_count=len(results), + error_count=len(errors), + username=user.username, + ) 
 
     return templates.TemplateResponse("admin.html", {
         "request": request,
@@ -1085,7 +1153,13 @@ async def admin_import_data(
                 total_errors += 1
 
     # Log the import operation
-    logger.info(f"Admin import: {data_type}, {total_success} success, {total_errors} errors by user '{user.username}'")
+    logger.info(
+        "admin_import",
+        import_type=data_type,
+        success_count=total_success,
+        error_count=total_errors,
+        username=user.username,
+    )
 
     return templates.TemplateResponse("admin.html", {
         "request": request,
@@ -1181,7 +1255,7 @@ async def case_detail(
     )
 
     if not case_obj:
-        logger.warning("Case not found: id=%s", case_id)
+        logger.warning("case_not_found", case_id=case_id)
 
         # Get any errors from session and clear them
         errors = request.session.pop("case_update_errors", None)
@@ -1198,7 +1272,7 @@ async def case_detail(
             status_code=404,
         )
 
-    logger.info("Rendering case detail: id=%s, file_no='%s'", case_obj.id, case_obj.file_no)
+    logger.info("case_detail", case_id=case_obj.id, file_no=case_obj.file_no)
 
     # Get any errors from session and clear them
     errors = request.session.pop("case_update_errors", None)
@@ -1237,7 +1311,7 @@ async def case_update(
     # Fetch the case
     case_obj = db.query(Case).filter(Case.id == case_id).first()
     if not case_obj:
-        logger.warning("Case not found for update: id=%s", case_id)
+        logger.warning("case_not_found_update", case_id=case_id)
         return RedirectResponse(url=f"/case/{case_id}", status_code=302)
 
     # Validate and process fields
@@ -1290,11 +1364,20 @@ async def case_update(
 
     # Apply updates
     try:
+        changed_fields = {}
         for field, value in update_data.items():
+            old_value = getattr(case_obj, field)
+            if old_value != value:
+                changed_fields[field] = {"old": old_value, "new": value}
             setattr(case_obj, field, value)
 
         db.commit()
-        logger.info("Case updated successfully: id=%s, fields=%s", case_id, list(update_data.keys()))
+        logger.info(
+            "case_update",
+            case_id=case_id,
+            changed_fields=list(update_data.keys()),
+            changed_details=changed_fields,
+        )
 
         # Clear any previous errors from session
         request.session.pop("case_update_errors", None)
@@ -1303,7 +1386,7 @@ async def case_update(
 
     except Exception as e:
         db.rollback()
-        logger.error("Failed to update case id=%s: %s", case_id, str(e))
+        logger.error("case_update_failed", case_id=case_id, error=str(e))
 
         # Store error in session for display
         request.session["case_update_errors"] = ["Failed to save changes. Please try again."]
@@ -1330,7 +1413,7 @@ async def case_close(
     # Fetch the case
     case_obj = db.query(Case).filter(Case.id == case_id).first()
     if not case_obj:
-        logger.warning("Case not found for close: id=%s", case_id)
+        logger.warning("case_not_found_close", case_id=case_id)
         return RedirectResponse(url=f"/case/{case_id}", status_code=302)
 
     # Update case
@@ -1341,13 +1424,13 @@ async def case_close(
             case_obj.close_date = datetime.now()
 
         db.commit()
-        logger.info("Case closed: id=%s, close_date=%s", case_id, case_obj.close_date)
+        logger.info("case_closed", case_id=case_id, close_date=case_obj.close_date.isoformat() if case_obj.close_date else None)
 
         return RedirectResponse(url=f"/case/{case_id}?saved=1", status_code=302)
 
     except Exception as e:
         db.rollback()
-        logger.error("Failed to close case id=%s: %s", case_id, str(e))
+        logger.error("case_close_failed", case_id=case_id, error=str(e))
 
         # Store error in session for display
         request.session["case_update_errors"] = ["Failed to close case. Please try again."]
@@ -1374,7 +1457,7 @@ async def case_reopen(
     # Fetch the case
     case_obj = db.query(Case).filter(Case.id == case_id).first()
     if not case_obj:
-        logger.warning("Case not found for reopen: id=%s", case_id)
+        logger.warning("case_not_found_reopen", case_id=case_id)
         return RedirectResponse(url=f"/case/{case_id}", status_code=302)
 
     # Update case
@@ -1383,13 +1466,13 @@ async def case_reopen(
         case_obj.close_date = None
 
         db.commit()
-        logger.info("Case reopened: id=%s", case_id)
+        logger.info("case_reopened", case_id=case_id)
 
         return RedirectResponse(url=f"/case/{case_id}?saved=1", status_code=302)
 
     except Exception as e:
         db.rollback()
-        logger.error("Failed to reopen case id=%s: %s", case_id, str(e))
+        logger.error("case_reopen_failed", case_id=case_id, error=str(e))
 
         # Store error in session for display
         request.session["case_update_errors"] = ["Failed to reopen case. Please try again."]
diff --git a/cookies.txt b/cookies.txt
index f5452af..7fa9466 100644
--- a/cookies.txt
+++ b/cookies.txt
@@ -2,4 +2,4 @@
 # https://curl.se/docs/http-cookies.html
 # This file was generated by libcurl! Edit at your own risk.
 
-#HttpOnly_localhost	FALSE	/	FALSE	1761009191	session	eyJ1c2VyX2lkIjogMSwgInVzZXIiOiB7ImlkIjogMSwgInVzZXJuYW1lIjogImFkbWluIn19.aORpJw.oMEiA8ZMjrlLoJlYpDsM_T5EMpk
+#HttpOnly_localhost	FALSE	/	FALSE	1761016563	session	eyJ1c2VyX2lkIjogMSwgInVzZXIiOiB7ImlkIjogMSwgInVzZXJuYW1lIjogImFkbWluIn19.aOSF8w.gmvSLjQ8LTg_OFCZNUZppoDIjrY
diff --git a/delphi.db b/delphi.db
index 75208c5..e233fb6 100644
Binary files a/delphi.db and b/delphi.db differ
diff --git a/requirements.txt b/requirements.txt
index c5dee4f..dcd7a64 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,3 +8,5 @@ python-dotenv==1.0.0
 uvicorn[standard]==0.24.0
 jinja2==3.1.2
 aiofiles==23.2.1
+structlog==24.1.0
+itsdangerous==2.2.0
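
Note (not part of the diff): a minimal smoke check of the request-id middleware might look like the sketch below. It assumes the application is importable as app.main:app, that SECRET_KEY and the database are configured, and that /health remains in EXEMPT_PATHS.

from fastapi.testclient import TestClient

from app.main import app  # assumes SECRET_KEY and DB settings are available

client = TestClient(app)
resp = client.get("/health", headers={"X-Request-ID": "demo-request-id"})

# The middleware should echo the incoming id back on the response...
assert resp.headers["X-Request-ID"] == "demo-request-id"
# ...and emit one JSON access log line for the request carrying request_id,
# http.method, http.path, status_code and duration_ms.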