remove old import

This commit is contained in:
HotSwapp
2025-08-14 21:27:34 -05:00
parent bfc04a6909
commit 679ab4446a
17 changed files with 2016 additions and 557 deletions

View File

@@ -27,6 +27,8 @@ from app.auth.security import get_admin_user, get_password_hash, create_access_t
from app.services.audit import audit_service
from app.config import settings
from app.services.query_utils import apply_sorting, tokenized_ilike_filter, paginate_with_total
from app.utils.exceptions import handle_database_errors, safe_execute
from app.utils.logging import app_logger
router = APIRouter()
@@ -304,7 +306,8 @@ async def system_health(
disk_available = free_gb > 1.0 # 1GB minimum
if not disk_available:
alerts.append(f"Low disk space: {free_gb:.1f}GB remaining")
except:
except (OSError, ImportError) as e:
app_logger.warning(f"Could not check disk space: {str(e)}")
disk_available = True
# Check memory (simplified)
@@ -331,7 +334,8 @@ async def system_health(
active_sessions = db.query(User).filter(
User.last_login > datetime.now(timezone.utc) - timedelta(hours=24)
).count()
except:
except Exception as e:
app_logger.warning(f"Could not query active sessions: {str(e)}")
active_sessions = 0
# Check last backup
@@ -346,7 +350,8 @@ async def system_health(
last_backup = latest_backup.name
if backup_age.days > 7:
alerts.append(f"Last backup is {backup_age.days} days old")
except:
except (OSError, FileNotFoundError) as e:
app_logger.warning(f"Could not check backup status: {str(e)}")
alerts.append("Unable to check backup status")
# Application uptime
@@ -398,8 +403,8 @@ async def system_statistics(
if os.path.exists(db_path):
size_bytes = os.path.getsize(db_path)
db_size = f"{size_bytes / (1024*1024):.1f} MB"
except:
pass
except (OSError, ValueError) as e:
app_logger.warning(f"Could not get database size: {str(e)}")
# Check for recent backups
last_backup = "Not found"
@@ -410,8 +415,8 @@ async def system_statistics(
if backup_files:
latest_backup = max(backup_files, key=lambda p: p.stat().st_mtime)
last_backup = latest_backup.name
except:
pass
except (OSError, FileNotFoundError) as e:
app_logger.warning(f"Could not check for recent backups: {str(e)}")
# Application uptime
uptime_seconds = int(time.time() - APPLICATION_START_TIME)
@@ -437,8 +442,8 @@ async def system_statistics(
"description": f"Customer {customer.first} {customer.last} added",
"timestamp": datetime.now(timezone.utc).isoformat()
})
except:
pass
except Exception as e:
app_logger.warning(f"Could not get recent activity: {str(e)}")
return SystemStats(
total_customers=total_customers,

View File

@@ -16,6 +16,8 @@ from app.auth.security import get_current_user
from app.services.cache import invalidate_search_cache
from app.services.customers_search import apply_customer_filters, apply_customer_sorting, prepare_customer_csv_rows
from app.services.query_utils import apply_sorting, paginate_with_total
from app.utils.logging import app_logger
from app.utils.database import db_transaction
router = APIRouter()
@@ -321,14 +323,15 @@ async def create_customer(
)
customer = Rolodex(**customer_data.model_dump())
db.add(customer)
db.commit()
db.refresh(customer)
with db_transaction(db) as session:
session.add(customer)
session.flush()
session.refresh(customer)
try:
await invalidate_search_cache()
except Exception:
pass
except Exception as e:
app_logger.warning(f"Failed to invalidate search cache: {str(e)}")
return customer
@@ -352,12 +355,13 @@ async def update_customer(
for field, value in customer_data.model_dump(exclude_unset=True).items():
setattr(customer, field, value)
db.commit()
db.refresh(customer)
with db_transaction(db) as session:
session.flush()
session.refresh(customer)
try:
await invalidate_search_cache()
except Exception:
pass
except Exception as e:
app_logger.warning(f"Failed to invalidate search cache: {str(e)}")
return customer
@@ -376,12 +380,12 @@ async def delete_customer(
detail="Customer not found"
)
db.delete(customer)
db.commit()
with db_transaction(db) as session:
session.delete(customer)
try:
await invalidate_search_cache()
except Exception:
pass
except Exception as e:
app_logger.warning(f"Failed to invalidate search cache: {str(e)}")
return {"message": "Customer deleted successfully"}

View File

@@ -25,6 +25,7 @@ from app.models.additional import Payment, Deposit, FileNote, FormVariable, Repo
from app.models.flexible import FlexibleImport
from app.models.audit import ImportAudit, ImportAuditFile
from app.config import settings
from app.utils.logging import import_logger
router = APIRouter(tags=["import"])
@@ -931,7 +932,7 @@ async def import_csv_data(
rows_data.append(row_dict)
except Exception as row_error:
print(f"Skipping malformed row {line_num}: {row_error}")
import_logger.log_import_error(line_num, str(row_error), dict(zip(headers, fields)) if len(fields) <= len(headers) else None)
skipped_rows += 1
continue

View File

@@ -26,6 +26,7 @@ from app.models.lookups import FormIndex, Employee, FileType, FileStatus, Transa
from app.models.user import User
from app.auth.security import get_current_user
from app.services.cache import cache_get_json, cache_set_json
from app.utils.logging import app_logger
router = APIRouter()
@router.get("/_debug")
@@ -48,15 +49,16 @@ async def search_debug(
fts_status["files"] = "files_fts" in names
fts_status["ledger"] = "ledger_fts" in names
fts_status["qdros"] = "qdros_fts" in names
except Exception:
pass
except Exception as e:
app_logger.warning(f"Failed to check FTS status: {str(e)}")
# Detect Redis by trying to obtain a client
try:
from app.services.cache import _get_client # type: ignore
client = await _get_client()
redis_ok = client is not None
except Exception:
except Exception as e:
app_logger.debug(f"Redis not available: {str(e)}")
redis_ok = False
return {
"fts": fts_status,

View File

@@ -6,10 +6,11 @@ already-initialized database. Safe to call multiple times.
"""
from typing import Dict
from sqlalchemy.engine import Engine
from sqlalchemy import text
def _existing_columns(conn, table: str) -> set[str]:
rows = conn.execute(f"PRAGMA table_info('{table}')").fetchall()
rows = conn.execute(text(f"PRAGMA table_info('{table}')")).fetchall()
return {row[1] for row in rows} # name is column 2
@@ -122,7 +123,7 @@ def ensure_schema_updates(engine: Engine) -> None:
for col_name, col_type in cols.items():
if col_name not in existing:
try:
conn.execute(f"ALTER TABLE {table} ADD COLUMN {col_name} {col_type}")
conn.execute(text(f"ALTER TABLE {table} ADD COLUMN {col_name} {col_type}"))
except Exception:
# Ignore if not applicable (other engines) or race condition
pass

View File

@@ -12,10 +12,11 @@ from typing import Any, Optional
try:
import redis.asyncio as redis # type: ignore
except Exception: # pragma: no cover - allow running without redis installed
except ImportError: # pragma: no cover - allow running without redis installed
redis = None # type: ignore
from app.config import settings
from app.utils.logging import app_logger
_client: Optional["redis.Redis"] = None # type: ignore
@@ -35,14 +36,15 @@ async def _get_client() -> Optional["redis.Redis"]: # type: ignore
if _client is None:
try:
_client = redis.from_url(settings.redis_url, decode_responses=True) # type: ignore
except Exception:
except Exception as e:
app_logger.debug(f"Redis connection failed: {str(e)}")
_client = None
return _client
def _stable_hash(obj: Any) -> str:
data = json.dumps(obj, sort_keys=True, separators=(",", ":"))
return hashlib.sha1(data.encode("utf-8")).hexdigest()
return hashlib.sha256(data.encode("utf-8")).hexdigest()
def build_key(kind: str, user_id: Optional[str], parts: dict) -> str:

133
app/utils/__init__.py Normal file
View File

@@ -0,0 +1,133 @@
"""
Utility modules for the application.
"""
from .exceptions import (
handle_database_errors,
handle_validation_errors,
handle_security_errors,
safe_execute,
create_error_response,
ErrorContext,
APIError,
DatabaseError,
BusinessLogicError,
SecurityError
)
from .logging import (
StructuredLogger,
ImportLogger,
SecurityLogger,
DatabaseLogger,
log_function_call,
app_logger,
import_logger,
security_logger,
database_logger,
log_info,
log_warning,
log_error,
log_debug
)
from .database import (
TransactionManager,
db_transaction,
transactional,
BulkOperationManager,
safe_db_operation,
execute_with_retry
)
from .security import (
CredentialValidator,
PasswordStrengthValidator,
audit_code_security,
hash_password_securely,
verify_password,
SecurityFinding,
SecurityLevel
)
from .responses import (
ErrorCode,
ErrorDetail,
ValidationErrorDetail,
ErrorResponse,
ValidationErrorResponse,
SuccessResponse,
PaginatedResponse,
BulkOperationResponse,
create_error_response,
create_validation_error_response,
create_success_response,
create_not_found_response,
create_conflict_response,
create_unauthorized_response,
create_forbidden_response,
get_status_code_for_error
)
# Explicit public API of app.utils: every name re-exported above from the
# exceptions/logging/database/security/responses submodules.
__all__ = [
    # Exception handling
    'handle_database_errors',
    'handle_validation_errors',
    'handle_security_errors',
    'safe_execute',
    'create_error_response',
    'ErrorContext',
    'APIError',
    'DatabaseError',
    'BusinessLogicError',
    'SecurityError',
    # Logging
    'StructuredLogger',
    'ImportLogger',
    'SecurityLogger',
    'DatabaseLogger',
    'log_function_call',
    'app_logger',
    'import_logger',
    'security_logger',
    'database_logger',
    'log_info',
    'log_warning',
    'log_error',
    'log_debug',
    # Database utilities
    'TransactionManager',
    'db_transaction',
    'transactional',
    'BulkOperationManager',
    'safe_db_operation',
    'execute_with_retry',
    # Security utilities
    'CredentialValidator',
    'PasswordStrengthValidator',
    'audit_code_security',
    'hash_password_securely',
    'verify_password',
    'SecurityFinding',
    'SecurityLevel',
    # Response utilities
    'ErrorCode',
    'ErrorDetail',
    'ValidationErrorDetail',
    'ErrorResponse',
    'ValidationErrorResponse',
    'SuccessResponse',
    'PaginatedResponse',
    'BulkOperationResponse',
    'create_error_response',
    'create_validation_error_response',
    'create_success_response',
    'create_not_found_response',
    'create_conflict_response',
    'create_unauthorized_response',
    'create_forbidden_response',
    'get_status_code_for_error'
]

326
app/utils/database.py Normal file
View File

@@ -0,0 +1,326 @@
"""
Database transaction management utilities for consistent transaction handling.
"""
from typing import Callable, Any, Optional, TypeVar, Type
from functools import wraps
from contextlib import contextmanager
import logging
from sqlalchemy.orm import Session
from sqlalchemy.exc import SQLAlchemyError
from app.utils.exceptions import DatabaseError, handle_database_errors
from app.utils.logging import database_logger
T = TypeVar('T')
class TransactionManager:
    """Context manager for database transactions with automatic rollback on errors.

    Wraps a SQLAlchemy ``Session`` so that entering the context logs a
    "started" event, a clean exit commits (when ``auto_commit``), and an
    exception rolls back (when ``auto_rollback``) before re-raising.
    ``commit()``/``rollback()`` may also be called manually; the
    ``committed``/``rolled_back`` flags ensure each happens at most once.
    """
    def __init__(self, db_session: Session, auto_commit: bool = True, auto_rollback: bool = True):
        # Session whose transaction lifecycle this manager controls.
        self.db_session = db_session
        self.auto_commit = auto_commit
        self.auto_rollback = auto_rollback
        # Guard flags: commit/rollback are issued at most once each.
        self.committed = False
        self.rolled_back = False
    def __enter__(self) -> Session:
        """Enter transaction context."""
        database_logger.log_transaction_event("started", {
            "auto_commit": self.auto_commit,
            "auto_rollback": self.auto_rollback
        })
        return self.db_session
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Exit transaction context with appropriate commit/rollback."""
        try:
            if exc_type is not None:
                # Exception occurred inside the `with` body.
                if self.auto_rollback and not self.rolled_back:
                    self.rollback()
                    database_logger.log_transaction_event("auto_rollback", {
                        "exception_type": exc_type.__name__ if exc_type else None,
                        "exception_message": str(exc_val) if exc_val else None
                    })
                return False  # Re-raise the exception
            else:
                # No exception: commit unless already committed manually.
                if self.auto_commit and not self.committed:
                    self.commit()
                    database_logger.log_transaction_event("auto_commit")
        except Exception as e:
            # Error during commit/rollback itself; try a last-ditch rollback
            # so the session is not left in a broken transaction.
            database_logger.error(f"Error during transaction cleanup: {str(e)}")
            if not self.rolled_back:
                try:
                    self.rollback()
                except Exception:
                    pass  # Best effort rollback
            raise
    def commit(self):
        """Manually commit the transaction."""
        if not self.committed and not self.rolled_back:
            try:
                self.db_session.commit()
                self.committed = True
                database_logger.log_transaction_event("manual_commit")
            except SQLAlchemyError as e:
                # Failed commit leaves the session dirty: roll back, then
                # surface the failure as the package's DatabaseError.
                database_logger.error(f"Transaction commit failed: {str(e)}")
                self.rollback()
                raise DatabaseError(f"Failed to commit transaction: {str(e)}")
    def rollback(self):
        """Manually rollback the transaction."""
        if not self.rolled_back:
            try:
                self.db_session.rollback()
                self.rolled_back = True
                database_logger.log_transaction_event("manual_rollback")
            except SQLAlchemyError as e:
                database_logger.error(f"Transaction rollback failed: {str(e)}")
                raise DatabaseError(f"Failed to rollback transaction: {str(e)}")
@contextmanager
def db_transaction(db_session: Session, auto_commit: bool = True, auto_rollback: bool = True):
    """
    Yield *db_session* inside a :class:`TransactionManager` scope.

    Args:
        db_session: SQLAlchemy session to manage.
        auto_commit: Commit automatically when the block exits cleanly.
        auto_rollback: Roll back automatically when the block raises.

    Yields:
        Session: the same session that was passed in.

    Example:
        with db_transaction(db) as session:
            session.add(new_record)
        # commits on clean exit, rolls back on exception
    """
    manager = TransactionManager(db_session, auto_commit, auto_rollback)
    with manager as session:
        yield session
def transactional(auto_commit: bool = True, auto_rollback: bool = True):
    """
    Decorator factory wrapping the target function in a database transaction.

    The decorated function must receive a SQLAlchemy session, either as a
    ``db`` keyword argument or as any positional argument that is a
    ``Session`` instance; otherwise a ``ValueError`` is raised at call time.

    Args:
        auto_commit: Commit automatically on successful completion.
        auto_rollback: Roll back automatically on exceptions.

    Example:
        @transactional()
        def create_user(user_data: dict, db: Session):
            user = User(**user_data)
            db.add(user)
            return user
    """
    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        @wraps(func)
        def wrapper(*args, **kwargs) -> T:
            # Locate the session: explicit `db` kwarg wins, else scan
            # positional args for the first Session instance.
            if 'db' in kwargs:
                session = kwargs['db']
            else:
                session = next((a for a in args if isinstance(a, Session)), None)
            if session is None:
                raise ValueError("Function must have a 'db' parameter with SQLAlchemy Session")
            with db_transaction(session, auto_commit, auto_rollback):
                return func(*args, **kwargs)
        return wrapper
    return decorator
class BulkOperationManager:
    """Manager for bulk database operations with batching and progress tracking.

    Progress counters are reset at the start of each bulk operation, so a
    single manager instance can be reused for several operations without the
    summary of an earlier operation leaking into the next one (previously the
    counters accumulated across calls and skewed the reported success rate).
    """
    def __init__(self, db_session: Session, batch_size: int = 1000):
        self.db_session = db_session
        self.batch_size = batch_size
        self.processed_count = 0
        self.error_count = 0
    def _reset_counters(self):
        """Reset per-operation progress counters (called by each bulk op)."""
        self.processed_count = 0
        self.error_count = 0
    def bulk_insert(self, records: list, model_class: Type):
        """
        Perform bulk insert with batching and error handling.

        Args:
            records: List of record dictionaries
            model_class: SQLAlchemy model class

        Returns:
            dict: Summary of operation results (totals, errors, success rate)

        Raises:
            DatabaseError: If any batch fails to insert (whole op rolls back).
        """
        self._reset_counters()
        total_records = len(records)
        database_logger.info(f"Starting bulk insert of {total_records} {model_class.__name__} records")
        try:
            with db_transaction(self.db_session) as session:
                for i in range(0, total_records, self.batch_size):
                    batch = records[i:i + self.batch_size]
                    try:
                        # Insert batch; flush so DB errors surface per-batch,
                        # but defer the commit to the transaction context.
                        session.bulk_insert_mappings(model_class, batch)
                        session.flush()
                        self.processed_count += len(batch)
                        # Log progress every 10 batches.
                        if self.processed_count % (self.batch_size * 10) == 0:
                            database_logger.info(f"Bulk insert progress: {self.processed_count}/{total_records}")
                    except SQLAlchemyError as e:
                        self.error_count += len(batch)
                        database_logger.error(f"Batch insert failed for records {i}-{i+len(batch)}: {str(e)}")
                        raise DatabaseError(f"Bulk insert failed: {str(e)}")
                # Final commit happens automatically via context manager
        except Exception as e:
            database_logger.error(f"Bulk insert operation failed: {str(e)}")
            raise
        summary = {
            "total_records": total_records,
            "processed": self.processed_count,
            "errors": self.error_count,
            "success_rate": (self.processed_count / total_records) * 100 if total_records > 0 else 0
        }
        database_logger.info("Bulk insert completed", **summary)
        return summary
    def bulk_update(self, updates: list, model_class: Type, key_field: str = 'id'):
        """
        Perform bulk update with batching and error handling.

        Args:
            updates: List of update dictionaries (must include key_field)
            model_class: SQLAlchemy model class
            key_field: Field name to use as update key

        Returns:
            dict: Summary of operation results (totals, errors, success rate)

        Raises:
            DatabaseError: If any batch fails to update (whole op rolls back).
        """
        self._reset_counters()
        total_updates = len(updates)
        database_logger.info(f"Starting bulk update of {total_updates} {model_class.__name__} records")
        try:
            with db_transaction(self.db_session) as session:
                for i in range(0, total_updates, self.batch_size):
                    batch = updates[i:i + self.batch_size]
                    try:
                        # Update batch; flush but don't commit yet.
                        session.bulk_update_mappings(model_class, batch)
                        session.flush()
                        self.processed_count += len(batch)
                        # Log progress every 10 batches.
                        if self.processed_count % (self.batch_size * 10) == 0:
                            database_logger.info(f"Bulk update progress: {self.processed_count}/{total_updates}")
                    except SQLAlchemyError as e:
                        self.error_count += len(batch)
                        database_logger.error(f"Batch update failed for records {i}-{i+len(batch)}: {str(e)}")
                        raise DatabaseError(f"Bulk update failed: {str(e)}")
                # Final commit happens automatically via context manager
        except Exception as e:
            database_logger.error(f"Bulk update operation failed: {str(e)}")
            raise
        summary = {
            "total_updates": total_updates,
            "processed": self.processed_count,
            "errors": self.error_count,
            "success_rate": (self.processed_count / total_updates) * 100 if total_updates > 0 else 0
        }
        database_logger.info("Bulk update completed", **summary)
        return summary
def safe_db_operation(operation: Callable, db_session: Session, default_return: Any = None) -> Any:
    """
    Safely execute a database operation with automatic rollback on errors.

    Args:
        operation: Callable invoked with the session as its only argument.
        db_session: SQLAlchemy session
        default_return: Value to return on failure

    Returns:
        Result of operation, or *default_return* if it raised.
    """
    try:
        with db_transaction(db_session, auto_rollback=True) as session:
            result = operation(session)
    except Exception as e:
        # Swallow the failure by design: log it and hand back the fallback.
        database_logger.error(f"Database operation failed: {str(e)}")
        return default_return
    return result
def execute_with_retry(
    operation: Callable,
    db_session: Session,
    max_retries: int = 3,
    retry_delay: float = 1.0
) -> Any:
    """
    Execute database operation with retry logic for transient failures.

    Each attempt runs inside its own transaction scope; a failed attempt is
    rolled back by the transaction context before the next retry.

    Args:
        operation: Function to execute (receives the session)
        db_session: SQLAlchemy session
        max_retries: Maximum number of retry attempts
        retry_delay: Delay between retries in seconds

    Returns:
        Result of successful operation

    Raises:
        DatabaseError: If all retry attempts fail; chained to the last
            underlying SQLAlchemyError so the root cause stays visible.
    """
    import time  # local import keeps module import side-effect free
    last_exception = None
    for attempt in range(max_retries + 1):
        try:
            with db_transaction(db_session) as session:
                return operation(session)
        except SQLAlchemyError as e:
            last_exception = e
            if attempt < max_retries:
                database_logger.warning(
                    f"Database operation failed (attempt {attempt + 1}/{max_retries + 1}): {str(e)}"
                )
                time.sleep(retry_delay)
            else:
                database_logger.error(f"Database operation failed after {max_retries + 1} attempts: {str(e)}")
    # Chain the final error to the last underlying exception (previously the
    # cause was dropped, losing the real traceback).
    raise DatabaseError(
        f"Operation failed after {max_retries + 1} attempts: {str(last_exception)}"
    ) from last_exception

246
app/utils/exceptions.py Normal file
View File

@@ -0,0 +1,246 @@
"""
Centralized exception handling utilities for consistent error management across the application.
"""
from typing import Dict, Any, Optional, Type, Union, Callable
from functools import wraps
import logging
from fastapi import HTTPException, status
from sqlalchemy.exc import SQLAlchemyError, IntegrityError, DataError
from pydantic import ValidationError
import traceback
logger = logging.getLogger(__name__)
# Package-level exception hierarchy: callers catch these instead of raw
# SQLAlchemy/fastapi exceptions.
class DatabaseError(Exception):
    """Custom exception for database-related errors"""
    pass
class BusinessLogicError(Exception):
    """Custom exception for business logic violations"""
    pass
class SecurityError(Exception):
    """Custom exception for security-related errors"""
    pass
class APIError(HTTPException):
    """Enhanced HTTP exception with additional context.

    Extends FastAPI's HTTPException with an application-specific
    ``error_code`` string and a free-form ``context`` dict for diagnostics.
    """
    def __init__(
        self,
        status_code: int,
        detail: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None
    ):
        super().__init__(status_code=status_code, detail=detail)
        self.error_code = error_code
        # Never leave context as None so callers can read it unconditionally.
        self.context = context or {}
def handle_database_errors(func: Callable) -> Callable:
    """
    Decorator to handle common database errors with consistent responses.

    Maps IntegrityError/DataError to HTTP 400, other SQLAlchemyError to 500,
    and any unexpected exception to a generic 500. Each APIError is chained
    (``raise ... from e``) so the original traceback is preserved.

    NOTE(review): wrapper is synchronous — if applied to an ``async def``
    endpoint it would return the coroutine unhandled; confirm call sites.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except IntegrityError as e:
            # Must precede SQLAlchemyError: IntegrityError is a subclass.
            logger.error(f"Database integrity error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Data integrity constraint violation",
                error_code="INTEGRITY_ERROR",
                context={"function": func.__name__}
            ) from e
        except DataError as e:
            logger.error(f"Database data error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid data format or type",
                error_code="DATA_ERROR",
                context={"function": func.__name__}
            ) from e
        except SQLAlchemyError as e:
            logger.error(f"Database error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Database operation failed",
                error_code="DATABASE_ERROR",
                context={"function": func.__name__}
            ) from e
        except Exception as e:
            logger.error(f"Unexpected error in {func.__name__}: {str(e)}", exc_info=True)
            raise APIError(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="An unexpected error occurred",
                error_code="INTERNAL_ERROR",
                context={"function": func.__name__}
            ) from e
    return wrapper
def handle_validation_errors(func: Callable) -> Callable:
    """
    Decorator to handle validation errors with consistent responses.

    Pydantic ValidationError -> HTTP 422 with per-field errors in context;
    ValueError -> HTTP 400 with the error text as the detail. Each APIError
    is chained (``raise ... from e``) to preserve the original traceback.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except ValidationError as e:
            logger.warning(f"Validation error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail="Validation failed",
                error_code="VALIDATION_ERROR",
                context={
                    "function": func.__name__,
                    "validation_errors": e.errors()
                }
            ) from e
        except ValueError as e:
            logger.warning(f"Value error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
                error_code="VALUE_ERROR",
                context={"function": func.__name__}
            ) from e
    return wrapper
def handle_security_errors(func: Callable) -> Callable:
    """
    Decorator to handle security-related errors.

    SecurityError and PermissionError both map to HTTP 403, with distinct
    error codes. Each APIError is chained (``raise ... from e``) so the
    underlying cause stays in the traceback.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except SecurityError as e:
            logger.warning(f"Security error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied",
                error_code="SECURITY_ERROR",
                context={"function": func.__name__}
            ) from e
        except PermissionError as e:
            logger.warning(f"Permission error in {func.__name__}: {str(e)}")
            raise APIError(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Insufficient permissions",
                error_code="PERMISSION_ERROR",
                context={"function": func.__name__}
            ) from e
    return wrapper
def safe_execute(
    operation: Callable,
    default_return: Any = None,
    log_errors: bool = True,
    raise_on_error: bool = False,
    error_message: str = "Operation failed"
) -> Any:
    """
    Safely execute an operation with optional error handling.

    Args:
        operation: Zero-argument callable to execute.
        default_return: Value to return if operation fails
        log_errors: Whether to log errors
        raise_on_error: Whether to re-raise exceptions
        error_message: Custom error message for logging

    Returns:
        Result of operation or default_return on failure
    """
    try:
        result = operation()
    except Exception as e:
        if log_errors:
            logger.error(f"{error_message}: {str(e)}", exc_info=True)
        if raise_on_error:
            raise
        return default_return
    return result
def create_error_response(
    error: Exception,
    status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
    error_code: str = "INTERNAL_ERROR",
    include_traceback: bool = False
) -> Dict[str, Any]:
    """
    Create a standardized error response.

    Args:
        error: The exception that occurred
        status_code: HTTP status code
        error_code: Application-specific error code
        include_traceback: Whether to include traceback (dev only)

    Returns:
        Standardized error response dictionary
    """
    payload: Dict[str, Any] = {
        "error": {
            "code": error_code,
            "message": str(error),
            "type": type(error).__name__
        }
    }
    if include_traceback:
        # Only safe in development: tracebacks can leak internals.
        payload["error"]["traceback"] = traceback.format_exc()
    return payload
class ErrorContext:
    """Context manager for handling errors in a specific scope.

    Records any exception raised in the ``with`` body on ``self.error``,
    optionally logs it, and suppresses it when ``suppress_errors`` is True.
    ``get_result`` then selects between a success value and the default.
    """
    def __init__(
        self,
        operation_name: str,
        default_return: Any = None,
        log_errors: bool = True,
        suppress_errors: bool = False
    ):
        self.operation_name = operation_name
        self.default_return = default_return
        self.log_errors = log_errors
        self.suppress_errors = suppress_errors
        self.error = None
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            return False
        self.error = exc_val
        if self.log_errors:
            logger.error(
                f"Error in {self.operation_name}: {str(exc_val)}",
                exc_info=True
            )
        # Returning True here swallows the exception.
        return self.suppress_errors
    def get_result(self, success_value: Any = None) -> Any:
        """Return *success_value*, or the default if an error was recorded."""
        return self.default_return if self.error is not None else success_value

285
app/utils/logging.py Normal file
View File

@@ -0,0 +1,285 @@
"""
Standardized logging utility to replace print statements and provide consistent logging patterns.
"""
import logging
import sys
from typing import Any, Dict, Optional, Union
from functools import wraps
from datetime import datetime
import json
from pathlib import Path
from app.config import settings
class StructuredLogger:
    """Logger wrapper that appends JSON-encoded context to each message.

    Wraps a stdlib logger; keyword arguments passed to the level methods are
    serialized to JSON and appended as ``| Context: {...}``. Handlers are
    attached only once per logger name.
    """
    def __init__(self, name: str, level: str = "INFO"):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(getattr(logging, level.upper()))
        # Avoid duplicate handlers when the same name is wrapped twice.
        if not self.logger.handlers:
            self._setup_handlers()
    def _setup_handlers(self):
        """Attach a stdout handler, plus a file handler when configured."""
        stream = logging.StreamHandler(sys.stdout)
        stream.setFormatter(logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        ))
        self.logger.addHandler(stream)
        # File handler (only when settings provides a log path).
        if hasattr(settings, 'LOG_FILE_PATH') and settings.LOG_FILE_PATH:
            log_path = Path(settings.LOG_FILE_PATH)
            log_path.parent.mkdir(parents=True, exist_ok=True)
            file_handler = logging.FileHandler(log_path)
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            ))
            self.logger.addHandler(file_handler)
    def debug(self, message: str, **kwargs):
        """Emit a DEBUG message with optional structured context."""
        self._log(logging.DEBUG, message, **kwargs)
    def info(self, message: str, **kwargs):
        """Emit an INFO message with optional structured context."""
        self._log(logging.INFO, message, **kwargs)
    def warning(self, message: str, **kwargs):
        """Emit a WARNING message with optional structured context."""
        self._log(logging.WARNING, message, **kwargs)
    def error(self, message: str, **kwargs):
        """Emit an ERROR message with optional structured context."""
        self._log(logging.ERROR, message, **kwargs)
    def critical(self, message: str, **kwargs):
        """Emit a CRITICAL message with optional structured context."""
        self._log(logging.CRITICAL, message, **kwargs)
    def _log(self, level: int, message: str, **kwargs):
        """Append JSON-encoded context (if any) and log at *level*."""
        if kwargs:
            message = f"{message} | Context: {json.dumps(kwargs, default=str)}"
        self.logger.log(level, message)
class ImportLogger(StructuredLogger):
    """Specialized logger for import operations."""
    def __init__(self):
        super().__init__("import_operations", "INFO")
    def log_import_start(self, import_type: str, file_name: str, record_count: Optional[int] = None):
        """Log the start of an import operation."""
        context = {
            "import_type": import_type,
            "file_name": file_name,
            "timestamp": datetime.now().isoformat()
        }
        if record_count is not None:
            context["expected_records"] = record_count
        self.info(f"Starting {import_type} import", **context)
    def log_import_progress(self, processed: int, total: Optional[int] = None, errors: int = 0):
        """Log import progress."""
        context = {
            "processed": processed,
            "errors": errors,
            "timestamp": datetime.now().isoformat()
        }
        if total is not None:
            # Percentage only computable when the caller knows the total.
            context["total"] = total
            context["progress_percent"] = round((processed / total) * 100, 2)
        self.info(f"Import progress: {processed} processed", **context)
    def log_import_error(self, line_number: int, error_message: str, row_data: Optional[Dict] = None):
        """Log import errors with context."""
        context = {
            "line_number": line_number,
            "error": error_message,
            "timestamp": datetime.now().isoformat()
        }
        if row_data:
            context["row_data"] = row_data
        self.warning(f"Import error at line {line_number}", **context)
    def log_import_complete(self, processed: int, errors: int, duration_seconds: float):
        """Log completion of import operation."""
        context = {
            "total_processed": processed,
            "total_errors": errors,
            "duration_seconds": duration_seconds,
            # max(..., 0.1) guards against division by ~zero durations.
            "records_per_second": round(processed / max(duration_seconds, 0.1), 2),
            "timestamp": datetime.now().isoformat()
        }
        self.info(f"Import completed: {processed} processed, {errors} errors", **context)
class SecurityLogger(StructuredLogger):
    """Specialized logger for security events."""
    def __init__(self):
        # WARNING default level: routine events should not flood the log.
        super().__init__("security", "WARNING")
    def log_auth_attempt(self, username: str, success: bool, ip_address: Optional[str] = None):
        """Log authentication attempts (INFO on success, WARNING on failure)."""
        context = {
            "username": username,
            "success": success,
            "ip_address": ip_address,
            "timestamp": datetime.now().isoformat()
        }
        if success:
            self.info("Successful authentication", **context)
        else:
            self.warning("Failed authentication attempt", **context)
    def log_permission_denied(self, username: str, resource: str, action: str):
        """Log permission denied events."""
        context = {
            "username": username,
            "resource": resource,
            "action": action,
            "timestamp": datetime.now().isoformat()
        }
        self.warning("Permission denied", **context)
    def log_security_event(self, event_type: str, details: Dict[str, Any]):
        """Log general security events."""
        context = {
            "event_type": event_type,
            "timestamp": datetime.now().isoformat(),
            **details
        }
        self.warning(f"Security event: {event_type}", **context)
class DatabaseLogger(StructuredLogger):
    """Specialized logger for database operations."""
    def __init__(self):
        super().__init__("database", "INFO")
    def log_query_performance(self, query_type: str, duration_ms: float, affected_rows: Optional[int] = None):
        """Log database query performance; queries over 1s are warnings."""
        context = {
            "query_type": query_type,
            "duration_ms": duration_ms,
            "timestamp": datetime.now().isoformat()
        }
        if affected_rows is not None:
            context["affected_rows"] = affected_rows
        if duration_ms > 1000:  # Log slow queries as warnings
            self.warning(f"Slow query detected: {query_type}", **context)
        else:
            self.debug(f"Query executed: {query_type}", **context)
    def log_transaction_event(self, event_type: str, details: Optional[Dict[str, Any]] = None):
        """Log database transaction events (started, commit, rollback, ...)."""
        context = {
            "event_type": event_type,
            "timestamp": datetime.now().isoformat()
        }
        if details:
            context.update(details)
        self.info(f"Transaction {event_type}", **context)
def log_function_call(logger: StructuredLogger = None, level: str = "DEBUG"):
    """
    Decorator factory that logs entry, completion, and timing of a function.

    Args:
        logger: Logger instance to use (a default is created lazily when None)
        level: Log level for the default logger

    Failures are logged with the error type and duration, then re-raised.
    """
    def decorator(func):
        # Lazily create (and share via the enclosing scope) a default logger.
        nonlocal logger
        if logger is None:
            logger = StructuredLogger(func.__module__, level)
        @wraps(func)
        def wrapper(*args, **kwargs):
            started = datetime.now()
            logger.debug(
                f"Entering function {func.__name__}",
                function=func.__name__,
                args_count=len(args),
                kwargs=list(kwargs.keys()),
                start_time=started.isoformat()
            )
            try:
                result = func(*args, **kwargs)
            except Exception as e:
                logger.error(
                    f"Function {func.__name__} failed with error: {str(e)}",
                    duration_seconds=(datetime.now() - started).total_seconds(),
                    function=func.__name__,
                    error_type=type(e).__name__
                )
                raise
            logger.debug(
                f"Function {func.__name__} completed successfully",
                duration_seconds=(datetime.now() - started).total_seconds(),
                function=func.__name__
            )
            return result
        return wrapper
    return decorator
# Pre-configured logger instances
app_logger = StructuredLogger("application")
import_logger = ImportLogger()
security_logger = SecurityLogger()
database_logger = DatabaseLogger()
# Convenience functions
def log_info(message: str, **kwargs):
    """Forward an info-level message to the shared application logger."""
    app_logger.info(message, **kwargs)
def log_warning(message: str, **kwargs):
    """Forward a warning-level message to the shared application logger."""
    app_logger.warning(message, **kwargs)
def log_error(message: str, **kwargs):
    """Forward an error-level message to the shared application logger."""
    app_logger.error(message, **kwargs)
def log_debug(message: str, **kwargs):
    """Forward a debug-level message to the shared application logger."""
    app_logger.debug(message, **kwargs)

346
app/utils/responses.py Normal file
View File

@@ -0,0 +1,346 @@
"""
Comprehensive error response schemas for consistent API responses.
"""
from datetime import datetime, timezone
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel, Field
class ErrorCode(str, Enum):
    """
    Standard error codes for the application.

    Subclasses ``str`` so members serialize as plain strings in JSON
    responses. HTTP status codes for each member are defined in
    ``ERROR_CODE_STATUS_MAP``.
    """
    # Validation errors
    VALIDATION_ERROR = "validation_error"
    FIELD_REQUIRED = "field_required"
    FIELD_INVALID = "field_invalid"
    # Authentication & Authorization
    UNAUTHORIZED = "unauthorized"
    FORBIDDEN = "forbidden"
    TOKEN_EXPIRED = "token_expired"
    TOKEN_INVALID = "token_invalid"
    # Resource errors
    NOT_FOUND = "not_found"
    ALREADY_EXISTS = "already_exists"
    CONFLICT = "conflict"
    # Database errors
    DATABASE_ERROR = "database_error"
    INTEGRITY_ERROR = "integrity_error"
    TRANSACTION_ERROR = "transaction_error"
    # Business logic errors
    BUSINESS_RULE_VIOLATION = "business_rule_violation"
    INVALID_OPERATION = "invalid_operation"
    # System errors
    INTERNAL_ERROR = "internal_error"
    SERVICE_UNAVAILABLE = "service_unavailable"
    TIMEOUT = "timeout"
    # File & Import errors
    FILE_TOO_LARGE = "file_too_large"
    INVALID_FILE_FORMAT = "invalid_file_format"
    IMPORT_ERROR = "import_error"
    # Security errors
    SECURITY_VIOLATION = "security_violation"
    RATE_LIMITED = "rate_limited"
class ErrorDetail(BaseModel):
    """Individual error detail: machine-readable code, human message, and
    optional field/context information."""
    code: ErrorCode = Field(..., description="Specific error code")
    message: str = Field(..., description="Human-readable error message")
    field: Optional[str] = Field(None, description="Field name if field-specific error")
    context: Optional[Dict[str, Any]] = Field(None, description="Additional error context")
class ValidationErrorDetail(ErrorDetail):
    """Specialized error detail for validation errors; unlike the base class,
    ``field`` is required here."""
    field: str = Field(..., description="Field that failed validation")
    input_value: Optional[Any] = Field(None, description="Value that failed validation")
    constraint: Optional[str] = Field(None, description="Validation constraint that failed")
class ErrorResponse(BaseModel):
    """Standard error response schema returned for all failed requests."""
    success: bool = Field(False, description="Always false for error responses")
    error: ErrorDetail = Field(..., description="Primary error information")
    errors: Optional[List[ErrorDetail]] = Field(None, description="Additional errors if multiple")
    # datetime.utcnow() is deprecated (Python 3.12+) and yields naive datetimes;
    # use an aware UTC timestamp, consistent with datetime.now(timezone.utc)
    # used elsewhere in the application.
    timestamp: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        description="Error timestamp"
    )
    request_id: Optional[str] = Field(None, description="Request identifier for tracing")

    class Config:
        # Render datetimes as ISO-8601 strings in JSON output.
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }
class ValidationErrorResponse(ErrorResponse):
    """Specialized error response for validation failures."""
    # Narrows the inherited fields to the validation-specific detail type.
    error: ValidationErrorDetail = Field(..., description="Primary validation error")
    errors: Optional[List[ValidationErrorDetail]] = Field(None, description="Additional validation errors")
class SuccessResponse(BaseModel):
    """Standard success response schema."""
    success: bool = Field(True, description="Always true for success responses")
    data: Optional[Any] = Field(None, description="Response data")
    message: Optional[str] = Field(None, description="Optional success message")
    # datetime.utcnow() is deprecated (Python 3.12+) and yields naive datetimes;
    # use an aware UTC timestamp, consistent with datetime.now(timezone.utc)
    # used elsewhere in the application.
    timestamp: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        description="Response timestamp"
    )

    class Config:
        # Render datetimes as ISO-8601 strings in JSON output.
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }
class PaginatedResponse(SuccessResponse):
    """Standard paginated response schema."""
    data: List[Any] = Field(..., description="Paginated data items")
    pagination: Dict[str, Any] = Field(..., description="Pagination metadata")

    @classmethod
    def create(
        cls,
        items: List[Any],
        total: int,
        page: int,
        page_size: int,
        message: Optional[str] = None
    ) -> "PaginatedResponse":
        """Build a paginated response with standard pagination metadata."""
        pages = (total + page_size - 1) // page_size  # ceiling division
        more_after = page < pages
        more_before = page > 1
        meta = {
            "total": total,
            "page": page,
            "page_size": page_size,
            "total_pages": pages,
            "has_next": more_after,
            "has_prev": more_before,
            "next_page": page + 1 if more_after else None,
            "prev_page": page - 1 if more_before else None,
        }
        return cls(data=items, pagination=meta, message=message)
class BulkOperationResponse(SuccessResponse):
    """Response for bulk operations with detailed results."""
    data: Dict[str, Any] = Field(..., description="Bulk operation results")

    @classmethod
    def create(
        cls,
        total_processed: int,
        successful: int,
        failed: int,
        errors: Optional[List[ErrorDetail]] = None,
        message: Optional[str] = None
    ) -> "BulkOperationResponse":
        """Build a bulk-operation response summarizing per-item outcomes."""
        # Guard against division by zero when nothing was processed.
        rate = (successful / total_processed * 100) if total_processed > 0 else 0
        summary: Dict[str, Any] = {
            "total_processed": total_processed,
            "successful": successful,
            "failed": failed,
            "success_rate": rate,
        }
        if errors:
            summary["errors"] = [e.dict() for e in errors]
        default_msg = f"Processed {total_processed} items: {successful} successful, {failed} failed"
        return cls(data=summary, message=message or default_msg)
def create_error_response(
    code: ErrorCode,
    message: str,
    field: Optional[str] = None,
    context: Optional[Dict[str, Any]] = None,
    request_id: Optional[str] = None
) -> ErrorResponse:
    """Helper function to create standardized error responses."""
    return ErrorResponse(
        error=ErrorDetail(code=code, message=message, field=field, context=context),
        request_id=request_id,
    )
def create_validation_error_response(
    field: str,
    message: str,
    input_value: Optional[Any] = None,
    constraint: Optional[str] = None,
    additional_errors: Optional[List[ValidationErrorDetail]] = None,
    request_id: Optional[str] = None
) -> ValidationErrorResponse:
    """Helper function to create validation error responses."""
    return ValidationErrorResponse(
        error=ValidationErrorDetail(
            code=ErrorCode.VALIDATION_ERROR,
            message=message,
            field=field,
            input_value=input_value,
            constraint=constraint,
        ),
        errors=additional_errors,
        request_id=request_id,
    )
def create_success_response(
    data: Optional[Any] = None,
    message: Optional[str] = None
) -> SuccessResponse:
    """Helper function to create standardized success responses."""
    return SuccessResponse(data=data, message=message)
def create_not_found_response(
    resource: str,
    identifier: Optional[str] = None,
    request_id: Optional[str] = None
) -> ErrorResponse:
    """Helper function to create not found error responses."""
    suffix = f" with identifier: {identifier}" if identifier else ""
    return create_error_response(
        code=ErrorCode.NOT_FOUND,
        message=f"{resource} not found{suffix}",
        context={"resource": resource, "identifier": identifier},
        request_id=request_id,
    )
def create_conflict_response(
    resource: str,
    reason: str,
    request_id: Optional[str] = None
) -> ErrorResponse:
    """Helper function to create conflict error responses."""
    msg = f"Conflict with {resource}: {reason}"
    return create_error_response(
        code=ErrorCode.CONFLICT,
        message=msg,
        context={"resource": resource, "reason": reason},
        request_id=request_id,
    )
def create_unauthorized_response(
    reason: Optional[str] = None,
    request_id: Optional[str] = None
) -> ErrorResponse:
    """Helper function to create unauthorized error responses."""
    # Append the reason only when one was supplied.
    msg = f"Authentication required: {reason}" if reason else "Authentication required"
    return create_error_response(
        code=ErrorCode.UNAUTHORIZED,
        message=msg,
        context={"reason": reason},
        request_id=request_id,
    )
def create_forbidden_response(
    action: Optional[str] = None,
    resource: Optional[str] = None,
    request_id: Optional[str] = None
) -> ErrorResponse:
    """Helper function to create forbidden error responses."""
    base = "Access denied"
    # Tailor the message and context to whichever arguments were provided.
    if action and resource:
        detail = f": insufficient permissions to {action} {resource}"
        ctx = {"action": action, "resource": resource}
    elif action:
        detail = f": insufficient permissions to {action}"
        ctx = {"action": action}
    elif resource:
        detail = f": insufficient permissions for {resource}"
        ctx = {"resource": resource}
    else:
        detail = ""
        ctx = {}
    return create_error_response(
        code=ErrorCode.FORBIDDEN,
        message=base + detail,
        context=ctx,
        request_id=request_id,
    )
# HTTP status code mapping for error codes; consumed by
# get_status_code_for_error(), which falls back to 500 for unmapped codes.
ERROR_CODE_STATUS_MAP = {
    # Validation
    ErrorCode.VALIDATION_ERROR: 422,
    ErrorCode.FIELD_REQUIRED: 422,
    ErrorCode.FIELD_INVALID: 422,
    # Authentication & authorization
    ErrorCode.UNAUTHORIZED: 401,
    ErrorCode.FORBIDDEN: 403,
    ErrorCode.TOKEN_EXPIRED: 401,
    ErrorCode.TOKEN_INVALID: 401,
    # Resource lookup / uniqueness
    ErrorCode.NOT_FOUND: 404,
    ErrorCode.ALREADY_EXISTS: 409,
    ErrorCode.CONFLICT: 409,
    # Database
    ErrorCode.DATABASE_ERROR: 500,
    ErrorCode.INTEGRITY_ERROR: 400,
    ErrorCode.TRANSACTION_ERROR: 500,
    # Business logic
    ErrorCode.BUSINESS_RULE_VIOLATION: 400,
    ErrorCode.INVALID_OPERATION: 400,
    # System
    ErrorCode.INTERNAL_ERROR: 500,
    ErrorCode.SERVICE_UNAVAILABLE: 503,
    ErrorCode.TIMEOUT: 504,
    # Files & imports
    ErrorCode.FILE_TOO_LARGE: 413,
    ErrorCode.INVALID_FILE_FORMAT: 400,
    ErrorCode.IMPORT_ERROR: 400,
    # Security
    ErrorCode.SECURITY_VIOLATION: 403,
    ErrorCode.RATE_LIMITED: 429,
}
def get_status_code_for_error(error_code: ErrorCode) -> int:
    """Return the HTTP status code for *error_code* (500 when unmapped)."""
    try:
        return ERROR_CODE_STATUS_MAP[error_code]
    except KeyError:
        return 500

432
app/utils/security.py Normal file
View File

@@ -0,0 +1,432 @@
"""
Security audit utility for credential validation and security best practices.
"""
import re
import hashlib
import secrets
from typing import List, Dict, Any, Optional, Tuple
from pathlib import Path
from dataclasses import dataclass
from enum import Enum
import ast
from app.utils.logging import security_logger
class SecurityLevel(Enum):
    """Security issue severity levels, ordered most to least severe."""
    CRITICAL = "critical"  # e.g. hardcoded credentials, SQL injection
    HIGH = "high"          # e.g. debug mode enabled
    MEDIUM = "medium"      # e.g. weak crypto, hardcoded IPs
    LOW = "low"            # default for unrecognized issue types
    INFO = "info"
@dataclass
class SecurityFinding:
    """Represents a single security finding from static code analysis."""
    file_path: str         # file in which the issue was found
    line_number: int       # 1-based line number of the offending line
    issue_type: str        # e.g. "hardcoded_password", "sql_injection"
    severity: SecurityLevel
    description: str       # human-readable summary of the issue
    recommendation: str    # suggested remediation
    code_snippet: str = ""  # stripped source line that triggered the finding
class CredentialValidator:
    """
    Regex-based static-analysis utility for detecting hardcoded credentials
    and other common security issues in source files.

    Use scan_file()/scan_directory() to collect SecurityFinding objects and
    generate_report() to summarize them.
    """
    # Patterns for detecting potential hardcoded credentials, keyed by
    # credential type. Matches produce findings with
    # issue_type = f"hardcoded_{cred_type}".
    CREDENTIAL_PATTERNS = {
        'password': [
            r'password\s*=\s*["\'][^"\']+["\']',
            r'passwd\s*=\s*["\'][^"\']+["\']',
            r'pwd\s*=\s*["\'][^"\']+["\']',
        ],
        'api_key': [
            r'api_key\s*=\s*["\'][^"\']+["\']',
            r'apikey\s*=\s*["\'][^"\']+["\']',
            r'key\s*=\s*["\'][A-Za-z0-9]{20,}["\']',
        ],
        'token': [
            r'token\s*=\s*["\'][^"\']+["\']',
            r'access_token\s*=\s*["\'][^"\']+["\']',
            r'auth_token\s*=\s*["\'][^"\']+["\']',
        ],
        'secret': [
            r'secret\s*=\s*["\'][^"\']+["\']',
            r'secret_key\s*=\s*["\'][^"\']+["\']',
            r'client_secret\s*=\s*["\'][^"\']+["\']',
        ],
        'database_url': [
            r'database_url\s*=\s*["\'][^"\']*://[^"\']+["\']',
            r'db_url\s*=\s*["\'][^"\']*://[^"\']+["\']',
            r'connection_string\s*=\s*["\'][^"\']*://[^"\']+["\']',
        ],
        'private_key': [
            r'private_key\s*=\s*["\'][^"\']+["\']',
            r'-----BEGIN\s+(?:RSA\s+)?PRIVATE\s+KEY-----',
        ]
    }
    # Safe patterns that should not be flagged.
    # NOTE(review): the first four patterns require a quote character right
    # before os.getenv/settings/config/env, so they only match when that text
    # is itself inside a string literal (e.g. password = "os.getenv(...).
    # A real `password = os.getenv("X")` contains no quoted value and is never
    # flagged by CREDENTIAL_PATTERNS in the first place — confirm these
    # patterns behave as intended.
    SAFE_PATTERNS = [
        r'password\s*=\s*["\']os\.getenv\(',
        r'password\s*=\s*["\']settings\.',
        r'password\s*=\s*["\']config\.',
        r'password\s*=\s*["\']env\.',
        r'password\s*=\s*["\'].*\{\}.*["\']', # Template strings
        r'password\s*=\s*["\'].*%s.*["\']', # Format strings
    ]
    # Patterns for other security issues, keyed by issue_type.
    SECURITY_PATTERNS = {
        'sql_injection': [
            r'\.execute\s*\(\s*["\'][^"\']*\+[^"\']*["\']', # String concatenation in SQL
            r'\.execute\s*\(\s*f["\'][^"\']*\{[^}]+\}[^"\']*["\']', # f-string in SQL
            r'\.execute\s*\(\s*["\'][^"\']*%[^"\']*["\']', # % formatting in SQL
        ],
        'hardcoded_ip': [
            r'["\'](?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)["\']',
        ],
        'debug_mode': [
            r'debug\s*=\s*True',
            r'DEBUG\s*=\s*True',
        ],
        'weak_crypto': [
            r'hashlib\.md5\(',
            r'hashlib\.sha1\(',
        ]
    }
    def __init__(self):
        # Accumulator for findings. NOTE(review): the scan methods below
        # return their own lists and never append here — confirm whether this
        # attribute is still needed.
        self.findings: List[SecurityFinding] = []
    def scan_file(self, file_path: Path) -> List[SecurityFinding]:
        """
        Scan a single file, line by line, for security issues.

        Args:
            file_path: Path to the file to scan

        Returns:
            List of security findings (empty on read failure; errors are
            logged, not raised)
        """
        findings = []
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                lines = f.readlines()
            # enumerate(..., 1) gives 1-based line numbers for reporting.
            for line_num, line in enumerate(lines, 1):
                # Check for credential patterns
                for cred_type, patterns in self.CREDENTIAL_PATTERNS.items():
                    for pattern in patterns:
                        if re.search(pattern, line, re.IGNORECASE):
                            # Suppress the finding when any safe pattern matches.
                            is_safe = any(re.search(safe_pattern, line, re.IGNORECASE)
                                        for safe_pattern in self.SAFE_PATTERNS)
                            if not is_safe:
                                findings.append(SecurityFinding(
                                    file_path=str(file_path),
                                    line_number=line_num,
                                    issue_type=f"hardcoded_{cred_type}",
                                    severity=SecurityLevel.CRITICAL,
                                    description=f"Potential hardcoded {cred_type} detected",
                                    recommendation=f"Move {cred_type} to environment variables or secure configuration",
                                    code_snippet=line.strip()
                                ))
                # Check for other security patterns
                for issue_type, patterns in self.SECURITY_PATTERNS.items():
                    for pattern in patterns:
                        if re.search(pattern, line, re.IGNORECASE):
                            severity = self._get_severity_for_issue(issue_type)
                            findings.append(SecurityFinding(
                                file_path=str(file_path),
                                line_number=line_num,
                                issue_type=issue_type,
                                severity=severity,
                                description=self._get_description_for_issue(issue_type),
                                recommendation=self._get_recommendation_for_issue(issue_type),
                                code_snippet=line.strip()
                            ))
        except Exception as e:
            # Best-effort: log and return whatever was collected before the failure.
            security_logger.error(f"Error scanning file {file_path}: {str(e)}")
        return findings
    def scan_directory(self, directory_path: Path, file_pattern: str = "*.py") -> List[SecurityFinding]:
        """
        Recursively scan all matching files in a directory for security issues.

        Args:
            directory_path: Path to the directory to scan
            file_pattern: File pattern to match (default: *.py)

        Returns:
            List of all security findings across files
        """
        all_findings = []
        try:
            # rglob matches the pattern recursively in all subdirectories.
            for file_path in directory_path.rglob(file_pattern):
                if file_path.is_file():
                    findings = self.scan_file(file_path)
                    all_findings.extend(findings)
        except Exception as e:
            security_logger.error(f"Error scanning directory {directory_path}: {str(e)}")
        return all_findings
    def _get_severity_for_issue(self, issue_type: str) -> SecurityLevel:
        """Get severity level for an issue type (LOW when unrecognized)."""
        severity_map = {
            'sql_injection': SecurityLevel.CRITICAL,
            'hardcoded_ip': SecurityLevel.MEDIUM,
            'debug_mode': SecurityLevel.HIGH,
            'weak_crypto': SecurityLevel.MEDIUM,
        }
        return severity_map.get(issue_type, SecurityLevel.LOW)
    def _get_description_for_issue(self, issue_type: str) -> str:
        """Get human-readable description for an issue type."""
        descriptions = {
            'sql_injection': "Potential SQL injection vulnerability detected",
            'hardcoded_ip': "Hardcoded IP address found",
            'debug_mode': "Debug mode enabled in production code",
            'weak_crypto': "Weak cryptographic algorithm detected",
        }
        return descriptions.get(issue_type, f"Security issue: {issue_type}")
    def _get_recommendation_for_issue(self, issue_type: str) -> str:
        """Get remediation recommendation for an issue type."""
        recommendations = {
            'sql_injection': "Use parameterized queries or ORM methods to prevent SQL injection",
            'hardcoded_ip': "Move IP addresses to configuration files or environment variables",
            'debug_mode': "Set debug mode via environment variables, default to False in production",
            'weak_crypto': "Use stronger cryptographic algorithms (SHA-256 or better)",
        }
        return recommendations.get(issue_type, "Review and address this security concern")
    def generate_report(self, findings: List[SecurityFinding]) -> Dict[str, Any]:
        """
        Generate a summary security report from a list of findings.

        Args:
            findings: List of security findings

        Returns:
            Dictionary with totals, per-severity/per-type counts, affected
            files, critical/high issues, and summary recommendations
        """
        report = {
            'total_issues': len(findings),
            'by_severity': {},
            'by_type': {},
            'files_affected': set(),
            'critical_issues': [],
            'recommendations': []
        }
        # Count by severity
        for severity in SecurityLevel:
            count = len([f for f in findings if f.severity == severity])
            if count > 0:
                report['by_severity'][severity.value] = count
        # Count by type
        for finding in findings:
            if finding.issue_type not in report['by_type']:
                report['by_type'][finding.issue_type] = 0
            report['by_type'][finding.issue_type] += 1
            report['files_affected'].add(finding.file_path)
            # Only CRITICAL and HIGH findings are surfaced individually.
            if finding.severity in [SecurityLevel.CRITICAL, SecurityLevel.HIGH]:
                report['critical_issues'].append({
                    'file': finding.file_path,
                    'line': finding.line_number,
                    'type': finding.issue_type,
                    'severity': finding.severity.value,
                    'description': finding.description
                })
        # set -> list, presumably so the report can be serialized (e.g. JSON).
        report['files_affected'] = list(report['files_affected'])
        # Generate summary recommendations
        if report['by_type']:
            report['recommendations'] = self._generate_recommendations(report['by_type'])
        return report
    def _generate_recommendations(self, issues_by_type: Dict[str, int]) -> List[str]:
        """Generate summary recommendations based on issue types found."""
        recommendations = []
        if any('hardcoded' in issue_type for issue_type in issues_by_type):
            recommendations.append(
                "Implement a secure configuration management system using environment variables or encrypted config files"
            )
        if 'sql_injection' in issues_by_type:
            recommendations.append(
                "Review all database queries and ensure parameterized queries are used consistently"
            )
        if 'debug_mode' in issues_by_type:
            recommendations.append(
                "Implement environment-based configuration for debug settings"
            )
        if 'weak_crypto' in issues_by_type:
            recommendations.append(
                "Upgrade cryptographic implementations to use stronger algorithms"
            )
        return recommendations
class PasswordStrengthValidator:
    """Validates password strength and generates secure random passwords."""

    def __init__(self):
        # Policy knobs; adjust per-instance before validating if needed.
        self.min_length = 8
        self.require_uppercase = True
        self.require_lowercase = True
        self.require_digits = True
        self.require_special = True

    def validate_password_strength(self, password: str) -> Tuple[bool, List[str]]:
        """
        Check *password* against the configured policy.

        Args:
            password: Password to validate

        Returns:
            Tuple of (is_valid, list_of_issues); the list is empty when valid.
        """
        problems: List[str] = []
        if len(password) < self.min_length:
            problems.append(f"Password must be at least {self.min_length} characters long")
        # (enabled?, pattern, complaint) triples, checked in policy order.
        checks = (
            (self.require_uppercase, r'[A-Z]', "Password must contain at least one uppercase letter"),
            (self.require_lowercase, r'[a-z]', "Password must contain at least one lowercase letter"),
            (self.require_digits, r'\d', "Password must contain at least one digit"),
            (self.require_special, r'[!@#$%^&*(),.?":{}|<>]', "Password must contain at least one special character"),
        )
        for enabled, pattern, complaint in checks:
            if enabled and not re.search(pattern, password):
                problems.append(complaint)
        return not problems, problems

    def generate_secure_password(self, length: int = 16) -> str:
        """
        Generate a cryptographically secure random password.

        Args:
            length: Desired password length

        Returns:
            Secure password string
        """
        import string

        lower, upper, digits = string.ascii_lowercase, string.ascii_uppercase, string.digits
        special = "!@#$%^&*(),.?\":{}|<>"
        chosen: List[str] = []
        # Guarantee at least one character from every required class.
        for enabled, pool in (
            (self.require_lowercase, lower),
            (self.require_uppercase, upper),
            (self.require_digits, digits),
            (self.require_special, special),
        ):
            if enabled:
                chosen.append(secrets.choice(pool))
        # Pad to the requested length from the full character set.
        everything = lower + upper + digits + special
        while len(chosen) < length:
            chosen.append(secrets.choice(everything))
        # Shuffle so the guaranteed characters are not in a fixed position.
        secrets.SystemRandom().shuffle(chosen)
        return ''.join(chosen)
def audit_code_security(directory_path: str, file_pattern: str = "*.py") -> Dict[str, Any]:
    """
    Perform a comprehensive security audit of code in a directory.

    Args:
        directory_path: Path to the directory to audit
        file_pattern: File pattern to match (default: *.py)

    Returns:
        Security audit report (see CredentialValidator.generate_report)
    """
    security_logger.info(f"Starting security audit of {directory_path}")
    scanner = CredentialValidator()
    findings = scanner.scan_directory(Path(directory_path), file_pattern)
    report = scanner.generate_report(findings)
    security_logger.info(
        f"Security audit completed",
        total_issues=report['total_issues'],
        files_scanned=len(report['files_affected']),
        critical_issues=len(report['critical_issues']),
    )
    return report
def hash_password_securely(password: str) -> str:
    """
    Hash a password with bcrypt using a freshly generated salt.

    Args:
        password: Plain text password

    Returns:
        Securely hashed password
    """
    import bcrypt

    return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
def verify_password(password: str, hashed: str) -> bool:
    """
    Verify a plain-text password against its bcrypt hash.

    Args:
        password: Plain text password
        hashed: Hashed password

    Returns:
        True if password matches, False otherwise
    """
    import bcrypt

    candidate = password.encode('utf-8')
    return bcrypt.checkpw(candidate, hashed.encode('utf-8'))