app/utils/enhanced_audit.py (new file, 668 lines added)
@@ -0,0 +1,668 @@
"""
Enhanced audit logging utilities for P2 security features
"""
import uuid
import json
import hashlib
from datetime import datetime, timezone, timedelta
from typing import Optional, Dict, Any, List, Union
from contextlib import contextmanager

from sqlalchemy.orm import Session
from sqlalchemy import and_, or_, func
from fastapi import Request
from user_agents import parse as parse_user_agent

from app.models.audit_enhanced import (
    EnhancedAuditLog, SecurityAlert, ComplianceReport,
    AuditRetentionPolicy, SIEMIntegration,
    SecurityEventType, SecurityEventSeverity, ComplianceStandard
)
from app.models.user import User
from app.utils.logging import get_logger

logger = get_logger(__name__)


class EnhancedAuditLogger:
    """
    Enhanced audit logging system with security event tracking
    """

    def __init__(self, db: Session):
        self.db = db

    def log_security_event(
        self,
        event_type: SecurityEventType,
        title: str,
        description: str,
        user: Optional[User] = None,
        session_id: Optional[str] = None,
        request: Optional[Request] = None,
        severity: SecurityEventSeverity = SecurityEventSeverity.INFO,
        outcome: str = "success",
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
        resource_name: Optional[str] = None,
        data_before: Optional[Dict[str, Any]] = None,
        data_after: Optional[Dict[str, Any]] = None,
        risk_factors: Optional[List[str]] = None,
        threat_indicators: Optional[List[str]] = None,
        compliance_standards: Optional[List[ComplianceStandard]] = None,
        tags: Optional[List[str]] = None,
        custom_fields: Optional[Dict[str, Any]] = None,
        correlation_id: Optional[str] = None
    ) -> EnhancedAuditLog:
        """
        Log a comprehensive security event
        """
        # Generate unique event ID
        event_id = str(uuid.uuid4())

        # Extract request metadata
        source_ip = None
        user_agent = None
        endpoint = None
        http_method = None
        request_id = None

        if request:
            source_ip = self._get_client_ip(request)
            user_agent = request.headers.get("user-agent", "")
            endpoint = str(request.url.path)
            http_method = request.method
            request_id = getattr(request.state, 'request_id', None)

        # Determine event category
        event_category = self._categorize_event(event_type)

        # Calculate risk score
        risk_score = self._calculate_risk_score(
            event_type, severity, risk_factors, threat_indicators
        )

        # Get geographic info (placeholder - would integrate with GeoIP)
        country, region, city = self._get_geographic_info(source_ip)

        # Create audit log entry
        audit_log = EnhancedAuditLog(
            event_id=event_id,
            event_type=event_type.value,
            event_category=event_category,
            severity=severity.value,
            title=title,
            description=description,
            outcome=outcome,
            user_id=user.id if user else None,
            session_id=session_id,
            source_ip=source_ip,
            user_agent=user_agent,
            request_id=request_id,
            country=country,
            region=region,
            city=city,
            endpoint=endpoint,
            http_method=http_method,
            resource_type=resource_type,
            resource_id=resource_id,
            resource_name=resource_name,
            risk_score=risk_score,
            correlation_id=correlation_id or str(uuid.uuid4())
        )

        # Set JSON data
        if data_before:
            audit_log.set_data_before(data_before)
        if data_after:
            audit_log.set_data_after(data_after)
        if risk_factors:
            audit_log.set_risk_factors(risk_factors)
        if threat_indicators:
            audit_log.set_threat_indicators(threat_indicators)
        if compliance_standards:
            audit_log.set_compliance_standards([std.value for std in compliance_standards])
        if tags:
            audit_log.set_tags(tags)
        if custom_fields:
            audit_log.set_custom_fields(custom_fields)

        # Save to database
        self.db.add(audit_log)
        self.db.flush()  # Get ID for further processing

        # Check for security alerts
        self._check_security_alerts(audit_log)

        # Send to SIEM systems
        self._send_to_siem(audit_log)

        self.db.commit()

        logger.info(
            f"Security event logged: {event_type.value}",
            extra={
                "event_id": event_id,
                "user_id": user.id if user else None,
                "severity": severity.value,
                "risk_score": risk_score
            }
        )

        return audit_log

    def log_data_access(
        self,
        user: User,
        resource_type: str,
        resource_id: str,
        action: str,  # read, write, delete, export
        request: Optional[Request] = None,
        session_id: Optional[str] = None,
        record_count: Optional[int] = None,
        data_volume: Optional[int] = None,
        compliance_standards: Optional[List[ComplianceStandard]] = None
    ) -> EnhancedAuditLog:
        """
        Log data access events for compliance
        """
        event_type_map = {
            "read": SecurityEventType.DATA_READ,
            "write": SecurityEventType.DATA_WRITE,
            "delete": SecurityEventType.DATA_DELETE,
            "export": SecurityEventType.DATA_EXPORT
        }

        event_type = event_type_map.get(action, SecurityEventType.DATA_READ)

        return self.log_security_event(
            event_type=event_type,
            title=f"Data {action} operation",
            description=f"User {user.username} performed {action} on {resource_type} {resource_id}",
            user=user,
            session_id=session_id,
            request=request,
            severity=SecurityEventSeverity.INFO,
            resource_type=resource_type,
            resource_id=resource_id,
            compliance_standards=compliance_standards or [ComplianceStandard.SOX],
            custom_fields={
                "record_count": record_count,
                "data_volume": data_volume
            }
        )

    def log_authentication_event(
        self,
        event_type: SecurityEventType,
        username: str,
        request: Request,
        user: Optional[User] = None,
        session_id: Optional[str] = None,
        outcome: str = "success",
        details: Optional[str] = None,
        risk_factors: Optional[List[str]] = None
    ) -> EnhancedAuditLog:
        """
        Log authentication-related events
        """
        severity = SecurityEventSeverity.INFO
        if outcome == "failure" or risk_factors:
            severity = SecurityEventSeverity.MEDIUM
        if event_type == SecurityEventType.ACCOUNT_LOCKED:
            severity = SecurityEventSeverity.HIGH

        return self.log_security_event(
            event_type=event_type,
            title=f"Authentication event: {event_type.value}",
            description=details or f"Authentication {outcome} for user {username}",
            user=user,
            session_id=session_id,
            request=request,
            severity=severity,
            outcome=outcome,
            risk_factors=risk_factors,
            compliance_standards=[ComplianceStandard.SOX, ComplianceStandard.ISO27001]
        )

    def log_admin_action(
        self,
        admin_user: User,
        action: str,
        target_resource: str,
        request: Request,
        session_id: Optional[str] = None,
        data_before: Optional[Dict[str, Any]] = None,
        data_after: Optional[Dict[str, Any]] = None,
        affected_user_id: Optional[int] = None
    ) -> EnhancedAuditLog:
        """
        Log administrative actions for compliance
        """
        return self.log_security_event(
            event_type=SecurityEventType.CONFIGURATION_CHANGE,
            title=f"Administrative action: {action}",
            description=f"Admin {admin_user.username} performed {action} on {target_resource}",
            user=admin_user,
            session_id=session_id,
            request=request,
            severity=SecurityEventSeverity.MEDIUM,
            resource_type="admin",
            resource_id=target_resource,
            data_before=data_before,
            data_after=data_after,
            compliance_standards=[ComplianceStandard.SOX, ComplianceStandard.SOC2],
            tags=["admin_action", "configuration_change"],
            custom_fields={
                "affected_user_id": affected_user_id
            }
        )

    def create_security_alert(
        self,
        rule_id: str,
        rule_name: str,
        title: str,
        description: str,
        severity: SecurityEventSeverity,
        triggering_events: List[str],
        confidence: int = 100,
        time_window_minutes: Optional[int] = None,
        affected_users: Optional[List[int]] = None,
        affected_resources: Optional[List[str]] = None
    ) -> SecurityAlert:
        """
        Create a security alert based on detected patterns
        """
        alert_id = str(uuid.uuid4())

        alert = SecurityAlert(
            alert_id=alert_id,
            rule_id=rule_id,
            rule_name=rule_name,
            title=title,
            description=description,
            severity=severity.value,
            confidence=confidence,
            event_count=len(triggering_events),
            time_window_minutes=time_window_minutes,
            first_seen=datetime.now(timezone.utc),
            last_seen=datetime.now(timezone.utc)
        )

        # Set JSON fields
        alert.triggering_events = json.dumps(triggering_events)
        if affected_users:
            alert.affected_users = json.dumps(affected_users)
        if affected_resources:
            alert.affected_resources = json.dumps(affected_resources)

        self.db.add(alert)
        self.db.commit()

        logger.warning(
            f"Security alert created: {title}",
            extra={
                "alert_id": alert_id,
                "severity": severity.value,
                "confidence": confidence,
                "event_count": len(triggering_events)
            }
        )

        return alert

    def search_audit_logs(
        self,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        event_types: Optional[List[SecurityEventType]] = None,
        severities: Optional[List[SecurityEventSeverity]] = None,
        user_ids: Optional[List[int]] = None,
        source_ips: Optional[List[str]] = None,
        resource_types: Optional[List[str]] = None,
        outcomes: Optional[List[str]] = None,
        min_risk_score: Optional[int] = None,
        correlation_id: Optional[str] = None,
        limit: int = 1000,
        offset: int = 0
    ) -> List[EnhancedAuditLog]:
        """
        Search audit logs with comprehensive filtering
        """
        query = self.db.query(EnhancedAuditLog)

        # Apply filters
        if start_date:
            query = query.filter(EnhancedAuditLog.timestamp >= start_date)
        if end_date:
            query = query.filter(EnhancedAuditLog.timestamp <= end_date)
        if event_types:
            query = query.filter(EnhancedAuditLog.event_type.in_([et.value for et in event_types]))
        if severities:
            query = query.filter(EnhancedAuditLog.severity.in_([s.value for s in severities]))
        if user_ids:
            query = query.filter(EnhancedAuditLog.user_id.in_(user_ids))
        if source_ips:
            query = query.filter(EnhancedAuditLog.source_ip.in_(source_ips))
        if resource_types:
            query = query.filter(EnhancedAuditLog.resource_type.in_(resource_types))
        if outcomes:
            query = query.filter(EnhancedAuditLog.outcome.in_(outcomes))
        if min_risk_score is not None:
            query = query.filter(EnhancedAuditLog.risk_score >= min_risk_score)
        if correlation_id:
            query = query.filter(EnhancedAuditLog.correlation_id == correlation_id)

        return query.order_by(EnhancedAuditLog.timestamp.desc()).offset(offset).limit(limit).all()

    def generate_compliance_report(
        self,
        standard: ComplianceStandard,
        start_date: datetime,
        end_date: datetime,
        generated_by: User,
        report_type: str = "periodic"
    ) -> ComplianceReport:
        """
        Generate compliance report for specified standard and date range
        """
        report_id = str(uuid.uuid4())

        # Query relevant audit logs
        logs = self.search_audit_logs(
            start_date=start_date,
            end_date=end_date
        )
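        # NOTE: search_audit_logs applies its default limit of 1000, so only the
        # 1000 most recent entries in the date range feed the metrics below.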

        # Filter logs relevant to the compliance standard
        relevant_logs = [
            log for log in logs
            if standard.value in (log.get_compliance_standards() or [])
        ]

        # Calculate metrics
        total_events = len(relevant_logs)
        security_events = len([log for log in relevant_logs if log.event_category == "security"])
        violations = len([log for log in relevant_logs if log.outcome in ["failure", "blocked"]])
        high_risk_events = len([log for log in relevant_logs if log.risk_score >= 70])

        # Generate report content
        summary = {
            "total_events": total_events,
            "security_events": security_events,
            "violations": violations,
            "high_risk_events": high_risk_events,
            "compliance_percentage": ((total_events - violations) / total_events * 100) if total_events > 0 else 100
        }

        report = ComplianceReport(
            report_id=report_id,
            standard=standard.value,
            report_type=report_type,
            title=f"{standard.value.upper()} Compliance Report",
            description=f"Compliance report for {standard.value.upper()} from {start_date.date()} to {end_date.date()}",
            start_date=start_date,
            end_date=end_date,
            summary=json.dumps(summary),
            total_events=total_events,
            security_events=security_events,
            violations=violations,
            high_risk_events=high_risk_events,
            generated_by=generated_by.id,
            status="ready"
        )

        self.db.add(report)
        self.db.commit()

        logger.info(
            f"Compliance report generated: {standard.value}",
            extra={
                "report_id": report_id,
                "total_events": total_events,
                "violations": violations
            }
        )

        return report

    def cleanup_old_logs(self) -> int:
        """
        Clean up old audit logs based on retention policies
        """
        # Get active retention policies
        policies = self.db.query(AuditRetentionPolicy).filter(
            AuditRetentionPolicy.is_active == True
        ).order_by(AuditRetentionPolicy.priority.desc()).all()

        cleaned_count = 0

        for policy in policies:
            cutoff_date = datetime.now(timezone.utc) - timedelta(days=policy.retention_days)

            # Build query for logs to delete
            query = self.db.query(EnhancedAuditLog).filter(
                EnhancedAuditLog.timestamp < cutoff_date
            )

            # Apply event type filter if specified
            if policy.event_types:
                event_types = json.loads(policy.event_types)
                query = query.filter(EnhancedAuditLog.event_type.in_(event_types))

            # Apply compliance standards filter if specified
            if policy.compliance_standards:
                standards = json.loads(policy.compliance_standards)
                # This is a simplified check - in practice, you'd want more sophisticated filtering
                for standard in standards:
                    query = query.filter(EnhancedAuditLog.compliance_standards.contains(standard))
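                # Chained .contains() filters combine with AND, so a log is only
                # purged by this rule if it references every standard in the policy.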

            # Delete matching logs
            count = query.count()
            query.delete(synchronize_session=False)
            cleaned_count += count

            logger.info(f"Cleaned {count} logs using policy {policy.policy_name}")

        self.db.commit()
        return cleaned_count

    def _categorize_event(self, event_type: SecurityEventType) -> str:
        """Categorize event type into broader categories"""
        auth_events = {
            SecurityEventType.LOGIN_SUCCESS, SecurityEventType.LOGIN_FAILURE,
            SecurityEventType.LOGOUT, SecurityEventType.SESSION_EXPIRED,
            SecurityEventType.PASSWORD_CHANGE, SecurityEventType.ACCOUNT_LOCKED
        }

        security_events = {
            SecurityEventType.SUSPICIOUS_ACTIVITY, SecurityEventType.ATTACK_DETECTED,
            SecurityEventType.SECURITY_VIOLATION, SecurityEventType.IP_BLOCKED,
            SecurityEventType.ACCESS_DENIED, SecurityEventType.UNAUTHORIZED_ACCESS
        }

        data_events = {
            SecurityEventType.DATA_READ, SecurityEventType.DATA_WRITE,
            SecurityEventType.DATA_DELETE, SecurityEventType.DATA_EXPORT,
            SecurityEventType.BULK_OPERATION
        }

        if event_type in auth_events:
            return "authentication"
        elif event_type in security_events:
            return "security"
        elif event_type in data_events:
            return "data_access"
        else:
            return "system"

    def _calculate_risk_score(
        self,
        event_type: SecurityEventType,
        severity: SecurityEventSeverity,
        risk_factors: Optional[List[str]],
        threat_indicators: Optional[List[str]]
    ) -> int:
        """Calculate risk score for the event"""
        base_scores = {
            SecurityEventSeverity.CRITICAL: 80,
            SecurityEventSeverity.HIGH: 60,
            SecurityEventSeverity.MEDIUM: 40,
            SecurityEventSeverity.LOW: 20,
            SecurityEventSeverity.INFO: 10
        }

        score = base_scores.get(severity, 10)

        # Add points for risk factors
        if risk_factors:
            score += len(risk_factors) * 5

        # Add points for threat indicators
        if threat_indicators:
            score += len(threat_indicators) * 10

        # Event type modifiers
        high_risk_events = {
            SecurityEventType.ATTACK_DETECTED,
            SecurityEventType.PRIVILEGE_ESCALATION,
            SecurityEventType.UNAUTHORIZED_ACCESS
        }

        if event_type in high_risk_events:
            score += 20

        return min(score, 100)  # Cap at 100

    def _check_security_alerts(self, audit_log: EnhancedAuditLog) -> None:
        """Check if audit log should trigger security alerts"""
        # Example: Multiple failed logins from same IP
        if audit_log.event_type == SecurityEventType.LOGIN_FAILURE.value:
            recent_failures = self.db.query(EnhancedAuditLog).filter(
                and_(
                    EnhancedAuditLog.event_type == SecurityEventType.LOGIN_FAILURE.value,
                    EnhancedAuditLog.source_ip == audit_log.source_ip,
                    EnhancedAuditLog.timestamp >= datetime.now(timezone.utc) - timedelta(minutes=15)
                )
            ).count()

            if recent_failures >= 5:
                self.create_security_alert(
                    rule_id="failed_login_threshold",
                    rule_name="Multiple Failed Logins",
                    title=f"Multiple failed logins from {audit_log.source_ip}",
                    description=f"{recent_failures} failed login attempts in 15 minutes",
                    severity=SecurityEventSeverity.HIGH,
                    triggering_events=[audit_log.event_id],
                    time_window_minutes=15
                )

        # Example: High risk score threshold
        if audit_log.risk_score >= 80:
            self.create_security_alert(
                rule_id="high_risk_event",
                rule_name="High Risk Security Event",
                title=f"High risk event detected: {audit_log.title}",
                description=f"Event with risk score {audit_log.risk_score} detected",
                severity=SecurityEventSeverity.HIGH,
                triggering_events=[audit_log.event_id],
                confidence=audit_log.risk_score
            )

    def _send_to_siem(self, audit_log: EnhancedAuditLog) -> None:
        """Send audit log to configured SIEM systems"""
        # Get active SIEM integrations
        integrations = self.db.query(SIEMIntegration).filter(
            SIEMIntegration.is_active == True
        ).all()

        for integration in integrations:
            try:
                # Check if event should be sent based on filters
                if self._should_send_to_siem(audit_log, integration):
                    # In a real implementation, this would send to the actual SIEM
                    # For now, just log the intent
                    logger.debug(
                        f"Sending event to SIEM {integration.integration_name}",
                        extra={"event_id": audit_log.event_id}
                    )

                    # Update statistics
                    integration.events_sent += 1
                    integration.last_sync = datetime.now(timezone.utc)

            except Exception as e:
                logger.error(f"Failed to send to SIEM {integration.integration_name}: {str(e)}")
                integration.errors_count += 1
                integration.last_error = str(e)
                integration.is_healthy = False

    def _should_send_to_siem(self, audit_log: EnhancedAuditLog, integration: SIEMIntegration) -> bool:
        """Check if audit log should be sent to specific SIEM integration"""
        # Check severity threshold
        severity_order = ["info", "low", "medium", "high", "critical"]
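        # List position doubles as rank: events whose severity ranks below the
        # integration's configured threshold are skipped. (Both values are
        # expected to be members of this list; .index() raises otherwise.)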
        if severity_order.index(audit_log.severity) < severity_order.index(integration.severity_threshold):
            return False

        # Check event type filter
        if integration.event_types:
            allowed_types = json.loads(integration.event_types)
            if audit_log.event_type not in allowed_types:
                return False

        return True

    def _get_client_ip(self, request: Request) -> str:
        """Extract client IP from request"""
        forwarded_for = request.headers.get("x-forwarded-for")
        if forwarded_for:
            return forwarded_for.split(",")[0].strip()

        real_ip = request.headers.get("x-real-ip")
        if real_ip:
            return real_ip

        return request.client.host if request.client else "unknown"

    def _get_geographic_info(self, ip_address: Optional[str]) -> tuple:
        """Get geographic information for IP address"""
        # Placeholder - would integrate with GeoIP service
        return None, None, None


@contextmanager
def audit_context(
    db: Session,
    user: Optional[User] = None,
    session_id: Optional[str] = None,
    request: Optional[Request] = None,
    correlation_id: Optional[str] = None
):
    """Context manager for audit logging"""
    auditor = EnhancedAuditLogger(db)

    # Set correlation ID for this context
    if not correlation_id:
        correlation_id = str(uuid.uuid4())
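    # Note: this correlation_id is only attached to the error event logged in the
    # except branch below; pass it explicitly to the auditor's methods if related
    # events should share it.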

    try:
        yield auditor
    except Exception as e:
        # Log the exception as a security event
        auditor.log_security_event(
            event_type=SecurityEventType.SECURITY_VIOLATION,
            title="System error occurred",
            description=f"Exception in audit context: {str(e)}",
            user=user,
            session_id=session_id,
            request=request,
            severity=SecurityEventSeverity.HIGH,
            outcome="error",
            correlation_id=correlation_id
        )
        raise


def get_enhanced_audit_logger(db: Session) -> EnhancedAuditLogger:
    """Dependency injection for enhanced audit logger"""
    return EnhancedAuditLogger(db)
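

# Illustrative usage from a request handler (sketch only; `db`, `request`, and
# `current_user` are assumed to come from the application's own dependencies):
#
#     auditor = get_enhanced_audit_logger(db)
#     auditor.log_authentication_event(
#         event_type=SecurityEventType.LOGIN_SUCCESS,
#         username=current_user.username,
#         request=request,
#         user=current_user,
#     )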