358 lines
No EOL
12 KiB
Python
358 lines
No EOL
12 KiB
Python
"""
|
|
Logging utilities module.
|
|
|
|
This module provides structured logging utilities with correlation IDs,
|
|
timestamps, and enhanced formatting for debugging CMS backend communication.
|
|
"""
|
|
import logging
|
|
import json
|
|
import uuid
|
|
import time
|
|
from typing import Dict, Any, Optional
|
|
from datetime import datetime
|
|
|
|
# Setup loggers
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
class CorrelatedLogger:
|
|
"""
|
|
Logger with correlation ID support for tracking message flows.
|
|
|
|
This class provides structured logging with correlation IDs to help
|
|
track requests/responses and debug communication between CMS backend
|
|
and the worker.
|
|
"""
|
|
|
|
def __init__(self, logger_name: str = "detector_worker.correlated"):
|
|
"""
|
|
Initialize the correlated logger.
|
|
|
|
Args:
|
|
logger_name: Name for the logger
|
|
"""
|
|
self.logger = logging.getLogger(logger_name)
|
|
self.correlation_id: Optional[str] = None
|
|
|
|
def set_correlation_id(self, correlation_id: Optional[str] = None) -> str:
|
|
"""
|
|
Set or generate a correlation ID.
|
|
|
|
Args:
|
|
correlation_id: Specific correlation ID or None to generate
|
|
|
|
Returns:
|
|
The correlation ID that was set
|
|
"""
|
|
if correlation_id is None:
|
|
correlation_id = str(uuid.uuid4())[:8] # Short UUID for readability
|
|
self.correlation_id = correlation_id
|
|
return correlation_id
|
|
|
|
def clear_correlation_id(self) -> None:
|
|
"""Clear the current correlation ID."""
|
|
self.correlation_id = None
|
|
|
|
def _format_message(self, message: str, extra_data: Optional[Dict[str, Any]] = None) -> str:
|
|
"""
|
|
Format a message with correlation ID and timestamp.
|
|
|
|
Args:
|
|
message: Base message
|
|
extra_data: Additional data to include
|
|
|
|
Returns:
|
|
Formatted message with correlation info
|
|
"""
|
|
timestamp = datetime.utcnow().strftime("%H:%M:%S.%f")[:-3] # Include milliseconds
|
|
|
|
parts = [f"[{timestamp}]"]
|
|
|
|
if self.correlation_id:
|
|
parts.append(f"[{self.correlation_id}]")
|
|
|
|
parts.append(message)
|
|
|
|
if extra_data:
|
|
# Format extra data as compact JSON
|
|
try:
|
|
extra_json = json.dumps(extra_data, separators=(',', ':'))
|
|
parts.append(f"| {extra_json}")
|
|
except (TypeError, ValueError):
|
|
parts.append(f"| {str(extra_data)}")
|
|
|
|
return " ".join(parts)
|
|
|
|
def info(self, message: str, extra_data: Optional[Dict[str, Any]] = None) -> None:
|
|
"""Log an info message with correlation."""
|
|
formatted = self._format_message(message, extra_data)
|
|
self.logger.info(formatted)
|
|
|
|
def debug(self, message: str, extra_data: Optional[Dict[str, Any]] = None) -> None:
|
|
"""Log a debug message with correlation."""
|
|
formatted = self._format_message(message, extra_data)
|
|
self.logger.debug(formatted)
|
|
|
|
def warning(self, message: str, extra_data: Optional[Dict[str, Any]] = None) -> None:
|
|
"""Log a warning message with correlation."""
|
|
formatted = self._format_message(message, extra_data)
|
|
self.logger.warning(formatted)
|
|
|
|
def error(self, message: str, extra_data: Optional[Dict[str, Any]] = None) -> None:
|
|
"""Log an error message with correlation."""
|
|
formatted = self._format_message(message, extra_data)
|
|
self.logger.error(formatted)
|
|
|
|
|
|
class WebSocketRXTXLogger:
|
|
"""
|
|
Specialized logger for WebSocket RX/TX communication with CMS backend.
|
|
|
|
This logger provides enhanced debugging for WebSocket messages with
|
|
payload inspection, message types, and correlation tracking.
|
|
"""
|
|
|
|
def __init__(self):
|
|
"""Initialize the WebSocket RX/TX logger."""
|
|
self.logger = logging.getLogger("websocket.rxtx")
|
|
self.correlation_logger = CorrelatedLogger("websocket.rxtx.correlated")
|
|
|
|
def log_rx(self, message_data: str, correlation_id: Optional[str] = None) -> Optional[str]:
|
|
"""
|
|
Log incoming WebSocket message (RX).
|
|
|
|
Args:
|
|
message_data: Raw message data received
|
|
correlation_id: Optional correlation ID for tracking
|
|
|
|
Returns:
|
|
Generated or provided correlation ID
|
|
"""
|
|
if correlation_id:
|
|
self.correlation_logger.set_correlation_id(correlation_id)
|
|
else:
|
|
correlation_id = self.correlation_logger.set_correlation_id()
|
|
|
|
# Basic RX logging
|
|
self.logger.info(f"RX <- {message_data}")
|
|
|
|
# Enhanced correlation logging with payload analysis
|
|
try:
|
|
parsed = json.loads(message_data)
|
|
message_type = parsed.get("type", "unknown")
|
|
|
|
extra_data = {
|
|
"direction": "RX",
|
|
"message_type": message_type,
|
|
"size_bytes": len(message_data)
|
|
}
|
|
|
|
# Add specific payload info for important message types
|
|
if message_type == "setSubscriptionList":
|
|
subscriptions = parsed.get("subscriptions", [])
|
|
extra_data["subscription_count"] = len(subscriptions)
|
|
|
|
elif message_type in ["subscribe", "unsubscribe"]:
|
|
payload = parsed.get("payload", {})
|
|
extra_data["subscription_id"] = payload.get("subscriptionIdentifier")
|
|
extra_data["model_id"] = payload.get("modelId")
|
|
extra_data["model_url"] = payload.get("modelUrl", "")[:50] + "..." if len(payload.get("modelUrl", "")) > 50 else payload.get("modelUrl")
|
|
|
|
self.correlation_logger.info(f"MESSAGE RECEIVED: {message_type}", extra_data)
|
|
|
|
except (json.JSONDecodeError, KeyError) as e:
|
|
self.correlation_logger.warning(f"Failed to parse RX message: {e}")
|
|
|
|
return correlation_id
|
|
|
|
def log_tx(self, message_data: Dict[str, Any], correlation_id: Optional[str] = None) -> None:
|
|
"""
|
|
Log outgoing WebSocket message (TX).
|
|
|
|
Args:
|
|
message_data: Message data being sent
|
|
correlation_id: Optional correlation ID for tracking
|
|
"""
|
|
if correlation_id:
|
|
self.correlation_logger.set_correlation_id(correlation_id)
|
|
|
|
# Convert to JSON for logging
|
|
message_json = json.dumps(message_data, separators=(',', ':'))
|
|
|
|
# Basic TX logging
|
|
self.logger.info(f"TX -> {message_json}")
|
|
|
|
# Enhanced correlation logging
|
|
message_type = message_data.get("type", "unknown")
|
|
extra_data = {
|
|
"direction": "TX",
|
|
"message_type": message_type,
|
|
"size_bytes": len(message_json)
|
|
}
|
|
|
|
# Add specific info for important message types
|
|
if message_type == "imageDetection":
|
|
extra_data["subscription_id"] = message_data.get("subscriptionIdentifier")
|
|
extra_data["session_id"] = message_data.get("sessionId")
|
|
detection_data = message_data.get("data", {}).get("detection")
|
|
extra_data["has_detection"] = detection_data is not None
|
|
|
|
elif message_type == "stateReport":
|
|
extra_data["camera_count"] = len(message_data.get("cameraConnections", []))
|
|
extra_data["cpu_usage"] = message_data.get("cpuUsage")
|
|
extra_data["memory_usage"] = message_data.get("memoryUsage")
|
|
|
|
self.correlation_logger.info(f"MESSAGE SENT: {message_type}", extra_data)
|
|
|
|
|
|
class HTTPRequestLogger:
|
|
"""
|
|
Specialized logger for HTTP requests (MPTA downloads, etc.).
|
|
|
|
This logger tracks HTTP requests and responses for debugging
|
|
download issues and CMS backend communication.
|
|
"""
|
|
|
|
def __init__(self):
|
|
"""Initialize the HTTP request logger."""
|
|
self.logger = logging.getLogger("http.requests")
|
|
self.correlation_logger = CorrelatedLogger("http.requests.correlated")
|
|
|
|
def log_request_start(
|
|
self,
|
|
method: str,
|
|
url: str,
|
|
headers: Optional[Dict[str, str]] = None,
|
|
correlation_id: Optional[str] = None
|
|
) -> str:
|
|
"""
|
|
Log the start of an HTTP request.
|
|
|
|
Args:
|
|
method: HTTP method (GET, POST, etc.)
|
|
url: Request URL
|
|
headers: Request headers
|
|
correlation_id: Optional correlation ID for tracking
|
|
|
|
Returns:
|
|
Generated or provided correlation ID
|
|
"""
|
|
if correlation_id:
|
|
self.correlation_logger.set_correlation_id(correlation_id)
|
|
else:
|
|
correlation_id = self.correlation_logger.set_correlation_id()
|
|
|
|
extra_data = {
|
|
"method": method,
|
|
"url": url,
|
|
"start_time": time.time()
|
|
}
|
|
|
|
if headers:
|
|
# Log important headers only
|
|
important_headers = ["content-length", "content-type", "authorization"]
|
|
filtered_headers = {
|
|
k: v for k, v in headers.items()
|
|
if k.lower() in important_headers
|
|
}
|
|
if filtered_headers:
|
|
extra_data["headers"] = filtered_headers
|
|
|
|
self.correlation_logger.info(f"HTTP REQUEST START: {method} {url}", extra_data)
|
|
return correlation_id
|
|
|
|
def log_request_end(
|
|
self,
|
|
status_code: int,
|
|
response_size: Optional[int] = None,
|
|
duration_ms: Optional[float] = None,
|
|
correlation_id: Optional[str] = None
|
|
) -> None:
|
|
"""
|
|
Log the end of an HTTP request.
|
|
|
|
Args:
|
|
status_code: HTTP response status code
|
|
response_size: Response size in bytes
|
|
duration_ms: Request duration in milliseconds
|
|
correlation_id: Correlation ID for tracking
|
|
"""
|
|
if correlation_id:
|
|
self.correlation_logger.set_correlation_id(correlation_id)
|
|
|
|
extra_data = {
|
|
"status_code": status_code,
|
|
"success": 200 <= status_code < 300
|
|
}
|
|
|
|
if response_size is not None:
|
|
extra_data["response_size_bytes"] = response_size
|
|
extra_data["response_size_mb"] = round(response_size / (1024 * 1024), 2)
|
|
|
|
if duration_ms is not None:
|
|
extra_data["duration_ms"] = round(duration_ms, 2)
|
|
|
|
level_func = self.correlation_logger.info if extra_data["success"] else self.correlation_logger.error
|
|
level_func(f"HTTP REQUEST END: {status_code}", extra_data)
|
|
|
|
def log_download_progress(
|
|
self,
|
|
bytes_downloaded: int,
|
|
total_bytes: Optional[int] = None,
|
|
percent_complete: Optional[float] = None,
|
|
correlation_id: Optional[str] = None
|
|
) -> None:
|
|
"""
|
|
Log download progress for large files.
|
|
|
|
Args:
|
|
bytes_downloaded: Number of bytes downloaded so far
|
|
total_bytes: Total file size in bytes
|
|
percent_complete: Percentage complete (0-100)
|
|
correlation_id: Correlation ID for tracking
|
|
"""
|
|
if correlation_id:
|
|
self.correlation_logger.set_correlation_id(correlation_id)
|
|
|
|
extra_data = {
|
|
"bytes_downloaded": bytes_downloaded,
|
|
"mb_downloaded": round(bytes_downloaded / (1024 * 1024), 2)
|
|
}
|
|
|
|
if total_bytes:
|
|
extra_data["total_bytes"] = total_bytes
|
|
extra_data["total_mb"] = round(total_bytes / (1024 * 1024), 2)
|
|
|
|
if percent_complete:
|
|
extra_data["percent_complete"] = round(percent_complete, 1)
|
|
|
|
self.correlation_logger.debug(f"DOWNLOAD PROGRESS", extra_data)
|
|
|
|
|
|
# Global logger instances
|
|
websocket_rxtx_logger = WebSocketRXTXLogger()
|
|
http_request_logger = HTTPRequestLogger()
|
|
|
|
|
|
# Convenience functions
|
|
def get_websocket_logger() -> WebSocketRXTXLogger:
|
|
"""Get the global WebSocket RX/TX logger."""
|
|
return websocket_rxtx_logger
|
|
|
|
|
|
def get_http_logger() -> HTTPRequestLogger:
|
|
"""Get the global HTTP request logger."""
|
|
return http_request_logger
|
|
|
|
|
|
def create_correlation_logger(name: str) -> CorrelatedLogger:
|
|
"""
|
|
Create a new correlated logger with given name.
|
|
|
|
Args:
|
|
name: Logger name
|
|
|
|
Returns:
|
|
New CorrelatedLogger instance
|
|
"""
|
|
return CorrelatedLogger(name) |