# python-detector-worker/core/logging/session_logger.py
"""
Per-Session Logging Configuration and Management.
Each session process gets its own dedicated log file with rotation support.
"""
import logging
import logging.handlers
import re
import sys
from datetime import datetime
from pathlib import Path
from typing import Optional


class PerSessionLogger:
"""
Per-session logging configuration that creates dedicated log files for each session.
Supports log rotation and structured logging with session context.
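
    Typical usage (illustrative; the identifiers are made-up examples):

        session_logger = PerSessionLogger("session-1", "display-001;cam-1")
        logger = session_logger.get_logger()
        logger.info("This line goes to the session's dedicated log file")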
"""

    def __init__(
        self,
        session_id: str,
        subscription_identifier: str,
        log_dir: str = "logs",
        max_size_mb: int = 100,
        backup_count: int = 5,
        log_level: int = logging.INFO,
        detection_mode: bool = True
    ):
        """
        Initialize per-session logger.

        Args:
            session_id: Unique session identifier
            subscription_identifier: Subscription identifier (contains camera info)
            log_dir: Directory to store log files
            max_size_mb: Maximum size of each log file in MB
            backup_count: Number of backup files to keep
            log_level: Logging level
            detection_mode: If True, uses reduced verbosity for detection processes
        """
        self.session_id = session_id
        self.subscription_identifier = subscription_identifier
        self.log_dir = Path(log_dir)
        self.max_size_mb = max_size_mb
        self.backup_count = backup_count
        self.log_level = log_level
        self.detection_mode = detection_mode

        # Ensure log directory exists
        self.log_dir.mkdir(parents=True, exist_ok=True)

        # Generate clean filename from subscription identifier
        self.log_filename = self._generate_log_filename()
        self.log_filepath = self.log_dir / self.log_filename

        # Create logger
        self.logger = self._setup_logger()

    def _generate_log_filename(self) -> str:
        """
        Generate a clean filename from subscription identifier.
        Format: detector_worker_camera_{clean_subscription_id}.log

        Returns:
            Clean filename for the log file
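
        Example (illustrative input/output):
            "display-001;cam:1" -> "detector_worker_camera_display-001_cam_1.log"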
"""
# Clean subscription identifier for filename
# Replace problematic characters with underscores
clean_sub_id = re.sub(r'[^\w\-_.]', '_', self.subscription_identifier)
# Remove consecutive underscores
clean_sub_id = re.sub(r'_+', '_', clean_sub_id)
# Remove leading/trailing underscores
clean_sub_id = clean_sub_id.strip('_')
# Generate filename
filename = f"detector_worker_camera_{clean_sub_id}.log"
return filename

    def _setup_logger(self) -> logging.Logger:
        """
        Setup logger with file handler and rotation.

        Returns:
            Configured logger instance
        """
        # Create logger with a unique per-session name
        logger_name = f"session_worker_{self.session_id}"
        logger = logging.getLogger(logger_name)

        # Clear any existing handlers to avoid duplicates
        logger.handlers.clear()
        logger.setLevel(self.log_level)

        # Create formatter with the session context baked into the format string.
        # Escape any literal '%' in the identifiers so %-style formatting stays valid.
        session_ctx = self.session_id.replace('%', '%%')
        camera_ctx = self.subscription_identifier.replace('%', '%%')
        formatter = logging.Formatter(
            fmt=f'%(asctime)s [%(levelname)s] %(name)s [Session: {session_ctx}] [Camera: {camera_ctx}]: %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

        # Create rotating file handler
        max_bytes = self.max_size_mb * 1024 * 1024  # Convert MB to bytes
        file_handler = logging.handlers.RotatingFileHandler(
            filename=self.log_filepath,
            maxBytes=max_bytes,
            backupCount=self.backup_count,
            encoding='utf-8'
        )
        file_handler.setLevel(self.log_level)
        file_handler.setFormatter(formatter)

        # Console handler for debugging: only warnings and errors reach stdout
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(logging.WARNING)
        console_formatter = logging.Formatter(
            fmt=f'[{session_ctx}] [%(levelname)s]: %(message)s'
        )
        console_handler.setFormatter(console_formatter)

        # Add handlers and prevent propagation to the root logger
        logger.addHandler(file_handler)
        logger.addHandler(console_handler)
        logger.propagate = False

        # Log initialization (reduced verbosity in detection mode)
        if self.detection_mode:
            logger.info(f"Session logger ready for {self.subscription_identifier}")
        else:
            logger.info("Per-session logger initialized")
            logger.info(f"Log file: {self.log_filepath}")
            logger.info(f"Session ID: {self.session_id}")
            logger.info(f"Camera: {self.subscription_identifier}")
            logger.info(f"Max size: {self.max_size_mb}MB, Backup count: {self.backup_count}")

        return logger

    def get_logger(self) -> logging.Logger:
        """
        Get the configured logger instance.

        Returns:
            Logger instance for this session
        """
        return self.logger

    def log_session_start(self, process_id: int):
        """
        Log session start with process information.

        Args:
            process_id: Process ID of the session worker
        """
        if self.detection_mode:
            self.logger.info(f"Session started - PID {process_id}")
        else:
            self.logger.info("=" * 60)
            self.logger.info("SESSION STARTED")
            self.logger.info(f"Process ID: {process_id}")
            self.logger.info(f"Session ID: {self.session_id}")
            self.logger.info(f"Camera: {self.subscription_identifier}")
            self.logger.info(f"Timestamp: {datetime.now().isoformat()}")
            self.logger.info("=" * 60)

    def log_session_end(self):
        """Log session end (honors detection_mode, mirroring log_session_start)."""
        if self.detection_mode:
            self.logger.info("Session ended")
        else:
            self.logger.info("=" * 60)
            self.logger.info("SESSION ENDED")
            self.logger.info(f"Timestamp: {datetime.now().isoformat()}")
            self.logger.info("=" * 60)

    def log_model_loading(self, model_id: int, model_name: str, model_path: str):
        """
        Log model loading information.

        Args:
            model_id: Model ID
            model_name: Model name
            model_path: Path to the model
        """
        if self.detection_mode:
            self.logger.info(f"Loading model {model_id}: {model_name}")
        else:
            self.logger.info("-" * 40)
            self.logger.info("MODEL LOADING")
            self.logger.info(f"Model ID: {model_id}")
            self.logger.info(f"Model Name: {model_name}")
            self.logger.info(f"Model Path: {model_path}")
            self.logger.info("-" * 40)

    def log_frame_processing(self, frame_count: int, processing_time: float, detections: int):
        """
        Log frame processing information.

        Args:
            frame_count: Current frame count
            processing_time: Processing time in seconds
            detections: Number of detections found
        """
        self.logger.debug(f"FRAME #{frame_count}: Processing time: {processing_time:.3f}s, Detections: {detections}")

    def log_detection_result(self, detection_type: str, confidence: float, bbox: list):
        """
        Log detection result.

        Args:
            detection_type: Type of detection (e.g., "Car", "Frontal")
            confidence: Detection confidence
            bbox: Bounding box coordinates
        """
        self.logger.info(f"DETECTION: {detection_type} (conf: {confidence:.3f}) at {bbox}")

    def log_database_operation(self, operation: str, session_id: str, success: bool):
        """
        Log database operation.

        Args:
            operation: Type of operation
            session_id: Session ID used in database
            success: Whether operation succeeded
        """
        status = "SUCCESS" if success else "FAILED"
        self.logger.info(f"DATABASE {operation}: {status} (session: {session_id})")

    def log_error(self, error_type: str, error_message: str, traceback_str: Optional[str] = None):
        """
        Log error with context.

        Args:
            error_type: Type of error
            error_message: Error message
            traceback_str: Optional traceback string
        """
        self.logger.error(f"ERROR [{error_type}]: {error_message}")
        if traceback_str:
            self.logger.error(f"Traceback:\n{traceback_str}")

    def get_log_stats(self) -> dict:
        """
        Get logging statistics.

        Returns:
            Dictionary with logging statistics
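
        Example shape (illustrative values):
            {'log_file': 'logs/detector_worker_camera_display-001_cam-1.log',
             'file_size_mb': 0.12,
             'created': '2025-09-25T20:52:26',
             'modified': '2025-09-25T20:53:01'}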
"""
try:
if self.log_filepath.exists():
stat = self.log_filepath.stat()
return {
'log_file': str(self.log_filepath),
'file_size_mb': round(stat.st_size / (1024 * 1024), 2),
'created': datetime.fromtimestamp(stat.st_ctime).isoformat(),
'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(),
}
else:
return {'log_file': str(self.log_filepath), 'status': 'not_created'}
except Exception as e:
return {'log_file': str(self.log_filepath), 'error': str(e)}

    def cleanup(self):
        """Cleanup logger handlers."""
        if hasattr(self, 'logger') and self.logger:
            for handler in self.logger.handlers[:]:
                handler.close()
                self.logger.removeHandler(handler)


class MainProcessLogger:
    """
    Logger configuration for the main FastAPI process.
    Separate from session logs to avoid confusion.
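
    Typical usage (illustrative):

        MainProcessLogger(log_dir="logs")
        logging.getLogger("detector_worker").info("main process ready")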
"""

    def __init__(self, log_dir: str = "logs", max_size_mb: int = 50, backup_count: int = 3):
        """
        Initialize main process logger.

        Args:
            log_dir: Directory to store log files
            max_size_mb: Maximum size of each log file in MB
            backup_count: Number of backup files to keep
        """
        self.log_dir = Path(log_dir)
        self.max_size_mb = max_size_mb
        self.backup_count = backup_count

        # Ensure log directory exists
        self.log_dir.mkdir(parents=True, exist_ok=True)

        # Setup main process logger
        self._setup_main_logger()

    def _setup_main_logger(self):
        """Setup main process logger."""
        # Configure the application's top-level "detector_worker" logger
        # (not the actual root logger, so third-party logging is unaffected)
        app_logger = logging.getLogger("detector_worker")

        # Clear existing handlers
        for handler in app_logger.handlers[:]:
            app_logger.removeHandler(handler)
        app_logger.setLevel(logging.INFO)

        # Create formatter
        formatter = logging.Formatter(
            fmt='%(asctime)s [%(levelname)s] %(name)s [MAIN]: %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

        # Create rotating file handler for main process
        max_bytes = self.max_size_mb * 1024 * 1024
        main_log_path = self.log_dir / "detector_worker_main.log"
        file_handler = logging.handlers.RotatingFileHandler(
            filename=main_log_path,
            maxBytes=max_bytes,
            backupCount=self.backup_count,
            encoding='utf-8'
        )
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(formatter)

        # Create console handler
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(formatter)

        # Add handlers
        app_logger.addHandler(file_handler)
        app_logger.addHandler(console_handler)

        # Log initialization
        app_logger.info("Main process logger initialized")
        app_logger.info(f"Main log file: {main_log_path}")


def setup_main_process_logging(log_dir: str = "logs") -> MainProcessLogger:
    """
    Setup logging for the main FastAPI process.

    Args:
        log_dir: Directory to store log files

    Returns:
        The configured MainProcessLogger instance
    """
    return MainProcessLogger(log_dir=log_dir)
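

# Minimal smoke-test sketch, assuming it is run from the repo root. The session
# and camera identifiers below are made-up examples, not real deployment values.
if __name__ == "__main__":
    import os  # used only by this demo

    setup_main_process_logging(log_dir="logs")

    session_logger = PerSessionLogger(
        session_id="session-1",
        subscription_identifier="display-001;cam-1",
    )
    logger = session_logger.get_logger()
    session_logger.log_session_start(process_id=os.getpid())
    logger.info("Example message written to %s", session_logger.log_filepath)
    session_logger.log_session_end()
    session_logger.cleanup()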