Refactor: Logging Cleanup

This commit is contained in:
ziesorx 2025-09-24 20:39:32 +07:00
parent 7a9a149955
commit 5176f99ba7
9 changed files with 37 additions and 72 deletions

View file

@@ -3,7 +3,7 @@ Streaming system for RTSP and HTTP camera feeds.
Provides modular frame readers, buffers, and stream management.
"""
from .readers import RTSPReader, HTTPSnapshotReader
from .buffers import FrameBuffer, CacheBuffer, shared_frame_buffer, shared_cache_buffer, save_frame_for_testing
from .buffers import FrameBuffer, CacheBuffer, shared_frame_buffer, shared_cache_buffer
from .manager import StreamManager, StreamConfig, SubscriptionInfo, shared_stream_manager
__all__ = [
@@ -16,7 +16,6 @@ __all__ = [
'CacheBuffer',
'shared_frame_buffer',
'shared_cache_buffer',
'save_frame_for_testing',
# Manager
'StreamManager',

View file

@@ -67,8 +67,9 @@ class FrameBuffer:
'size_mb': frame.nbytes / (1024 * 1024)
}
logger.debug(f"Stored {stream_type.value} frame for camera {camera_id}: "
f"{frame.shape[1]}x{frame.shape[0]}, {frame.nbytes / (1024 * 1024):.2f}MB")
# Commented out verbose frame storage logging
# logger.debug(f"Stored {stream_type.value} frame for camera {camera_id}: "
# f"{frame.shape[1]}x{frame.shape[0]}, {frame.nbytes / (1024 * 1024):.2f}MB")
def get_frame(self, camera_id: str) -> Optional[np.ndarray]:
"""Get the latest frame for the given camera ID."""
@@ -400,31 +401,3 @@ shared_frame_buffer = FrameBuffer(max_age_seconds=5)
shared_cache_buffer = CacheBuffer(max_age_seconds=10)
def save_frame_for_testing(camera_id: str, frame: np.ndarray, test_dir: str = "test_frames"):
    """Write a single frame to *test_dir* as a JPEG for manual verification.

    Best-effort diagnostics helper: any failure is logged and swallowed so
    callers on the frame path are never interrupted.
    """
    import os
    try:
        os.makedirs(test_dir, exist_ok=True)
        # Millisecond timestamp keeps successive frames from one camera unique.
        millis = int(time.time() * 1000)
        filepath = os.path.join(test_dir, f"{camera_id}_{millis}.jpg")

        h, w = frame.shape[:2]
        # Frames at least 2000px wide get a higher JPEG quality setting.
        quality = 95 if w >= 2000 else 90

        if cv2.imwrite(filepath, frame, [cv2.IMWRITE_JPEG_QUALITY, quality]):
            size_kb = os.path.getsize(filepath) / 1024
            logger.info(f"Saved test frame: {filepath} ({w}x{h}, {size_kb:.1f}KB)")
        else:
            logger.error(f"Failed to save test frame: {filepath}")
    except Exception as e:
        logger.error(f"Error saving test frame for camera {camera_id}: {e}")

View file

@@ -10,7 +10,7 @@ from dataclasses import dataclass
from collections import defaultdict
from .readers import RTSPReader, HTTPSnapshotReader
from .buffers import shared_cache_buffer, save_frame_for_testing, StreamType
from .buffers import shared_cache_buffer, StreamType
from ..tracking.integration import TrackingPipelineIntegration
@@ -25,7 +25,6 @@ class StreamConfig:
snapshot_url: Optional[str] = None
snapshot_interval: int = 5000 # milliseconds
max_retries: int = 3
save_test_frames: bool = False
@dataclass
@@ -184,13 +183,6 @@ class StreamManager:
# Store frame in shared buffer with stream type
shared_cache_buffer.put_frame(camera_id, frame, stream_type)
# Save test frames if enabled for any subscription
with self._lock:
for subscription_id in self._camera_subscribers[camera_id]:
subscription_info = self._subscriptions[subscription_id]
if subscription_info.stream_config.save_test_frames:
save_frame_for_testing(camera_id, frame)
break # Only save once per frame
# Process tracking for subscriptions with tracking integration
self._process_tracking_for_camera(camera_id, frame)
@@ -349,7 +341,6 @@ class StreamManager:
snapshot_url=payload.get('snapshotUrl'),
snapshot_interval=payload.get('snapshotInterval', 5000),
max_retries=3,
save_test_frames=True # Enable for testing
)
return self.add_subscription(