Merge pull request 'fix: tracking and abandon and add fallback' (#24) from dev into main
Some checks failed
Build Worker Base and Application Images / deploy-stack (push) Blocked by required conditions
Build Worker Base and Application Images / check-base-changes (push) Successful in 9s
Build Worker Base and Application Images / build-base (push) Has been skipped
Build Worker Base and Application Images / build-docker (push) Has been cancelled
Reviewed-on: #24
Commit: a50b3dbcdf
4 changed files with 130 additions and 50 deletions
app.py (9 changed lines)
@@ -201,10 +201,11 @@ else:
 os.makedirs("models", exist_ok=True)
 logger.info("Ensured models directory exists")
 
-# Initialize stream manager with config value
-from core.streaming import initialize_stream_manager
-initialize_stream_manager(max_streams=config.get('max_streams', 10))
-logger.info(f"Initialized stream manager with max_streams={config.get('max_streams', 10)}")
+# Stream manager already initialized at module level with max_streams=20
+# Calling initialize_stream_manager() creates a NEW instance, breaking references
+# from core.streaming import initialize_stream_manager
+# initialize_stream_manager(max_streams=config.get('max_streams', 10))
+logger.info(f"Using stream manager with max_streams=20 (module-level initialization)")
 
 # Frames are now stored in the shared cache buffer from core.streaming.buffers
 # latest_frames = {} # Deprecated - using shared_cache_buffer instead
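The comment in the new code names the core issue: rebinding a module-level singleton strands every module that grabbed a reference to the old instance at import time. A minimal sketch of that failure mode, with hypothetical names rather than the project's actual module:

# sketch.py — why re-initializing a module-level singleton breaks references
# (hypothetical names, not the real core.streaming module)

class StreamManager:
    def __init__(self, max_streams: int):
        self.max_streams = max_streams

# Module-level singleton, created once at import time.
stream_manager = StreamManager(max_streams=20)

def initialize_stream_manager(max_streams: int):
    # Rebinding the global creates a NEW object; anyone who already
    # imported the old one still holds the old object.
    global stream_manager
    stream_manager = StreamManager(max_streams=max_streams)
    return stream_manager

# Simulate another module that imported the singleton earlier:
held_reference = stream_manager
initialize_stream_manager(max_streams=10)

print(held_reference is stream_manager)  # False — references diverged
print(held_reference.max_streams)        # 20, not 10

Commenting out the re-initialization, as this hunk does, keeps every importer on the single module-level instance.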
@@ -64,6 +64,10 @@ class DetectionPipeline:
 # SessionId to processing results mapping (for combining with license plate results)
 self.session_processing_results = {}
 
+# Field mappings from parallelActions (e.g., {"car_brand": "{car_brand_cls_v3.brand}"})
+self.field_mappings = {}
+self._parse_field_mappings()
+
 # Statistics
 self.stats = {
     'detections_processed': 0,
@@ -74,6 +78,25 @@ class DetectionPipeline:
 
     logger.info("DetectionPipeline initialized")
 
+def _parse_field_mappings(self):
+    """
+    Parse field mappings from parallelActions.postgresql_update_combined.fields.
+    Extracts mappings like {"car_brand": "{car_brand_cls_v3.brand}"} for dynamic field resolution.
+    """
+    try:
+        if not self.pipeline_config or not hasattr(self.pipeline_config, 'parallel_actions'):
+            return
+
+        for action in self.pipeline_config.parallel_actions:
+            if action.type.value == 'postgresql_update_combined':
+                fields = action.params.get('fields', {})
+                self.field_mappings = fields
+                logger.info(f"[FIELD MAPPINGS] Parsed from pipeline config: {self.field_mappings}")
+                break
+
+    except Exception as e:
+        logger.error(f"Error parsing field mappings: {e}", exc_info=True)
+
 async def initialize(self) -> bool:
     """
     Initialize all pipeline components including models, Redis, and database.
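For context, `_parse_field_mappings()` expects the pipeline config to carry a `postgresql_update_combined` action whose `fields` dict maps database columns to branch templates. A sketch of the shape it consumes — the exact config schema is an assumption here, since this diff does not show it:

# Assumed shape of the parallelActions config read by _parse_field_mappings();
# the real schema lives in the pipeline config, which this diff does not show.
parallel_actions = [
    {
        "type": "postgresql_update_combined",
        "params": {
            "fields": {
                "car_brand": "{car_brand_cls_v3.brand}",
                "body_type": "{car_bodytype_cls_v1.body_type}",
            }
        },
    }
]
# After parsing, self.field_mappings would hold the fields dict above.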
@@ -165,6 +188,44 @@ class DetectionPipeline:
     logger.error(f"Error initializing detection model: {e}", exc_info=True)
     return False
 
+def _extract_fields_from_branches(self, branch_results: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Extract fields dynamically from branch results using field mappings.
+
+    Args:
+        branch_results: Dictionary of branch execution results
+
+    Returns:
+        Dictionary with extracted field values (e.g., {"car_brand": "Honda", "body_type": "Sedan"})
+    """
+    extracted = {}
+
+    try:
+        for db_field_name, template in self.field_mappings.items():
+            # Parse template like "{car_brand_cls_v3.brand}" -> branch_id="car_brand_cls_v3", field="brand"
+            if template.startswith('{') and template.endswith('}'):
+                var_name = template[1:-1]
+                if '.' in var_name:
+                    branch_id, field_name = var_name.split('.', 1)
+
+                    # Look up value in branch_results
+                    if branch_id in branch_results:
+                        branch_data = branch_results[branch_id]
+                        if isinstance(branch_data, dict) and 'result' in branch_data:
+                            result_data = branch_data['result']
+                            if isinstance(result_data, dict) and field_name in result_data:
+                                extracted[field_name] = result_data[field_name]
+                                logger.debug(f"[DYNAMIC EXTRACT] {field_name}={result_data[field_name]} from branch {branch_id}")
+                            else:
+                                logger.debug(f"[DYNAMIC EXTRACT] Field '{field_name}' not found in branch {branch_id}")
+                    else:
+                        logger.debug(f"[DYNAMIC EXTRACT] Branch '{branch_id}' not in results")
+
+    except Exception as e:
+        logger.error(f"Error extracting fields from branches: {e}", exc_info=True)
+
+    return extracted
+
 async def _on_license_plate_result(self, session_id: str, license_data: Dict[str, Any]):
     """
     Callback for handling license plate results from LPR service.
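A standalone sketch of the template resolution that `_extract_fields_from_branches()` performs, runnable outside the pipeline (the branch_results payload below is invented for illustration):

# Standalone sketch of the dynamic extraction above; sample data invented.
field_mappings = {"car_brand": "{car_brand_cls_v3.brand}"}
branch_results = {"car_brand_cls_v3": {"result": {"brand": "Honda"}}}

extracted = {}
for db_field, template in field_mappings.items():
    if template.startswith('{') and template.endswith('}'):
        var_name = template[1:-1]                  # "car_brand_cls_v3.brand"
        if '.' in var_name:
            branch_id, field_name = var_name.split('.', 1)
            result = branch_results.get(branch_id, {}).get('result', {})
            if field_name in result:
                extracted[field_name] = result[field_name]

print(extracted)  # {'brand': 'Honda'}

Note that the patch keys the result by the branch field name (`brand`), not the database column (`car_brand`), which is why the callers below read `extracted_fields.get('brand')` rather than `extracted_fields.get('car_brand')`.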
@@ -272,12 +333,12 @@ class DetectionPipeline:
 branch_results = self.session_processing_results[session_id_for_lookup]
 logger.info(f"[LICENSE PLATE] Retrieved processing results for session {session_id_for_lookup}")
 
-if 'car_brand_cls_v2' in branch_results:
-    brand_result = branch_results['car_brand_cls_v2'].get('result', {})
-    car_brand = brand_result.get('brand')
-if 'car_bodytype_cls_v1' in branch_results:
-    bodytype_result = branch_results['car_bodytype_cls_v1'].get('result', {})
-    body_type = bodytype_result.get('body_type')
+# Extract fields dynamically using field mappings from pipeline config
+extracted_fields = self._extract_fields_from_branches(branch_results)
+car_brand = extracted_fields.get('brand')
+body_type = extracted_fields.get('body_type')
+
+logger.info(f"[LICENSE PLATE] Extracted fields: brand={car_brand}, body_type={body_type}")
 
 # Clean up stored results after use
 del self.session_processing_results[session_id_for_lookup]
@@ -1003,7 +1064,7 @@ class DetectionPipeline:
 Resolve field template using branch results and context.
 
 Args:
-    template: Template string like "{car_brand_cls_v2.brand}"
+    template: Template string like "{car_brand_cls_v3.brand}"
     branch_results: Dictionary of branch execution results
     context: Detection context
 
@@ -1015,7 +1076,7 @@ class DetectionPipeline:
 if template.startswith('{') and template.endswith('}'):
     var_name = template[1:-1]
 
-    # Check for branch result reference (e.g., "car_brand_cls_v2.brand")
+    # Check for branch result reference (e.g., "car_brand_cls_v3.brand")
     if '.' in var_name:
         branch_id, field_name = var_name.split('.', 1)
         if branch_id in branch_results:
@@ -1061,17 +1122,10 @@ class DetectionPipeline:
 logger.warning("No session_id in context for processing results")
 return
 
-# Extract car brand from car_brand_cls_v2 results
-car_brand = None
-if 'car_brand_cls_v2' in branch_results:
-    brand_result = branch_results['car_brand_cls_v2'].get('result', {})
-    car_brand = brand_result.get('brand')
-
-# Extract body type from car_bodytype_cls_v1 results
-body_type = None
-if 'car_bodytype_cls_v1' in branch_results:
-    bodytype_result = branch_results['car_bodytype_cls_v1'].get('result', {})
-    body_type = bodytype_result.get('body_type')
+# Extract fields dynamically using field mappings from pipeline config
+extracted_fields = self._extract_fields_from_branches(branch_results)
+car_brand = extracted_fields.get('brand')
+body_type = extracted_fields.get('body_type')
 
 logger.info(f"[PROCESSING RESULTS] Completed for session {session_id}: "
             f"brand={car_brand}, bodyType={body_type}")
@@ -85,8 +85,9 @@ class StreamManager:
 
 with self._round_robin_lock:
     if camera_id not in self._camera_list:
         self._camera_list.append(camera_id)
-logger.info(f"Created tracking queue for camera {camera_id}")
+        logger.info(f"Created tracking queue for camera {camera_id}")
+    else:
+        logger.debug(f"Camera {camera_id} already has tracking queue")
 
 def _remove_camera_queue(self, camera_id: str):
     """Remove tracking queue for a camera that's no longer active."""
@@ -153,6 +154,10 @@ class StreamManager:
 if not success:
     self._remove_subscription_internal(subscription_id)
     return False
+else:
+    # Stream already exists, but ensure queue exists too
+    logger.info(f"Stream already exists for {camera_id}, ensuring queue exists")
+    self._ensure_camera_queue(camera_id)
 
 logger.info(f"Added subscription {subscription_id} for camera {camera_id} "
             f"({len(self._camera_subscribers[camera_id])} total subscribers)")
@@ -367,29 +372,31 @@ class StreamManager:
 def _get_next_camera_item(self):
     """Get next item from camera queues using round-robin scheduling."""
     with self._round_robin_lock:
-        if not self._camera_list:
+        # Get current list of cameras from actual tracking queues (central state)
+        camera_list = list(self._tracking_queues.keys())
+
+        if not camera_list:
             return None, None
 
         attempts = 0
-        max_attempts = len(self._camera_list)
+        max_attempts = len(camera_list)
 
         while attempts < max_attempts:
-            # Get current camera
-            if self._camera_round_robin_index >= len(self._camera_list):
+            # Get current camera using round-robin index
+            if self._camera_round_robin_index >= len(camera_list):
                 self._camera_round_robin_index = 0
 
-            camera_id = self._camera_list[self._camera_round_robin_index]
+            camera_id = camera_list[self._camera_round_robin_index]
 
             # Move to next camera for next call
-            self._camera_round_robin_index = (self._camera_round_robin_index + 1) % len(self._camera_list)
+            self._camera_round_robin_index = (self._camera_round_robin_index + 1) % len(camera_list)
 
             # Try to get item from this camera's queue
-            if camera_id in self._tracking_queues:
-                try:
-                    item = self._tracking_queues[camera_id].get_nowait()
-                    return camera_id, item
-                except queue.Empty:
-                    pass  # Try next camera
+            try:
+                item = self._tracking_queues[camera_id].get_nowait()
+                return camera_id, item
+            except queue.Empty:
+                pass  # Try next camera
 
             attempts += 1
 
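The scheduling change is subtle: the old code iterated a separately maintained `self._camera_list`, which could drift out of sync with `self._tracking_queues` (a queue removed while its list entry lingered), wasting attempts on dead cameras. Deriving the camera list from the queue dict on every call anchors the round-robin to the real state. A minimal sketch of the pattern, with names simplified for illustration:

import queue

# Minimal sketch of round-robin over a live dict of queues, mirroring the
# patched _get_next_camera_item(); names simplified for illustration.
tracking_queues = {"cam1": queue.Queue(), "cam2": queue.Queue()}
tracking_queues["cam2"].put("frame-42")
rr_index = 0

def get_next_item():
    global rr_index
    camera_list = list(tracking_queues.keys())  # snapshot of the real state
    if not camera_list:
        return None, None
    for _ in range(len(camera_list)):
        rr_index %= len(camera_list)            # stays valid as cameras come and go
        camera_id = camera_list[rr_index]
        rr_index += 1
        try:
            return camera_id, tracking_queues[camera_id].get_nowait()
        except queue.Empty:
            continue                            # try the next camera
    return None, None

print(get_next_item())  # ('cam2', 'frame-42')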
@@ -404,7 +411,12 @@ class StreamManager:
 for subscription_id in subscription_ids:
     subscription_info = self._subscriptions.get(subscription_id)
 
-    if not subscription_info or not subscription_info.tracking_integration:
+    if not subscription_info:
+        logger.warning(f"No subscription info found for {subscription_id}")
+        continue
+
+    if not subscription_info.tracking_integration:
+        logger.debug(f"No tracking integration for {subscription_id} (camera {camera_id}), skipping inference")
         continue
 
     display_id = subscription_id.split(';')[0] if ';' in subscription_id else subscription_id
@@ -220,8 +220,10 @@ class TrackingPipelineIntegration:
 )
 
 # Update last detection time for abandonment detection
+# Update when vehicles ARE detected, so when they leave, timestamp ages
 if tracked_vehicles:
     self.last_detection_time[display_id] = time.time()
+    logger.debug(f"Updated last_detection_time for {display_id}: {len(tracked_vehicles)} vehicles")
 
 # Check for car abandonment (vehicle left after getting car_wait_staff stage)
 await self._check_car_abandonment(display_id, subscription_id)
@@ -521,19 +523,23 @@ class TrackingPipelineIntegration:
 logger.warning(f"No pending processing data found for display {display_id} when setting session {session_id}")
 
 # FALLBACK: Execute pipeline for POS-initiated sessions
-# Use stored subscription_id instead of creating fake one
-stored_subscription_id = self.display_to_subscription.get(display_id)
-if stored_subscription_id:
-    logger.info(f"[FALLBACK] Triggering fallback pipeline for session {session_id} on display {display_id} with subscription {stored_subscription_id}")
-
-    # Trigger the fallback pipeline asynchronously with real subscription_id
-    asyncio.create_task(self._execute_fallback_pipeline(
-        display_id=display_id,
-        session_id=session_id,
-        subscription_id=stored_subscription_id
-    ))
-else:
-    logger.error(f"[FALLBACK] No subscription_id stored for display {display_id}, cannot execute fallback pipeline")
+# Skip if session_id is None (no car present or car has left)
+if session_id is not None:
+    # Use stored subscription_id instead of creating fake one
+    stored_subscription_id = self.display_to_subscription.get(display_id)
+    if stored_subscription_id:
+        logger.info(f"[FALLBACK] Triggering fallback pipeline for session {session_id} on display {display_id} with subscription {stored_subscription_id}")
+
+        # Trigger the fallback pipeline asynchronously with real subscription_id
+        asyncio.create_task(self._execute_fallback_pipeline(
+            display_id=display_id,
+            session_id=session_id,
+            subscription_id=stored_subscription_id
+        ))
+    else:
+        logger.error(f"[FALLBACK] No subscription_id stored for display {display_id}, cannot execute fallback pipeline")
+else:
+    logger.debug(f"[FALLBACK] Skipping pipeline execution for session_id=None on display {display_id}")
 
 def clear_session_id(self, session_id: str):
     """
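The added `session_id is not None` guard keeps the fallback pipeline from firing when the session-ID update path is invoked with no car present (or after the car has left). A compact sketch of the guard ordering and the fire-and-forget dispatch; the surrounding function and stub below are hypothetical stand-ins, not the project's actual code:

import asyncio

# Compact sketch of the guarded fire-and-forget fallback dispatch;
# _execute_fallback_pipeline here is a hypothetical stand-in stub.
async def _execute_fallback_pipeline(display_id, session_id, subscription_id):
    print(f"fallback pipeline: {display_id}/{session_id}/{subscription_id}")

def on_session_id(display_id, session_id, display_to_subscription):
    if session_id is None:
        return  # no car present or car has left — nothing to process
    subscription_id = display_to_subscription.get(display_id)
    if subscription_id is None:
        return  # cannot run the pipeline without a real subscription_id
    # Schedule without awaiting so the caller is not blocked.
    asyncio.create_task(_execute_fallback_pipeline(
        display_id, session_id, subscription_id))

async def main():
    on_session_id("display-1", "sess-9", {"display-1": "display-1;cam-1"})
    await asyncio.sleep(0)  # let the scheduled task run

asyncio.run(main())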
@@ -628,10 +634,16 @@ class TrackingPipelineIntegration:
 last_detection = self.last_detection_time.get(session_display, 0)
 time_since_detection = current_time - last_detection
 
+logger.info(f"[ABANDON CHECK] Session {session_id} (display: {session_display}): "
+            f"time_since_detection={time_since_detection:.1f}s, "
+            f"timeout={self.abandonment_timeout}s")
+
 if time_since_detection > self.abandonment_timeout:
-    logger.info(f"Car abandonment detected: session {session_id}, "
-                f"no detection for {time_since_detection:.1f}s")
+    logger.warning(f"🚨 Car abandonment detected: session {session_id}, "
+                   f"no detection for {time_since_detection:.1f}s")
     abandoned_sessions.append(session_id)
+else:
+    logger.debug(f"[ABANDON CHECK] Session {session_id} has no associated display")
 
 # Send abandonment detection for each abandoned session
 for session_id in abandoned_sessions:
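The abandonment check is a timeout on the age of the last detection: the hunk at -220 above refreshes `last_detection_time[display_id]` only while vehicles are tracked, so once a car leaves, the timestamp ages until it crosses `abandonment_timeout`. A minimal sketch of that aging logic, with invented values:

import time

# Minimal sketch of the timeout-based abandonment check; values invented.
abandonment_timeout = 30.0                               # seconds without a detection
last_detection_time = {"display-1": time.time() - 45.0}  # last car seen 45s ago

def check_abandonment(display_id: str) -> bool:
    last_seen = last_detection_time.get(display_id, 0)
    time_since_detection = time.time() - last_seen
    return time_since_detection > abandonment_timeout

print(check_abandonment("display-1"))  # True — 45s > 30s timeout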
@@ -639,6 +651,7 @@ class TrackingPipelineIntegration:
 # Remove from progression stages to avoid repeated detection
 if session_id in self.progression_stages:
     del self.progression_stages[session_id]
+    logger.info(f"[ABANDON] Removed session {session_id} from progression_stages after notification")
 
 async def _send_abandonment_detection(self, subscription_id: str, session_id: str):
     """