feat/tracking and save in redis finished

parent 3a4a27ca68
commit 5873945115
8 changed files with 393 additions and 245 deletions

app.py
@@ -118,15 +118,34 @@ def download_mpta(url: str, dest_path: str) -> str:
 def fetch_snapshot(url: str):
     try:
         from requests.auth import HTTPBasicAuth, HTTPDigestAuth
+        import requests.adapters
+        import urllib3
 
         # Parse URL to extract credentials
         parsed = urlparse(url)
 
-        # Prepare headers - some cameras require User-Agent
+        # Prepare headers - some cameras require User-Agent and specific headers
         headers = {
-            'User-Agent': 'Mozilla/5.0 (compatible; DetectorWorker/1.0)'
+            'User-Agent': 'Mozilla/5.0 (compatible; DetectorWorker/1.0)',
+            'Accept': 'image/jpeg,image/*,*/*',
+            'Connection': 'close',
+            'Cache-Control': 'no-cache'
         }
+
+        # Create a session with custom adapter for better connection handling
+        session = requests.Session()
+        adapter = requests.adapters.HTTPAdapter(
+            pool_connections=1,
+            pool_maxsize=1,
+            max_retries=urllib3.util.retry.Retry(
+                total=2,
+                backoff_factor=0.1,
+                status_forcelist=[500, 502, 503, 504]
+            )
+        )
+        session.mount('http://', adapter)
+        session.mount('https://', adapter)
 
         # Reconstruct URL without credentials
         clean_url = f"{parsed.scheme}://{parsed.hostname}"
         if parsed.port:
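Note on the new session setup: the `urllib3.util.retry.Retry` object given to `HTTPAdapter` only retries connection-level errors and the listed 5xx responses, with short sleeps between attempts (roughly 0.1s then 0.2s; the exact schedule depends on the urllib3 version). A minimal standalone sketch of the same pattern, with a hypothetical camera URL:

import requests
import requests.adapters
from urllib3.util.retry import Retry

session = requests.Session()
adapter = requests.adapters.HTTPAdapter(
    pool_connections=1,  # a single camera host needs only one connection pool
    pool_maxsize=1,
    max_retries=Retry(
        total=2,                                # at most 2 automatic retries
        backoff_factor=0.1,                     # short sleep between attempts
        status_forcelist=[500, 502, 503, 504],  # retry only transient 5xx errors
    ),
)
session.mount('http://', adapter)   # apply the adapter to both schemes
session.mount('https://', adapter)

# resp = session.get('http://camera.example/snapshot.jpg', timeout=(5, 15))  # hypothetical URL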
@@ -136,44 +155,68 @@ def fetch_snapshot(url: str):
             clean_url += f"?{parsed.query}"
 
         auth = None
+        response = None
 
         if parsed.username and parsed.password:
             # Try HTTP Digest authentication first (common for IP cameras)
             try:
                 auth = HTTPDigestAuth(parsed.username, parsed.password)
-                response = requests.get(clean_url, auth=auth, headers=headers, timeout=10)
+                response = session.get(clean_url, auth=auth, headers=headers, timeout=(5, 15), stream=True)
                 if response.status_code == 200:
                     logger.debug(f"Successfully authenticated using HTTP Digest for {clean_url}")
                 elif response.status_code == 401:
                     # If Digest fails, try Basic auth
                     logger.debug(f"HTTP Digest failed, trying Basic auth for {clean_url}")
                     auth = HTTPBasicAuth(parsed.username, parsed.password)
-                    response = requests.get(clean_url, auth=auth, headers=headers, timeout=10)
+                    response = session.get(clean_url, auth=auth, headers=headers, timeout=(5, 15), stream=True)
                     if response.status_code == 200:
                         logger.debug(f"Successfully authenticated using HTTP Basic for {clean_url}")
             except Exception as auth_error:
                 logger.debug(f"Authentication setup error: {auth_error}")
                 # Fallback to original URL with embedded credentials
-                response = requests.get(url, headers=headers, timeout=10)
+                response = session.get(url, headers=headers, timeout=(5, 15), stream=True)
         else:
             # No credentials in URL, make request as-is
-            response = requests.get(url, headers=headers, timeout=10)
+            response = session.get(url, headers=headers, timeout=(5, 15), stream=True)
 
-        if response.status_code == 200:
+        if response and response.status_code == 200:
+            # Read content with size limit to prevent memory issues
+            content = b''
+            max_size = 10 * 1024 * 1024  # 10MB limit
+            for chunk in response.iter_content(chunk_size=8192):
+                content += chunk
+                if len(content) > max_size:
+                    logger.error(f"Snapshot too large (>{max_size} bytes) from {clean_url}")
+                    return None
+
             # Convert response content to numpy array
-            nparr = np.frombuffer(response.content, np.uint8)
+            nparr = np.frombuffer(content, np.uint8)
             # Decode image
             frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
             if frame is not None:
-                logger.debug(f"Successfully fetched snapshot from {clean_url}, shape: {frame.shape}")
+                logger.debug(f"Successfully fetched snapshot from {clean_url}, shape: {frame.shape}, size: {len(content)} bytes")
                 return frame
             else:
-                logger.error(f"Failed to decode image from snapshot URL: {clean_url}")
+                logger.error(f"Failed to decode image from snapshot URL: {clean_url} (content size: {len(content)} bytes)")
                 return None
-        else:
+        elif response:
             logger.error(f"Failed to fetch snapshot (status code {response.status_code}): {clean_url}")
+            # Log response headers and first part of content for debugging
+            logger.debug(f"Response headers: {dict(response.headers)}")
+            if len(response.content) < 1000:
+                logger.debug(f"Response content: {response.content[:500]}")
             return None
+        else:
+            logger.error(f"No response received from snapshot URL: {clean_url}")
+            return None
+    except requests.exceptions.Timeout as e:
+        logger.error(f"Timeout fetching snapshot from {url}: {str(e)}")
+        return None
+    except requests.exceptions.ConnectionError as e:
+        logger.error(f"Connection error fetching snapshot from {url}: {str(e)}")
+        return None
     except Exception as e:
-        logger.error(f"Exception fetching snapshot from {url}: {str(e)}")
+        logger.error(f"Exception fetching snapshot from {url}: {str(e)}", exc_info=True)
         return None
 
 # Helper to get crop coordinates from stream
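Note on the authentication flow: trying `HTTPDigestAuth` first and falling back to `HTTPBasicAuth` on a 401 matches how most IP cameras behave. A condensed sketch of just that fallback (the function name and parameters are illustrative, not from the codebase):

import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

def get_with_auth_fallback(session: requests.Session, url: str,
                           username: str, password: str) -> requests.Response:
    # Most IP cameras speak Digest auth, so try it first.
    resp = session.get(url, auth=HTTPDigestAuth(username, password), timeout=(5, 15))
    if resp.status_code == 401:
        # Camera rejected Digest; retry once with Basic auth.
        resp = session.get(url, auth=HTTPBasicAuth(username, password), timeout=(5, 15))
    return resp

The size-capped `iter_content` loop in the hunk above serves a complementary purpose: it prevents a misbehaving camera from streaming an unbounded body into memory.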
@@ -324,7 +367,7 @@ async def detect(websocket: WebSocket):
             detection_data = {
                 "type": "imageDetection",
                 "subscriptionIdentifier": stream["subscriptionIdentifier"],
-                "timestamp": time.strftime("%Y-%m-%dT%H:%M:%S.%fZ", time.gmtime()),
+                "timestamp": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
+                # "timestamp": time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()) + f".{int(time.time() * 1000) % 1000:03d}Z",
                 "data": {
                     "detection": detection_dict,
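Note on the timestamp change: `time.strftime` does not support the `%f` directive (fractional seconds exist only on `datetime.strftime`, where `%f` yields microseconds), so the old format string could not reliably emit sub-second precision; the commented-out line preserves the manual millisecond workaround. In isolation:

import time

# Whole seconds, as the code now sends:
ts = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

# Manual workaround if millisecond precision is needed again:
ms = int(time.time() * 1000) % 1000
ts_ms = time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()) + f".{ms:03d}Z"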
@@ -452,6 +495,7 @@ async def detect(websocket: WebSocket):
     def snapshot_reader(camera_id, snapshot_url, snapshot_interval, buffer, stop_event):
         """Frame reader that fetches snapshots from HTTP/HTTPS URL at specified intervals"""
         retries = 0
+        consecutive_failures = 0  # Track consecutive failures for backoff
        logger.info(f"Starting snapshot reader thread for camera {camera_id} from {snapshot_url}")
         frame_count = 0
         last_log_time = time.time()
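Note: `retries` and the new `consecutive_failures` counter play different roles. `retries` only ever grows and eventually stops the reader thread once it passes `max_retries`, while `consecutive_failures` resets to zero on every successful fetch and only controls the backoff delay, as the next hunk shows.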
@@ -466,15 +510,31 @@ async def detect(websocket: WebSocket):
                 frame = fetch_snapshot(snapshot_url)
 
                 if frame is None:
-                    logger.warning(f"Failed to fetch snapshot for camera: {camera_id}, retry {retries+1}/{max_retries}")
+                    consecutive_failures += 1
+                    logger.warning(f"Failed to fetch snapshot for camera: {camera_id}, consecutive failures: {consecutive_failures}")
                     retries += 1
+
+                    # Check network connectivity with a simple ping-like test
+                    if consecutive_failures % 5 == 1:  # Every 5th failure, test connectivity
+                        try:
+                            test_response = requests.get(snapshot_url, timeout=(2, 5), stream=False)
+                            logger.info(f"Camera {camera_id}: Connectivity test result: {test_response.status_code}")
+                        except Exception as test_error:
+                            logger.warning(f"Camera {camera_id}: Connectivity test failed: {test_error}")
+
                     if retries > max_retries and max_retries != -1:
                         logger.error(f"Max retries reached for snapshot camera: {camera_id}, stopping reader")
                         break
-                    time.sleep(min(interval_seconds, reconnect_interval))
+
+                    # Exponential backoff based on consecutive failures
+                    backoff_delay = min(30, max(1, min(2 ** min(consecutive_failures - 1, 6), interval_seconds * 2)))  # Start with 1s, max 30s
+                    logger.debug(f"Camera {camera_id}: Backing off for {backoff_delay:.1f}s (consecutive failures: {consecutive_failures})")
+                    if stop_event.wait(backoff_delay):  # Use wait with timeout instead of sleep
+                        break  # Exit if stop_event is set during backoff
                     continue
 
-                # Successfully fetched a frame
+                # Successfully fetched a frame - reset consecutive failures
+                consecutive_failures = 0  # Reset backoff on success
                 frame_count += 1
                 current_time = time.time()
                 # Log frame stats every 5 seconds
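Note on the backoff expression: it grows the delay exponentially from 1s, caps the exponent at 2**6 = 64, clamps to twice the snapshot interval, and never exceeds 30s. A worked sketch (the 5-second interval is an illustrative value, not from the config):

def backoff_delay(consecutive_failures: int, interval_seconds: float) -> float:
    exp = 2 ** min(consecutive_failures - 1, 6)              # 1, 2, 4, ... up to 64
    return min(30, max(1, min(exp, interval_seconds * 2)))   # clamp to [1, 30] and 2x interval

# With a 5-second snapshot interval, consecutive failures 1..6 give
# delays that plateau at twice the interval:
print([backoff_delay(n, 5) for n in range(1, 7)])  # [1, 2, 4, 8, 10, 10]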
@@ -503,12 +563,18 @@ async def detect(websocket: WebSocket):
                         time.sleep(sleep_time)
 
             except Exception as e:
+                consecutive_failures += 1
                 logger.error(f"Unexpected error fetching snapshot for camera {camera_id}: {str(e)}", exc_info=True)
                 retries += 1
                 if retries > max_retries and max_retries != -1:
                     logger.error(f"Max retries reached after error for snapshot camera {camera_id}")
                     break
-                time.sleep(min(interval_seconds, reconnect_interval))
+
+                # Exponential backoff for exceptions too
+                backoff_delay = min(30, max(1, min(2 ** min(consecutive_failures - 1, 6), interval_seconds * 2)))  # Start with 1s, max 30s
+                logger.debug(f"Camera {camera_id}: Exception backoff for {backoff_delay:.1f}s (consecutive failures: {consecutive_failures})")
+                if stop_event.wait(backoff_delay):  # Use wait with timeout instead of sleep
+                    break  # Exit if stop_event is set during backoff
         except Exception as e:
             logger.error(f"Error in snapshot_reader thread for camera {camera_id}: {str(e)}", exc_info=True)
         finally:
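Note on `stop_event.wait(backoff_delay)` versus `time.sleep`: `wait` returns `True` as soon as the event is set, so the reader thread can exit mid-backoff instead of sitting out the full delay. A self-contained sketch of the pattern:

import threading
import time

stop_event = threading.Event()

def worker():
    while not stop_event.is_set():
        # ... fetch a frame here ...
        if stop_event.wait(10):  # returns True immediately once the event is set
            break                # exit without waiting the full 10 seconds

t = threading.Thread(target=worker)
t.start()
time.sleep(0.1)
stop_event.set()  # worker wakes up and exits almost immediately
t.join()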