Fix: got data from backend

ziesorx 2025-09-13 01:00:49 +07:00
parent 42a8325faf
commit 086ba0e4d3
4 changed files with 107 additions and 12 deletions

@@ -7,6 +7,7 @@ and coordination between stream processing and detection pipelines.
 import asyncio
 import json
 import logging
+import os
 import time
 import traceback
 import uuid
@@ -429,11 +430,38 @@ class WebSocketHandler:
print(f"📦 Subscription {i+1}: {sub_id} | Model {model_id}")
# Track unique models for download
# Track unique models for download - check if model already exists locally
if model_id and model_url:
if model_id not in unique_models:
unique_models[model_id] = model_url
print(f"🎯 New model found: ID {model_id}")
# Check if model directory already exists on disk
from ..core.config import MODELS_DIR
model_dir = os.path.join(MODELS_DIR, str(model_id))
print(f"🔍 Checking model directory: {model_dir}")
logger.info(f"Checking if model {model_id} exists at: {model_dir}")
if os.path.exists(model_dir) and os.path.isdir(model_dir):
# Check if directory has content (not empty)
dir_contents = os.listdir(model_dir)
actual_contents = [f for f in dir_contents if not f.startswith('.')]
print(f"📋 Directory contents: {dir_contents}")
print(f"📋 Filtered contents: {actual_contents}")
logger.info(f"Model {model_id} directory contents: {actual_contents}")
if actual_contents:
print(f"📁 Model {model_id} already exists locally, skipping download")
logger.info(f"Skipping download for model {model_id} - already exists")
else:
print(f"📁 Model {model_id} directory exists but empty, will download")
unique_models[model_id] = model_url
print(f"🎯 New model found: ID {model_id}")
logger.info(f"Model {model_id} directory empty, adding to download queue")
else:
print(f"📁 Model {model_id} directory does not exist, will download")
unique_models[model_id] = model_url
print(f"🎯 New model found: ID {model_id}")
logger.info(f"Model {model_id} directory not found, adding to download queue")
else:
print(f"🔄 Model {model_id} already tracked")