"""Live inference entry point.

Starts one snapshot-manager thread per configured camera, a model-scoring
process fed by a shared image queue, and a Kafka poster process that publishes
the scoring results.
"""
from common_code.settings import get_logger
import logging

logger = get_logger('live_inference',
                    file_path='/var/log/live_inference.log',
                    stdout=True,
                    level=logging.WARNING)

from utils import run_model, start_snap_manager, kafka_poster

import threading
import queue
import multiprocessing
import time

all_cameras_config = {
    "camera_sidefeeder": {'ip': "192.168.1.157", 'resolution': (360, 640, 3)},
    "camera_driveway":   {'ip': "192.168.1.152", 'resolution': (480, 640, 3)},
    "camera_railing":    {'ip': "192.168.1.153", 'resolution': (512, 896, 3)},
    "camera_ptz":        {'ip': "192.168.1.155", 'resolution': (360, 640, 3)},
    "camera_wrenwatch":  {'ip': "192.168.1.158", 'resolution': (360, 640, 3)},
    "camera_backyard":   {'ip': "192.168.1.162", 'resolution': (432, 1536, 3), 'split_into_two': True},
}

# Derive per-camera connection details from each camera's IP address.
for cam, details in all_cameras_config.items():
    details['url_rtsp'] = f"rtsp://admin:marybear@{details['ip']}:554/h264Preview_01_sub"
    details['url_api'] = f"http://{details['ip']}/cgi-bin/api.cgi"
    details['username'] = 'admin'
    details['password'] = 'marybear'
    details['camera_name'] = cam

# %%
# Manual snapshot check (disabled by default).
if False:
    details = all_cameras_config['camera_driveway']
    url = details['url_api']
    username = 'admin'
    password = 'marybear'
    from utils import get_snap
    cc = get_snap(url, username, password)

# %%
# Select which cameras to run; default to all of them.
cameras_config = dict()
# cameras_config['camera_railing'] = all_cameras_config['camera_railing']
cameras_config = all_cameras_config


def start_system():
    # Shared multiprocessing queues: images to score go to the model process,
    # scoring results go to the Kafka poster.
    img_scoring_queue = multiprocessing.Queue(maxsize=len(cameras_config) * 2)
    kafka_results_queue = multiprocessing.Queue()

    # Create a control-message queue for each camera thread.
    for cam, details in cameras_config.items():
        details['msg_queue'] = queue.Queue(maxsize=1)
        details['img_scoring_queue'] = img_scoring_queue
        details['kafka_queue'] = kafka_results_queue

    # Start the Kafka poster process.
    kafka_process = multiprocessing.Process(target=kafka_poster,
                                            args=(kafka_results_queue,))
    kafka_process.daemon = True
    kafka_process.start()
    logger.info('Started kafka poster process')

    # Start the model process.
    model_process = multiprocessing.Process(target=run_model,
                                            args=(img_scoring_queue, kafka_results_queue))
    model_process.daemon = True
    model_process.start()
    logger.info('Started model process')

    # Start one snapshot-manager thread per camera.
    camera_threads = []
    for cam_name, details in cameras_config.items():
        thread = threading.Thread(target=start_snap_manager, kwargs=details)
        thread.daemon = True
        thread.start()
        camera_threads.append(thread)
        logger.info(f'Started camera thread for {cam_name}')

    logger.info("System started. Available cameras:")
Available cameras:") for cam_name in cameras_config.keys(): logger.info(f" - {cam_name}") # Auto-capture loop - continuously send get messages when queues are empty msg_counts = dict() last_save = dict() for cam_name in cameras_config: msg_counts[cam_name] = 0 last_save[cam_name] = 0 save_interval = 60 try: while True: for cam_name, details in cameras_config.items(): try: if details['msg_queue'].empty(): c_time = time.time() cmd_suffix = '' if (c_time - last_save[cam_name]) > save_interval: cmd_suffix = '+save' last_save[cam_name] = c_time details['msg_queue'].put('get' + cmd_suffix, block=False) msg_counts[cam_name] += 1 logger.debug(f"Auto-sent 'get' to {cam_name}") except queue.Full: pass # Queue full, skip except Exception as e: logger.error(f"Error auto-sending to {cam_name}: {e}") time.sleep(1) # Small delay except KeyboardInterrupt: logger.info("Shutting down...") # Send exit messages to all cameras for details in cameras_config.values(): try: details['msg_queue'].put('exit', block=False) except: pass # Terminate model process if model_process.is_alive(): model_process.terminate() model_process.join(timeout=5) if model_process.is_alive(): model_process.kill() if __name__ == "__main__": multiprocessing.set_start_method('spawn', force=True) # Ensure compatibility start_system()