| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242 |
- """
- System Monitor Module
- Monitors system status including:
- - Camera applications (running/disconnected)
- - AI model load state
- - GPU usage and information
- - RAM usage
- """
- import psutil
- import logging
- from typing import Dict, Optional, Any
- from utils.process_utils import check_camera_applications
- logger = logging.getLogger(__name__)
def get_camera_status() -> Dict[str, Dict[str, Any]]:
    """
    Get status of all camera applications.

    Returns:
        Dict mapping camera app name to a dict with 'running' (bool) and
        'spec' (str); 'spec' carries the hardware description while the app
        is running and 'DISCONNECTED' otherwise. An extra 'Audio System'
        entry is derived from the EOS Utility state. Returns {} on error.
        Example: {
            'EOS Utility': {'running': True, 'spec': '1920x1080 @ 30fps'},
            '2nd Look': {'running': False, 'spec': 'DISCONNECTED'},
            'AnalyzIR': {'running': True, 'spec': 'Thermal @ 60fps'}
        }
    """
    try:
        # Check which apps are running (app name -> bool)
        app_status = check_camera_applications()
        # Lazy %-style args avoid formatting when the level is disabled
        logger.info("Camera app status: %s", app_status)

        # Static hardware specs for each known camera app
        camera_specs = {
            'EOS Utility': '1920x1080 @ 30fps',
            '2nd Look': '8-band Near Infrared Multispectral Camera',
            'AnalyzIR': 'FOTRIC 323F 264*198 Thermal Imaging Camera'
        }

        audio_spec = '44.1kHz, 16-bit'

        result = {}

        # Build status dict for each camera app; apps without a known
        # spec fall back to 'Unknown'
        for app_name, running in app_status.items():
            spec = camera_specs.get(app_name, 'Unknown')
            result[app_name] = {
                'running': running,
                'spec': spec if running else 'DISCONNECTED'
            }

        # Note: Audio system is checked via EOS Utility presence.
        # Audio is considered connected if EOS Utility is running.
        eos_running = app_status.get('EOS Utility', False)
        result['Audio System'] = {
            'running': eos_running,
            'spec': audio_spec if eos_running else 'DISCONNECTED'
        }

        logger.debug("Returning camera status: %s", result)
        return result

    except Exception as e:
        logger.error(f"Error getting camera status: {e}", exc_info=True)
        return {}
def get_model_load_status(models: Optional[Dict[str, Any]] = None) -> Dict[str, Dict[str, str]]:
    """
    Report the load state of every known AI model.

    Args:
        models: Mapping of internal model keys to model instances (as held
            by main_window). When None (or empty), every model is reported
            as offline / 'Not Loaded'.

    Returns:
        Dict keyed by display name, each value holding 'status'
        ('online'/'offline') and 'info' ('Loaded'/'Failed'/'Not Loaded').
        Example: {
            'Ripeness': {'status': 'online', 'info': 'Loaded'},
            'Quality': {'status': 'offline', 'info': 'Failed'},
            'Defect': {'status': 'online', 'info': 'Loaded'},
            'Maturity': {'status': 'online', 'info': 'Loaded'},
            'Shape': {'status': 'offline', 'info': 'Not Loaded'}
        }
        Returns {} if an unexpected error occurs.
    """
    try:
        # Internal model key -> user-facing display name
        display_names = {
            'audio': 'Ripeness',
            'defect': 'Quality',
            'locule': 'Defect',
            'maturity': 'Maturity',
            'shape': 'Shape'
        }

        status_report = {}

        for key, label in display_names.items():
            instance = models.get(key) if models else None
            if instance is None:
                # Model was never created / registered
                entry = {'status': 'offline', 'info': 'Not Loaded'}
            elif getattr(instance, 'is_loaded', False):
                entry = {'status': 'online', 'info': 'Loaded'}
            else:
                # Instance exists but did not finish loading
                entry = {'status': 'offline', 'info': 'Failed'}
            status_report[label] = entry

        return status_report

    except Exception as e:
        logger.error(f"Error getting model load status: {e}", exc_info=True)
        return {}
def get_gpu_info() -> Dict[str, Any]:
    """
    Get GPU information and VRAM usage via torch.cuda.

    Returns:
        Dict with GPU info:
        {
            'available': True/False,
            'name': 'NVIDIA RTX 3080',
            'usage_percent': 45.2,     # percent of total VRAM in use
            'vram_used_gb': 6.2,
            'vram_total_gb': 12.0,
            'display': 'GPU: NVIDIA RTX 3080 | Usage: 45% (6.2GB/12.0GB)'
        }
        When CUDA is unavailable or an error occurs, 'available' is False
        and the numeric fields are 0.
    """
    try:
        # Imported lazily so this module stays importable without torch
        import torch

        if not torch.cuda.is_available():
            return {
                'available': False,
                'name': 'Not Available',
                'usage_percent': 0,
                'vram_used_gb': 0,
                'vram_total_gb': 0,
                'display': 'GPU: Not Available (Using CPU)'
            }

        # Get GPU name (device 0 only — multi-GPU not surfaced here)
        gpu_name = torch.cuda.get_device_name(0)

        # VRAM figures in GB. Reserved usually >= allocated, but take the
        # max so the reported usage never understates either counter.
        total_vram = torch.cuda.get_device_properties(0).total_memory / (1024**3)
        reserved_vram = torch.cuda.memory_reserved(0) / (1024**3)
        allocated_vram = torch.cuda.memory_allocated(0) / (1024**3)
        used_vram = max(reserved_vram, allocated_vram)

        # Guard against a zero-sized total (defensive; should not happen)
        vram_percent = (used_vram / total_vram * 100) if total_vram > 0 else 0

        display_str = f"GPU: {gpu_name} | Usage: {vram_percent:.0f}% ({used_vram:.1f}GB/{total_vram:.1f}GB)"

        return {
            'available': True,
            'name': gpu_name,
            'usage_percent': vram_percent,
            'vram_used_gb': used_vram,
            'vram_total_gb': total_vram,
            'display': display_str
        }

    except Exception as e:
        # Report through the module logger (with traceback) instead of
        # printing to stdout, consistent with the rest of this module
        logger.error(f"Error getting GPU info: {e}", exc_info=True)
        return {
            'available': False,
            'name': 'Error',
            'usage_percent': 0,
            'vram_used_gb': 0,
            'vram_total_gb': 0,
            'display': 'GPU: Error retrieving info'
        }
def get_ram_info() -> Dict[str, Any]:
    """
    Get system RAM usage information via psutil.

    Returns:
        Dict with RAM info:
        {
            'usage_percent': 62.5,
            'used_gb': 8.0,
            'total_gb': 16.0,
            'display': 'RAM: 62% (8.0GB/16.0GB)'
        }
        On error the numeric fields are 0 and 'display' reports the failure.
    """
    try:
        virtual_memory = psutil.virtual_memory()

        usage_percent = virtual_memory.percent
        # Convert bytes to GB
        used_gb = virtual_memory.used / (1024**3)
        total_gb = virtual_memory.total / (1024**3)

        display_str = f"RAM: {usage_percent:.0f}% ({used_gb:.1f}GB/{total_gb:.1f}GB)"

        return {
            'usage_percent': usage_percent,
            'used_gb': used_gb,
            'total_gb': total_gb,
            'display': display_str
        }

    except Exception as e:
        # Report through the module logger (with traceback) instead of
        # printing to stdout, consistent with the rest of this module
        logger.error(f"Error getting RAM info: {e}", exc_info=True)
        return {
            'usage_percent': 0,
            'used_gb': 0,
            'total_gb': 0,
            'display': 'RAM: Error retrieving info'
        }
def get_full_system_status(models: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """
    Get complete system status snapshot.

    Aggregates camera, AI-model, GPU, and RAM status into a single dict.

    Args:
        models: Dictionary of model instances from main_window; passed
            through to get_model_load_status().

    Returns:
        Dict with keys 'cameras', 'models', 'gpu', 'ram' holding the
        respective sub-status dicts.
    """
    snapshot = {}
    snapshot['cameras'] = get_camera_status()
    snapshot['models'] = get_model_load_status(models)
    snapshot['gpu'] = get_gpu_info()
    snapshot['ram'] = get_ram_info()
    return snapshot
|