"""Persist monitoring samples to a JSON file and derive 24-hour baseline metrics."""

import json
import os
from datetime import datetime, timedelta, timezone

# File (relative to the working directory) that accumulates monitoring samples.
DATA_FILE = 'monitoring_data.json'


def load_data():
    """Return the list of stored samples, or [] when no data file exists yet."""
    if os.path.exists(DATA_FILE):
        with open(DATA_FILE, 'r') as f:
            return json.load(f)
    return []


def store_data(new_data):
    """Append one sample dict to the data file.

    Read-modify-write of the whole file; not atomic and not safe for
    concurrent writers — adequate for a single-process monitor.
    """
    data = load_data()
    data.append(new_data)
    with open(DATA_FILE, 'w') as f:
        json.dump(data, f, indent=4)


def _mean(values):
    """Average of *values*; 0.0 when empty (matches the old degenerate output)."""
    return sum(values) / len(values) if values else 0.0


def _parse_timestamp(ts):
    """Parse an ISO-8601 timestamp (with optional trailing 'Z') as UTC-aware."""
    return datetime.fromisoformat(ts.replace('Z', '')).replace(tzinfo=timezone.utc)


def calculate_baselines():
    """Average the samples from the last 24 hours into baseline metrics.

    Returns a dict with keys 'avg_rtt', 'packet_loss', 'avg_cpu_temp' and
    'avg_gpu_temp', or {} when there is no data (or no recent data).

    Bug fix vs. the previous version: each average now divides by the number
    of samples that actually carried the metric, not by the total number of
    recent samples — the old code skewed every average downward whenever a
    sample was missing a field or reported "N/A". Samples missing an entire
    sub-dict (e.g. no 'network_metrics' key) are now skipped instead of
    raising KeyError.
    """
    data = load_data()
    if not data:
        return {}

    # Hoist the cutoff so "now" is evaluated once for the whole scan.
    cutoff = datetime.now(timezone.utc) - timedelta(hours=24)
    recent = [
        d for d in data
        if 'timestamp' in d and _parse_timestamp(d['timestamp']) > cutoff
    ]
    if not recent:
        return {}

    rtts = [d['network_metrics']['rtt_avg']
            for d in recent if 'rtt_avg' in d.get('network_metrics', {})]
    losses = [d['network_metrics']['packet_loss_rate']
              for d in recent if 'packet_loss_rate' in d.get('network_metrics', {})]
    # Temperature sensors report the literal string "N/A" when unavailable.
    cpu_temps = [v for v in
                 (d.get('cpu_temperature', {}).get('cpu_temperature', 'N/A') for d in recent)
                 if v != 'N/A']
    gpu_temps = [v for v in
                 (d.get('gpu_temperature', {}).get('gpu_temperature', 'N/A') for d in recent)
                 if v != 'N/A']

    return {
        'avg_rtt': _mean(rtts),
        'packet_loss': _mean(losses),
        'avg_cpu_temp': _mean(cpu_temps),
        'avg_gpu_temp': _mean(gpu_temps),
    }