LLM-Powered-Monitoring-Agent/data_storage.py

import json
import os
from datetime import datetime, timedelta, timezone

DATA_FILE = 'monitoring_data.json'


def load_data():
    """Load previously stored monitoring records, or an empty list if none exist."""
    if os.path.exists(DATA_FILE):
        with open(DATA_FILE, 'r') as f:
            return json.load(f)
    return []


def store_data(new_data):
    """Append a new monitoring record and persist the full list to disk."""
    data = load_data()
    data.append(new_data)
    with open(DATA_FILE, 'w') as f:
        json.dump(data, f, indent=4)


def calculate_baselines():
    """Compute baseline metrics from recent monitoring records."""
    # For simplicity, we average the last 24 hours of data.
    # More complex logic can be added here.
    data = load_data()
    if not data:
        return {}

    cutoff = datetime.now(timezone.utc) - timedelta(hours=24)
    recent_data = [
        d for d in data
        if 'timestamp' in d
        and datetime.fromisoformat(d['timestamp'].replace('Z', '')).replace(tzinfo=timezone.utc) > cutoff
    ]
    if not recent_data:
        return {}

    def average(values):
        # Average only numeric samples; skip "N/A" readings so they
        # don't inflate the denominator.
        numeric = [v for v in values if v != "N/A"]
        return sum(numeric) / len(numeric) if numeric else "N/A"

    baseline_metrics = {
        'avg_rtt': average(d['network_metrics']['round_trip_ms_avg'] for d in recent_data),
        'packet_loss': average(d['network_metrics']['packet_loss_percent'] for d in recent_data),
        'avg_cpu_temp': average(d['cpu_temperature']['cpu_temperature'] for d in recent_data),
        'avg_gpu_temp': average(d['gpu_temperature']['gpu_temperature'] for d in recent_data),
    }
    return baseline_metrics
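

if __name__ == "__main__":
    # Illustrative sketch only: the record layout below is inferred from the keys
    # that calculate_baselines() reads; the collector elsewhere in this project
    # may emit a different structure.
    sample_record = {
        'timestamp': datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ'),
        'network_metrics': {'round_trip_ms_avg': 23.4, 'packet_loss_percent': 0.0},
        'cpu_temperature': {'cpu_temperature': 54.0},
        'gpu_temperature': {'gpu_temperature': 61.5},
    }
    store_data(sample_record)
    print(calculate_baselines())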