LLM-Powered-Monitoring-Agent/data_storage.py

import json
import os
from datetime import datetime, timedelta

DATA_FILE = 'monitoring_data.json'


def load_data():
    """Return all stored monitoring records, or an empty list if the data file does not exist yet."""
    if os.path.exists(DATA_FILE):
        with open(DATA_FILE, 'r') as f:
            return json.load(f)
    return []


def store_data(new_data):
    """Append a new monitoring record and rewrite the JSON data file."""
    data = load_data()
    data.append(new_data)
    with open(DATA_FILE, 'w') as f:
        json.dump(data, f, indent=4)
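
# Illustrative record shape (an assumption, not output from a real collector):
# the dict passed to store_data() is expected to carry the nested keys that
# calculate_baselines() reads below. The field values shown are hypothetical.
#
#     store_data({
#         'system_logs': {'timestamp': '2025-08-18T14:30:00Z'},
#         'network_metrics': {'round_trip_ms_avg': 23.4, 'packet_loss_percent': 0.0},
#         'cpu_temperature': {'cpu_temperature': 54.0},
#         'gpu_temperature': {'gpu_temperature': 61.5},
#     })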


def calculate_baselines():
    """Average the last 24 hours of records into per-metric baseline values."""
    data = load_data()
    if not data:
        return {}
    # For simplicity, we average the last 24 hours of data.
    # More complex logic can be added here.
    cutoff = datetime.now() - timedelta(hours=24)
    recent_data = [
        d for d in data
        if datetime.fromisoformat(d['system_logs']['timestamp'].replace('Z', '')) > cutoff
    ]
    if not recent_data:
        return {}
    baseline_metrics = {
        'avg_rtt': sum(d['network_metrics']['round_trip_ms_avg'] for d in recent_data) / len(recent_data),
        'packet_loss': sum(d['network_metrics']['packet_loss_percent'] for d in recent_data) / len(recent_data),
        'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data) / len(recent_data),
        'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data) / len(recent_data),
    }
    return baseline_metrics
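
# Minimal usage sketch (an assumed workflow, not part of the original module):
# a caller could compare a fresh reading against the rolling baselines, e.g.
#
#     baselines = calculate_baselines()
#     if baselines and current_rtt > 1.5 * baselines['avg_rtt']:
#         ...  # hand the anomalous reading to the monitoring agent
#
# The 1.5x threshold and the `current_rtt` variable are illustrative assumptions.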