feat: implement data storage and baselining
# Data Storage for the LLM-Powered Monitoring Agent

import json
import os
from datetime import datetime, timedelta

DATA_FILE = 'monitoring_data.json'

def load_data():
    """Retrieves historical data from the JSON file."""
    if os.path.exists(DATA_FILE):
        try:
            with open(DATA_FILE, 'r') as f:
                return json.load(f)
        except json.JSONDecodeError:
            # Treat a corrupt or half-written file as empty history.
            return []
    return []


def store_data(new_data):
    """Appends a new record to the JSON data file."""
    data = load_data()
    data.append(new_data)
    with open(DATA_FILE, 'w') as f:
        json.dump(data, f, indent=4)
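
A usage sketch, assuming a record shape inferred from the keys calculate_baselines() reads; the agent's real records may carry more fields:

# Hypothetical record: only 'system_logs.timestamp' and the
# 'network_metrics' values averaged during baselining are required.
sample_record = {
    'system_logs': {
        # ISO 8601 with a trailing 'Z', matching the .replace('Z', '')
        # handling in calculate_baselines().
        'timestamp': datetime.utcnow().isoformat() + 'Z',
    },
    'network_metrics': {
        'packets_transmitted': 10,
        'packets_received': 9,
        'packet_loss_percent': 10.0,
        'round_trip_ms_avg': 42.3,
    },
}
store_data(sample_record)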

def calculate_baselines():
    """Calculates baseline averages for network metrics."""
    data = load_data()
    if not data:
        return {}

    # For simplicity, average only the last 24 hours of data; more complex
    # logic can be added here. fromisoformat() cannot parse a trailing 'Z'
    # before Python 3.11, so the 'Z' is stripped and the cutoff is computed
    # in naive UTC to match the stored UTC timestamps.
    cutoff = datetime.utcnow() - timedelta(hours=24)
    recent_data = [
        d for d in data
        if d.get('system_logs') and d.get('network_metrics')
        and datetime.fromisoformat(d['system_logs']['timestamp'].replace('Z', '')) > cutoff
    ]

    if not recent_data:
        return {}

    baseline_metrics = {
        'avg_rtt': sum(d['network_metrics'].get('round_trip_ms_avg', 0) or 0
                       for d in recent_data) / len(recent_data),
        'packet_loss': sum(d['network_metrics'].get('packet_loss_percent', 0) or 0
                           for d in recent_data) / len(recent_data),
    }
    return baseline_metrics
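
Reading the baseline back, assuming at least one record from the last 24 hours is on disk:

baselines = calculate_baselines()
if baselines:
    print(f"avg RTT: {baselines['avg_rtt']:.1f} ms, "
          f"packet loss: {baselines['packet_loss']:.1f}%")
else:
    print("No data from the last 24 hours; no baseline yet.")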