Working with Live Data
@@ -11,3 +11,5 @@
- 45.88.8.186
- 120.48.49.12
- 23.137.255.140
- 101.89.148.7
- The only IP ranges with SSH access are 192.168.2.0/24 and 100.64.0.0/10
@@ -35,7 +35,10 @@

## TODO

[ ] Ensure we aren't using mockdata for get_system_logs() and get_network_metrics()
[x] Ensure we aren't using mockdata for get_system_logs() and get_network_metrics()
[x] Using live data, but need to resolve errors before moving on
[ ] Improve `get_system_logs()` to read new lines since last check
[ ] Improve `get_network_metrics()` by using a library like `pingparsing`
[ ] Ensure we are including CONSTRAINTS.md in our analyze_data_with_llm() function
[ ] Summarize the entire report into a single sentence to be sent to Home Assistant (see the sketch after this list)
[ ] Figure out why Home Assistant isn't using the speaker
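For the last two items, one way to hand that single sentence to Home Assistant is its REST service API. The sketch below is only an illustration: the host, the long-lived access token, the `tts.google_translate_say` service, and the `media_player.living_room` entity are all assumptions, not values from this repo.

```python
import requests

HA_URL = "http://homeassistant.local:8123"     # assumed Home Assistant host
HA_TOKEN = "YOUR_LONG_LIVED_ACCESS_TOKEN"      # assumed long-lived access token

def speak_summary(sentence: str) -> None:
    """Ask Home Assistant to speak a one-sentence summary through a media player."""
    resp = requests.post(
        f"{HA_URL}/api/services/tts/google_translate_say",
        headers={"Authorization": f"Bearer {HA_TOKEN}"},
        # entity_id is hypothetical; point it at the speaker that should talk
        json={"entity_id": "media_player.living_room", "message": sentence},
        timeout=10,
    )
    resp.raise_for_status()
```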
Binary file not shown.
@@ -1,6 +1,6 @@
import json
import os
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone

DATA_FILE = 'monitoring_data.json'
@@ -23,16 +23,16 @@ def calculate_baselines():

    # For simplicity, we'll average the last 24 hours of data
    # More complex logic can be added here
    recent_data = [d for d in data if datetime.fromisoformat(d['system_logs']['timestamp'].replace('Z', '')) > datetime.now() - timedelta(hours=24)]
    recent_data = [d for d in data if 'timestamp' in d and datetime.fromisoformat(d['timestamp'].replace('Z', '')).replace(tzinfo=timezone.utc) > datetime.now(timezone.utc) - timedelta(hours=24)]

    if not recent_data:
        return {}

    baseline_metrics = {
        'avg_rtt': sum(d['network_metrics']['round_trip_ms_avg'] for d in recent_data) / len(recent_data),
        'packet_loss': sum(d['network_metrics']['packet_loss_percent'] for d in recent_data) / len(recent_data),
        'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data) / len(recent_data),
        'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data) / len(recent_data),
        'avg_rtt': sum(d['network_metrics']['round_trip_ms_avg'] for d in recent_data if d['network_metrics']['round_trip_ms_avg'] != "N/A") / len(recent_data),
        'packet_loss': sum(d['network_metrics']['packet_loss_percent'] for d in recent_data if d['network_metrics']['packet_loss_percent'] != "N/A") / len(recent_data),
        'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data if d['cpu_temperature']['cpu_temperature'] != "N/A") / len(recent_data),
        'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data if d['gpu_temperature']['gpu_temperature'] != "N/A") / len(recent_data),
    }

    return baseline_metrics
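One caveat with the new comprehensions above: they skip `"N/A"` readings when summing but still divide by `len(recent_data)`, so any missing sample drags the average down. A minimal sketch of a helper that averages only the numeric readings (assuming the same `"N/A"` placeholder convention):

```python
def _avg(values):
    """Average only numeric readings, ignoring "N/A" placeholders."""
    nums = [v for v in values if v != "N/A"]
    return sum(nums) / len(nums) if nums else "N/A"

# Hypothetical usage inside calculate_baselines():
#   'avg_rtt': _avg(d['network_metrics']['round_trip_ms_avg'] for d in recent_data),
```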
@@ -7,42 +7,71 @@ import ollama
from discord_webhook import DiscordWebhook
import requests
import data_storage
import re
from datetime import datetime, timezone

# Load configuration
import config

from syslog_rfc5424_parser import parser

# --- Data Ingestion & Parsing Functions ---

||||
def get_system_logs():
    """Simulates collecting and parsing system logs."""
    # Mock log entry for demonstration
    mock_log_entry = '{"timestamp": "2025-08-15T12:00:00Z", "log": "Failed login attempt for user \'root\' from 10.0.0.1"}'
    """Gets the last 100 lines of /var/log/syslog and parses them."""
    try:
        parsed_log = json.loads(mock_log_entry)
        return parsed_log
    except json.JSONDecodeError as e:
        print(f"Error parsing system log: {e}")
        return None
        with open("/var/log/syslog", "r") as f:
            log_lines = f.readlines()[-100:]

        parsed_logs = []
        for line in log_lines:
            try:
                parsed_logs.append(parser.parse(line).as_dict())
            except Exception:
                # If parsing fails, just append the raw line
                parsed_logs.append({"raw_log": line.strip()})

        return {"syslog": parsed_logs}
    except FileNotFoundError:
        print("Error: /var/log/syslog not found.")
        return {"syslog": []}
    except Exception as e:
        print(f"Error reading syslog: {e}")
        return {"syslog": []}

def get_network_metrics():
    """Simulates collecting and parsing network data."""
    # Mock ping output for demonstration
    mock_ping_output = '''{"destination_ip":"8.8.8.8","data_bytes":56,"pattern":null,"destination":"8.8.8.8","duplicates":0,"packets_transmitted":3,"packets_received":3,"packet_loss_percent":0.0,"time_ms":2003.0,"round_trip_ms_min":18.79,"round_trip_ms_avg":21.212,"round_trip_ms_max":22.787,"round_trip_ms_stddev":1.738,"responses":[{"type":"reply","timestamp":null,"bytes":64,"response_ip":"8.8.8.8","icmp_seq":1,"ttl":111,"time_ms":18.8,"duplicate":false},{"type":"reply","timestamp":null,"bytes":64,"response_ip":"8.8.8.8","icmp_seq":2,"ttl":111,"time_ms":22.8,"duplicate":false},{"type":"reply","timestamp":null,"bytes":64,"response_ip":"8.8.8.8","icmp_seq":3,"ttl":111,"time_ms":22.1,"duplicate":false}]}'''
    """Gets network metrics by pinging 8.8.8.8."""
    try:
        parsed_ping = json.loads(mock_ping_output)
        if parsed_ping:
            return {
                "packets_transmitted": parsed_ping.get("packets_transmitted"),
                "packets_received": parsed_ping.get("packets_received"),
                "packet_loss_percent": parsed_ping.get("packet_loss_percent"),
                "round_trip_ms_avg": parsed_ping.get("round_trip_ms_avg"),
            }
        return None
    except json.JSONDecodeError as e:
        print(f"Error parsing network metrics: {e}")
        return None
        ping_output = subprocess.check_output(["ping", "-c", "3", "8.8.8.8"], text=True)
        # Use regex to parse the output (re is already imported at the top of the module)
        packets_transmitted_match = re.search(r"(\d+) packets transmitted", ping_output)
        packets_received_match = re.search(r"(\d+) received", ping_output)
        packet_loss_match = re.search(r"(\d+\.?\d*)% packet loss", ping_output)
        rtt_match = re.search(r"rtt min/avg/max/mdev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s*ms", ping_output)

        if packets_transmitted_match and packets_received_match and packet_loss_match and rtt_match:
            return {
                "packets_transmitted": int(packets_transmitted_match.group(1)),
                "packets_received": int(packets_received_match.group(1)),
                "packet_loss_percent": float(packet_loss_match.group(1)),
                "round_trip_ms_avg": float(rtt_match.group(1)),
            }
        else:
            # Try to parse the summary line for different ping versions
            summary_match = re.search(r"(\d+) packets transmitted, (\d+) received, (\d+\.?\d*)% packet loss", ping_output)
            if summary_match:
                rtt_match = re.search(r"round-trip min/avg/max/stddev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s*ms", ping_output)
                avg_rtt = float(rtt_match.group(1)) if rtt_match else "N/A"
                return {
                    "packets_transmitted": int(summary_match.group(1)),
                    "packets_received": int(summary_match.group(2)),
                    "packet_loss_percent": float(summary_match.group(3)),
                    "round_trip_ms_avg": avg_rtt,
                }
        return {"error": "Failed to parse ping output"}
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("Error: 'ping' command not found or failed to execute.")
        return {"error": "ping command failed"}


def get_cpu_temperature():
    """Gets the CPU temperature using the sensors command."""
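For the TODO about making `get_system_logs()` read only the lines added since the last check, one possible approach is to persist the byte offset of the last read. This is a sketch, not the project's code; the offset file name and the rotation check are assumptions.

```python
import os

OFFSET_FILE = "syslog.offset"  # hypothetical state file kept next to the script

def read_new_syslog_lines(path="/var/log/syslog"):
    """Return only the lines appended to the log since the previous call."""
    try:
        offset = int(open(OFFSET_FILE).read())
    except (FileNotFoundError, ValueError):
        offset = 0

    with open(path, "rb") as f:
        f.seek(0, os.SEEK_END)
        if offset > f.tell():      # file shrank: assume it was rotated, start over
            offset = 0
        f.seek(offset)
        new_lines = [line.decode(errors="replace") for line in f.readlines()]
        new_offset = f.tell()

    with open(OFFSET_FILE, "w") as f:
        f.write(str(new_offset))
    return new_lines
```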
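For the TODO about switching `get_network_metrics()` to `pingparsing`, a rough sketch of what that could look like; it mirrors the current destination and count, and the dictionary keys follow `pingparsing`'s documented `as_dict()` names (worth verifying against the installed version):

```python
import pingparsing

def get_network_metrics_via_pingparsing(dest="8.8.8.8", count=3):
    """Ping once and return the same fields the regex version produces."""
    transmitter = pingparsing.PingTransmitter()
    transmitter.destination = dest
    transmitter.count = count
    result = transmitter.ping()

    stats = pingparsing.PingParsing().parse(result).as_dict()
    return {
        "packets_transmitted": stats.get("packet_transmit"),
        "packets_received": stats.get("packet_receive"),
        "packet_loss_percent": stats.get("packet_loss_rate"),
        "round_trip_ms_avg": stats.get("rtt_avg"),
    }
```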
@@ -176,6 +205,7 @@ if __name__ == "__main__":

    if system_logs and network_metrics:
        combined_data = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "system_logs": system_logs,
            "network_metrics": network_metrics,
            "cpu_temperature": cpu_temp,
@@ -204,6 +234,7 @@ if __name__ == "__main__":

    if system_logs and network_metrics:
        combined_data = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "system_logs": system_logs,
            "network_metrics": network_metrics,
            "cpu_temperature": cpu_temp,