Working with Live Data
This commit is contained in:
@@ -7,42 +7,71 @@ import ollama
|
||||
from discord_webhook import DiscordWebhook
|
||||
import requests
|
||||
import data_storage
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Load configuration
|
||||
import config
|
||||
|
||||
from syslog_rfc5424_parser import parser
|
||||
|
||||
# --- Data Ingestion & Parsing Functions ---
|
||||
|
||||
def get_system_logs():
    """Read the last 100 lines of /var/log/syslog and parse them.

    Each line is run through the RFC 5424 syslog parser; lines that do
    not parse are kept verbatim under a "raw_log" key so no data is lost.

    Returns:
        dict: {"syslog": [<parsed-or-raw entry>, ...]}. On any read
        error the list is empty rather than raising, so callers can
        always iterate the result.
    """
    # NOTE: the earlier mock-log JSON path was removed here — it returned
    # before the live code below could ever run.
    try:
        with open("/var/log/syslog", "r") as f:
            # Tail only the most recent 100 lines to bound work per poll.
            log_lines = f.readlines()[-100:]

        parsed_logs = []
        for line in log_lines:
            try:
                parsed_logs.append(parser.parse(line).as_dict())
            except Exception:
                # If parsing fails, just append the raw line
                parsed_logs.append({"raw_log": line.strip()})

        return {"syslog": parsed_logs}
    except FileNotFoundError:
        print("Error: /var/log/syslog not found.")
        return {"syslog": []}
    except Exception as e:
        print(f"Error reading syslog: {e}")
        return {"syslog": []}
|
||||
|
||||
def get_network_metrics():
    """Get network metrics by pinging 8.8.8.8 three times.

    Parses the summary of `ping -c 3 8.8.8.8`, trying the Linux
    (iputils) format first and falling back to the BSD/macOS format.

    Returns:
        dict: on success, keys packets_transmitted (int),
        packets_received (int), packet_loss_percent (float) and
        round_trip_ms_avg (float, or "N/A" if the rtt line is absent);
        on failure, a dict with a single "error" key.
    """
    # NOTE: the earlier mock ping-JSON path was removed here — it returned
    # before the live code below could ever run. The stray mid-function
    # `import re` was dropped too (re is imported at module level).
    try:
        ping_output = subprocess.check_output(["ping", "-c", "3", "8.8.8.8"], text=True)

        # Linux iputils summary format.
        packets_transmitted_match = re.search(r"(\d+) packets transmitted", ping_output)
        packets_received_match = re.search(r"(\d+) received", ping_output)
        packet_loss_match = re.search(r"(\d+\.?\d*)% packet loss", ping_output)
        # BUGFIX: the pattern previously ended in "\s ms", which demands
        # two whitespace characters before "ms"; real ping output has
        # exactly one space, so the rtt line never matched.
        rtt_match = re.search(
            r"rtt min/avg/max/mdev = [\d.]+/([\d.]+)/[\d.]+/([\d.]+)\s*ms",
            ping_output,
        )

        if packets_transmitted_match and packets_received_match and packet_loss_match and rtt_match:
            return {
                "packets_transmitted": int(packets_transmitted_match.group(1)),
                "packets_received": int(packets_received_match.group(1)),
                "packet_loss_percent": float(packet_loss_match.group(1)),
                # BUGFIX: group(1) is the avg field of min/avg/max/mdev;
                # the old code used group(2), which is mdev (jitter).
                "round_trip_ms_avg": float(rtt_match.group(1)),
            }

        # Try to parse the summary line for different ping versions
        # (BSD/macOS: "round-trip min/avg/max/stddev = ...").
        summary_match = re.search(
            r"(\d+) packets transmitted, (\d+) received, (\d+\.?\d*)% packet loss",
            ping_output,
        )
        if summary_match:
            rtt_match = re.search(
                r"round-trip min/avg/max/stddev = [\d.]+/([\d.]+)/[\d.]+/([\d.]+)\s*ms",
                ping_output,
            )
            avg_rtt = float(rtt_match.group(1)) if rtt_match else "N/A"
            return {
                "packets_transmitted": int(summary_match.group(1)),
                "packets_received": int(summary_match.group(2)),
                "packet_loss_percent": float(summary_match.group(3)),
                "round_trip_ms_avg": avg_rtt,
            }
        return {"error": "Failed to parse ping output"}
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("Error: 'ping' command not found or failed to execute.")
        return {"error": "ping command failed"}
|
||||
|
||||
def get_cpu_temperature():
|
||||
"""Gets the CPU temperature using the sensors command."""
|
||||
@@ -176,6 +205,7 @@ if __name__ == "__main__":
|
||||
|
||||
if system_logs and network_metrics:
|
||||
combined_data = {
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"system_logs": system_logs,
|
||||
"network_metrics": network_metrics,
|
||||
"cpu_temperature": cpu_temp,
|
||||
@@ -204,6 +234,7 @@ if __name__ == "__main__":
|
||||
|
||||
if system_logs and network_metrics:
|
||||
combined_data = {
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"system_logs": system_logs,
|
||||
"network_metrics": network_metrics,
|
||||
"cpu_temperature": cpu_temp,
|
||||
|
||||
Reference in New Issue
Block a user