Somehow, working perfectly

This commit is contained in:
2025-08-19 15:37:16 -05:00
parent 692e372ef3
commit b5e6ae3918
7 changed files with 54 additions and 45 deletions

View File

@@ -12,4 +12,6 @@
- 120.48.49.12 - 120.48.49.12
- 23.137.255.140 - 23.137.255.140
- 101.89.148.7 - 101.89.148.7
- The Only IP's that have SSH access are 192.168.2.0/24 and 100.64.0.0/10 - On August 18th, 2025, a firewall was set up to prevent all unknown IP's from SSH access. The Only IP's that have SSH access are 192.168.2.0/24 and 100.64.0.0/10
- Please do not report on anything that is older than 48 hours.

View File

@@ -36,9 +36,8 @@
## TODO ## TODO
[x] Ensure we aren't using mockdata for get_system_logs() and get_network_metrics() [x] Ensure we aren't using mockdata for get_system_logs() and get_network_metrics()
[x] Using live data, but need to resolve errors before moving on [x] Improve `get_system_logs()` to read new lines since last check
[ ] Improve `get_system_logs()` to read new lines since last check [x] Improve `get_network_metrics()` by using a library like `pingparsing`
[ ] Improve `get_network_metrics()` by using a library like `pingparsing` [x] Ensure we are including CONSTRAINTS.md in our analyze_data_with_llm() function
[ ] Ensure we are including CONSTRAINTS.md in our analyze_data_with_llm() function [x] Summarize entire report into a single sentence to be said to Home Assistant
[ ] Summarize entire report into a single sentence to be said to Home Assistant [x] Figure out why Home Assistant isn't using the speaker
[ ] Figure out why Home Assistant isn't using the speaker

Binary file not shown.

View File

@@ -29,8 +29,8 @@ def calculate_baselines():
return {} return {}
baseline_metrics = { baseline_metrics = {
'avg_rtt': sum(d['network_metrics']['round_trip_ms_avg'] for d in recent_data if d['network_metrics']['round_trip_ms_avg'] != "N/A") / len(recent_data), 'avg_rtt': sum(d['network_metrics']['rtt_avg'] for d in recent_data if 'rtt_avg' in d['network_metrics']) / len(recent_data),
'packet_loss': sum(d['network_metrics']['packet_loss_percent'] for d in recent_data if d['network_metrics']['packet_loss_percent'] != "N/A") / len(recent_data), 'packet_loss': sum(d['network_metrics']['packet_loss_rate'] for d in recent_data if 'packet_loss_rate' in d['network_metrics']) / len(recent_data),
'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data if d['cpu_temperature']['cpu_temperature'] != "N/A") / len(recent_data), 'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data if d['cpu_temperature']['cpu_temperature'] != "N/A") / len(recent_data),
'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data if d['gpu_temperature']['gpu_temperature'] != "N/A") / len(recent_data), 'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data if d['gpu_temperature']['gpu_temperature'] != "N/A") / len(recent_data),
} }

1
log_position.txt Normal file
View File

@@ -0,0 +1 @@
11989560

View File

@@ -8,21 +8,35 @@ from discord_webhook import DiscordWebhook
import requests import requests
import data_storage import data_storage
import re import re
import os
from datetime import datetime, timezone from datetime import datetime, timezone
import pingparsing
# Load configuration # Load configuration
import config import config
from syslog_rfc5424_parser import parser from syslog_rfc5424_parser import parser
LOG_POSITION_FILE = 'log_position.txt'
# --- Data Ingestion & Parsing Functions --- # --- Data Ingestion & Parsing Functions ---
def get_system_logs(): def get_system_logs():
"""Gets the last 100 lines of /var/log/syslog and parses them.""" """Gets new lines from /var/log/syslog since the last check."""
try: try:
last_position = 0
if os.path.exists(LOG_POSITION_FILE):
with open(LOG_POSITION_FILE, 'r') as f:
last_position = int(f.read())
with open("/var/log/syslog", "r") as f: with open("/var/log/syslog", "r") as f:
log_lines = f.readlines()[-100:] f.seek(last_position)
log_lines = f.readlines()
current_position = f.tell()
with open(LOG_POSITION_FILE, 'w') as f:
f.write(str(current_position))
parsed_logs = [] parsed_logs = []
for line in log_lines: for line in log_lines:
try: try:
@@ -30,7 +44,7 @@ def get_system_logs():
except Exception: except Exception:
# If parsing fails, just append the raw line # If parsing fails, just append the raw line
parsed_logs.append({"raw_log": line.strip()}) parsed_logs.append({"raw_log": line.strip()})
return {"syslog": parsed_logs} return {"syslog": parsed_logs}
except FileNotFoundError: except FileNotFoundError:
print("Error: /var/log/syslog not found.") print("Error: /var/log/syslog not found.")
@@ -39,38 +53,19 @@ def get_system_logs():
print(f"Error reading syslog: {e}") print(f"Error reading syslog: {e}")
return {"syslog": []} return {"syslog": []}
import pingparsing
def get_network_metrics(): def get_network_metrics():
"""Gets network metrics by pinging 8.8.8.8.""" """Gets network metrics by pinging 8.8.8.8."""
try: try:
ping_output = subprocess.check_output(["ping", "-c", "3", "8.8.8.8"], text=True) ping_parser = pingparsing.PingParsing()
# Use regex to parse the output transmitter = pingparsing.PingTransmitter()
packets_transmitted_match = re.search(r"(\d+) packets transmitted", ping_output) transmitter.destination = "8.8.8.8"
packets_received_match = re.search(r"(\d+) received", ping_output) transmitter.count = 3
packet_loss_match = re.search(r"(\d+\.?\d*)% packet loss", ping_output) result = transmitter.ping()
rtt_match = re.search(r"rtt min/avg/max/mdev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s ms", ping_output) return ping_parser.parse(result).as_dict()
except Exception as e:
if packets_transmitted_match and packets_received_match and packet_loss_match and rtt_match: print(f"Error getting network metrics: {e}")
return {
"packets_transmitted": int(packets_transmitted_match.group(1)),
"packets_received": int(packets_received_match.group(1)),
"packet_loss_percent": float(packet_loss_match.group(1)),
"round_trip_ms_avg": float(rtt_match.group(2)),
}
else:
# Try to parse the summary line for different ping versions
summary_match = re.search(r"(\d+) packets transmitted, (\d+) received, (\d+\.?\d*)% packet loss", ping_output)
if summary_match:
rtt_match = re.search(r"round-trip min/avg/max/stddev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s ms", ping_output)
avg_rtt = float(rtt_match.group(1)) if rtt_match else "N/A"
return {
"packets_transmitted": int(summary_match.group(1)),
"packets_received": int(summary_match.group(2)),
"packet_loss_percent": float(summary_match.group(3)),
"round_trip_ms_avg": avg_rtt,
}
return {"error": "Failed to parse ping output"}
except (subprocess.CalledProcessError, FileNotFoundError):
print("Error: 'ping' command not found or failed to execute.")
return {"error": "ping command failed"} return {"error": "ping command failed"}
def get_cpu_temperature(): def get_cpu_temperature():
@@ -129,6 +124,9 @@ def get_login_attempts():
def analyze_data_with_llm(data, baselines): def analyze_data_with_llm(data, baselines):
"""Analyzes data with the local LLM.""" """Analyzes data with the local LLM."""
with open("CONSTRAINTS.md", "r") as f:
constraints = f.read()
prompt = f""" prompt = f"""
**Role:** You are a dedicated and expert system administrator. Your primary role is to identify anomalies and provide concise, actionable reports. **Role:** You are a dedicated and expert system administrator. Your primary role is to identify anomalies and provide concise, actionable reports.
@@ -140,6 +138,9 @@ def analyze_data_with_llm(data, baselines):
**Historical Baseline Data:** **Historical Baseline Data:**
{json.dumps(baselines, indent=2)} {json.dumps(baselines, indent=2)}
**Constraints and Guidelines:**
{constraints}
**Output Request:** If you find an anomaly, provide a report as a single, coherent, natural language paragraph. The report must clearly state the anomaly, its potential cause, and its severity (e.g., high, medium, low). If no anomaly is found, respond with "OK". **Output Request:** If you find an anomaly, provide a report as a single, coherent, natural language paragraph. The report must clearly state the anomaly, its potential cause, and its severity (e.g., high, medium, low). If no anomaly is found, respond with "OK".
**Reasoning Hint:** Think step by step to come to your conclusion. This is very important. **Reasoning Hint:** Think step by step to come to your conclusion. This is very important.
@@ -169,7 +170,12 @@ def send_discord_alert(message):
def send_google_home_alert(message): def send_google_home_alert(message):
"""Sends an alert to a Google Home speaker via Home Assistant.""" """Sends an alert to a Google Home speaker via Home Assistant."""
# Simplify the message for better TTS delivery # Simplify the message for better TTS delivery
simplified_message = message.split('.')[0] # Take the first sentence try:
response = ollama.generate(model="llama3.1:8b", prompt=f"Summarize the following message in a single sentence: {message}")
simplified_message = response['response'].strip()
except Exception as e:
print(f"Error summarizing message: {e}")
simplified_message = message.split('.')[0] # Take the first sentence as a fallback
url = f"{config.HOME_ASSISTANT_URL}/api/services/tts/speak" url = f"{config.HOME_ASSISTANT_URL}/api/services/tts/speak"
headers = { headers = {
@@ -177,7 +183,7 @@ def send_google_home_alert(message):
"Content-Type": "application/json", "Content-Type": "application/json",
} }
data = { data = {
"entity_id": "tts.google_en_com", "entity_id": "all",
"media_player_entity_id": config.GOOGLE_HOME_SPEAKER_ID, "media_player_entity_id": config.GOOGLE_HOME_SPEAKER_ID,
"message": simplified_message, "message": simplified_message,
} }

View File

@@ -2,4 +2,5 @@ ollama
discord-webhook discord-webhook
requests requests
syslog-rfc5424-parser syslog-rfc5424-parser
apachelogs apachelogs
pingparsing