From b5e6ae3918f2ad46ab4d0db03a0bd64b608f89ff Mon Sep 17 00:00:00 2001 From: Spencer Date: Tue, 19 Aug 2025 15:37:16 -0500 Subject: [PATCH] Somehow, working perfectly --- CONSTRAINTS.md | 4 +- PROGRESS.md | 11 ++--- __pycache__/config.cpython-313.pyc | Bin 721 -> 721 bytes data_storage.py | 4 +- log_position.txt | 1 + monitor_agent.py | 76 ++++++++++++++++------------- requirements.txt | 3 +- 7 files changed, 54 insertions(+), 45 deletions(-) create mode 100644 log_position.txt diff --git a/CONSTRAINTS.md b/CONSTRAINTS.md index bd9bdf7..7bb5d76 100644 --- a/CONSTRAINTS.md +++ b/CONSTRAINTS.md @@ -12,4 +12,6 @@ - 120.48.49.12 - 23.137.255.140 - 101.89.148.7 -- The Only IP's that have SSH access are 192.168.2.0/24 and 100.64.0.0/10 +- On August 18th, 2025, a firewall was set up to prevent all unknown IP's from SSH access. The Only IP's that have SSH access are 192.168.2.0/24 and 100.64.0.0/10 +- Please do not report on anything that is older than 48 hours. + diff --git a/PROGRESS.md b/PROGRESS.md index 5638dd3..ea04230 100644 --- a/PROGRESS.md +++ b/PROGRESS.md @@ -36,9 +36,8 @@ ## TODO [x] Ensure we aren't using mockdata for get_system_logs() and get_network_metrics() - [x] Using live data, but need to resolve errors before moving on -[ ] Improve `get_system_logs()` to read new lines since last check -[ ] Improve `get_network_metrics()` by using a library like `pingparsing` -[ ] Ensure we are including CONSTRAINTS.md in our analyze_data_with_llm() function -[ ] Summarize entire report into a single sentence to said to Home Assistant - [ ] Figure out why Home Assitant isn't using the speaker \ No newline at end of file +[x] Improve `get_system_logs()` to read new lines since last check +[x] Improve `get_network_metrics()` by using a library like `pingparsing` +[x] Ensure we are including CONSTRAINTS.md in our analyze_data_with_llm() function +[x] Summarize entire report into a single sentence to be said to Home Assistant +[x] Figure out why Home Assistant isn't using the 
speaker \ No newline at end of file diff --git a/__pycache__/config.cpython-313.pyc b/__pycache__/config.cpython-313.pyc index 1039e915ddf236299f749b41f24c0525617e0180..aaad336383c4ff7a69d55dd00d030e10b2ed67b0 100644 GIT binary patch delta 38 rcmcb}dXbg)GcPX}0}w2_vn1o`M&2!qjBc9`F|spq2sLsRaRMa(?DGnj delta 38 rcmcb}dXbg)GcPX}0}yE3T9WZ(BkvYQ#*od27}*&)1RFVvIDrxX;f@L? diff --git a/data_storage.py b/data_storage.py index 9a27232..bebe973 100644 --- a/data_storage.py +++ b/data_storage.py @@ -29,8 +29,8 @@ def calculate_baselines(): return {} baseline_metrics = { - 'avg_rtt': sum(d['network_metrics']['round_trip_ms_avg'] for d in recent_data if d['network_metrics']['round_trip_ms_avg'] != "N/A") / len(recent_data), - 'packet_loss': sum(d['network_metrics']['packet_loss_percent'] for d in recent_data if d['network_metrics']['packet_loss_percent'] != "N/A") / len(recent_data), + 'avg_rtt': sum(d['network_metrics']['rtt_avg'] for d in recent_data if 'rtt_avg' in d['network_metrics']) / len(recent_data), + 'packet_loss': sum(d['network_metrics']['packet_loss_rate'] for d in recent_data if 'packet_loss_rate' in d['network_metrics']) / len(recent_data), 'avg_cpu_temp': sum(d['cpu_temperature']['cpu_temperature'] for d in recent_data if d['cpu_temperature']['cpu_temperature'] != "N/A") / len(recent_data), 'avg_gpu_temp': sum(d['gpu_temperature']['gpu_temperature'] for d in recent_data if d['gpu_temperature']['gpu_temperature'] != "N/A") / len(recent_data), } diff --git a/log_position.txt b/log_position.txt new file mode 100644 index 0000000..f5b7e55 --- /dev/null +++ b/log_position.txt @@ -0,0 +1 @@ +11989560 \ No newline at end of file diff --git a/monitor_agent.py b/monitor_agent.py index 7c17ef5..5f56c43 100644 --- a/monitor_agent.py +++ b/monitor_agent.py @@ -8,21 +8,35 @@ from discord_webhook import DiscordWebhook import requests import data_storage import re +import os from datetime import datetime, timezone +import pingparsing # Load configuration import config from 
syslog_rfc5424_parser import parser +LOG_POSITION_FILE = 'log_position.txt' + # --- Data Ingestion & Parsing Functions --- def get_system_logs(): - """Gets the last 100 lines of /var/log/syslog and parses them.""" + """Gets new lines from /var/log/syslog since the last check.""" try: + last_position = 0 + if os.path.exists(LOG_POSITION_FILE): + with open(LOG_POSITION_FILE, 'r') as f: + last_position = int(f.read()) + with open("/var/log/syslog", "r") as f: - log_lines = f.readlines()[-100:] - + f.seek(last_position) + log_lines = f.readlines() + current_position = f.tell() + + with open(LOG_POSITION_FILE, 'w') as f: + f.write(str(current_position)) + parsed_logs = [] for line in log_lines: try: @@ -30,7 +44,7 @@ def get_system_logs(): except Exception: # If parsing fails, just append the raw line parsed_logs.append({"raw_log": line.strip()}) - + return {"syslog": parsed_logs} except FileNotFoundError: print("Error: /var/log/syslog not found.") @@ -39,38 +53,19 @@ def get_system_logs(): print(f"Error reading syslog: {e}") return {"syslog": []} +import pingparsing + def get_network_metrics(): """Gets network metrics by pinging 8.8.8.8.""" try: - ping_output = subprocess.check_output(["ping", "-c", "3", "8.8.8.8"], text=True) - # Use regex to parse the output - packets_transmitted_match = re.search(r"(\d+) packets transmitted", ping_output) - packets_received_match = re.search(r"(\d+) received", ping_output) - packet_loss_match = re.search(r"(\d+\.?\d*)% packet loss", ping_output) - rtt_match = re.search(r"rtt min/avg/max/mdev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s ms", ping_output) - - if packets_transmitted_match and packets_received_match and packet_loss_match and rtt_match: - return { - "packets_transmitted": int(packets_transmitted_match.group(1)), - "packets_received": int(packets_received_match.group(1)), - "packet_loss_percent": float(packet_loss_match.group(1)), - "round_trip_ms_avg": float(rtt_match.group(2)), - } - else: - # Try to parse the summary line 
for different ping versions - summary_match = re.search(r"(\d+) packets transmitted, (\d+) received, (\d+\.?\d*)% packet loss", ping_output) - if summary_match: - rtt_match = re.search(r"round-trip min/avg/max/stddev = [\d\.]+/([\d\.]+)/[\d\.]+/([\d\.]+)\s ms", ping_output) - avg_rtt = float(rtt_match.group(1)) if rtt_match else "N/A" - return { - "packets_transmitted": int(summary_match.group(1)), - "packets_received": int(summary_match.group(2)), - "packet_loss_percent": float(summary_match.group(3)), - "round_trip_ms_avg": avg_rtt, - } - return {"error": "Failed to parse ping output"} - except (subprocess.CalledProcessError, FileNotFoundError): - print("Error: 'ping' command not found or failed to execute.") + ping_parser = pingparsing.PingParsing() + transmitter = pingparsing.PingTransmitter() + transmitter.destination = "8.8.8.8" + transmitter.count = 3 + result = transmitter.ping() + return ping_parser.parse(result).as_dict() + except Exception as e: + print(f"Error getting network metrics: {e}") return {"error": "ping command failed"} def get_cpu_temperature(): @@ -129,6 +124,9 @@ def get_login_attempts(): def analyze_data_with_llm(data, baselines): """Analyzes data with the local LLM.""" + with open("CONSTRAINTS.md", "r") as f: + constraints = f.read() + prompt = f""" **Role:** You are a dedicated and expert system administrator. Your primary role is to identify anomalies and provide concise, actionable reports. @@ -140,6 +138,9 @@ def analyze_data_with_llm(data, baselines): **Historical Baseline Data:** {json.dumps(baselines, indent=2)} + **Constraints and Guidelines:** + {constraints} + **Output Request:** If you find an anomaly, provide a report as a single, coherent, natural language paragraph. The report must clearly state the anomaly, its potential cause, and its severity (e.g., high, medium, low). If no anomaly is found, respond with "OK". **Reasoning Hint:** Think step by step to come to your conclusion. This is very important. 
@@ -169,7 +170,12 @@ def send_discord_alert(message): def send_google_home_alert(message): """Sends an alert to a Google Home speaker via Home Assistant.""" # Simplify the message for better TTS delivery - simplified_message = message.split('.')[0] # Take the first sentence + try: + response = ollama.generate(model="llama3.1:8b", prompt=f"Summarize the following message in a single sentence: {message}") + simplified_message = response['response'].strip() + except Exception as e: + print(f"Error summarizing message: {e}") + simplified_message = message.split('.')[0] # Take the first sentence as a fallback url = f"{config.HOME_ASSISTANT_URL}/api/services/tts/speak" headers = { @@ -177,7 +183,7 @@ def send_google_home_alert(message): "Content-Type": "application/json", } data = { - "entity_id": "tts.google_en_com", + "entity_id": "all", "media_player_entity_id": config.GOOGLE_HOME_SPEAKER_ID, "message": simplified_message, } diff --git a/requirements.txt b/requirements.txt index a03b1e7..dc93395 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,5 @@ ollama discord-webhook requests syslog-rfc5424-parser -apachelogs \ No newline at end of file +apachelogs +pingparsing \ No newline at end of file