From dca95f5780d951be57cf25c69818275dac7dbc92 Mon Sep 17 00:00:00 2001 From: Thomas Williams Date: Thu, 4 Jul 2024 16:47:37 +0100 Subject: [PATCH] Closes issue #19 --- README.md | 25 ++++----- config.py | 7 +-- log.py | 156 +++++++++++++++++++++++++++++++++++------------------- main.py | 3 +- 4 files changed, 122 insertions(+), 69 deletions(-) diff --git a/README.md b/README.md index 5cf02fb..2f57743 100644 --- a/README.md +++ b/README.md @@ -5,15 +5,16 @@ Both CPU/RAM monitoring and URL monitoring can be set on their own monitoring pe **Configuration options:** -- hostMonitoringPeriod - the delay in between the CPU and RAM usage being probed (defined in seconds). -- urlMonitoringPeriod - the delay in between monitoring all of the URLs (defined in seconds). -- urls - the list of URLs to monitor (e.g. ["url1", "url2"]). -- urlTimeout - the delay before considering a URL to have timed out. -- maxWorkers - the amount of threads to use when pulling URL resources. Do not set above the maximum number of threads on the host. -- forceNonPOSIXCPU - For POSIX compatible systems, psutil.getloadavg() is executed which relies on os.getloadavg(). For Windows, this seemingly returns 0 (at least on the version executed during development). For Windows, a custom function has been built to obtain running CPU averages, but you can choose to use this function on POSIX systems by setting this variable to True. -- loggingMode - Valid options: mssql, mariadb, rabbitmq, none. -- sqlServer - the address of the SQL server which to write the data. -- sqlDatabase - the database to write the data. -- sqlUsername - the username used to authenticate to the SQL server. -- sqlPassword - the password used to authenticate to the SQL server. -- logRetentionDays - the maximum age logs should be kept. +- **hostMonitoringPeriod** - the delay in between the CPU and RAM usage being probed (defined in seconds). 
+- **urlMonitoringPeriod** - the delay in between monitoring all of the URLs (defined in seconds). +- **urls** - the list of URLs to monitor (e.g. ["url1", "url2"]). +- **urlTimeout** - the delay before considering a URL to have timed out. +- **maxWorkers** - the amount of threads to use when pulling URL resources. Do not set above the maximum number of threads on the host. +- **forceNonPOSIXCPU** - For POSIX compatible systems, psutil.getloadavg() is executed which relies on os.getloadavg(). For Windows, this seemingly returns 0 (at least on the version executed during development). For Windows, a custom function has been built to obtain running CPU averages, but you can choose to use this function on POSIX systems by setting this variable to True. +- **loggingMode** - Valid options: mssql, mariadb, rabbitmq, none. +- **sqlServer** - the address of the SQL server which to write the data. +- **sqlDatabase** - the database to write the data. +- **sqlUsername** - the username used to authenticate to the SQL server. +- **sqlPassword** - the password used to authenticate to the SQL server. +- **logRetentionDays** - the maximum age logs should be kept. +- **maximumSQLAttempts** - the maximum number of attempts to try certain SQL operations diff --git a/config.py b/config.py index 7befe9e..26afbdd 100644 --- a/config.py +++ b/config.py @@ -2,8 +2,8 @@ # See README before changing any of these options. -hostMonitoringPeriod = 1 -urlMonitoringPeriod = 10 +hostMonitoringPeriod = 15 +urlMonitoringPeriod = 60 urls = ["https://1.1.1.1"] urlTimeout = 10 maxWorkers = 4 @@ -13,4 +13,5 @@ sqlServer = '' sqlDatabase = '' sqlUsername = '' sqlPassword = '' -logRetentionDays = '90' +logRetentionDays = 90 +maximumSQLAttempts = 3 diff --git a/log.py b/log.py index 857defb..7b39ce6 100644 --- a/log.py +++ b/log.py @@ -23,7 +23,8 @@ # SOFTWARE. 
import pyodbc -from config import logRetentionDays +import time +from config import logRetentionDays, maximumSQLAttempts from datetime import datetime, timedelta class logsManager: @@ -32,79 +33,128 @@ class logsManager: self.conn_str = 'DRIVER={ODBC Driver 17 for SQL Server};SERVER=' + server + ';DATABASE=' + database + ';UID=' + username + ';PWD=' + password def insertHost(self, hostname, ipAddress): - - try: - - conn = pyodbc.connect(self.conn_str) - cursor = conn.cursor() - cursor.execute("SELECT COUNT(*) FROM monutil_hosts WHERE hostname = ?", hostname) - - if cursor.fetchone()[0] == 0: + + currentAttempts = 1 + + while currentAttempts <= maximumSQLAttempts: + + try: + + conn = pyodbc.connect(self.conn_str) + cursor = conn.cursor() + cursor.execute("SELECT COUNT(*) FROM monutil_hosts WHERE hostname = ?", hostname) + + if cursor.fetchone()[0] == 0: + + cursor.execute("INSERT INTO monutil_hosts (hostname, ipAddress) VALUES (?, ?)", hostname, ipAddress) + conn.commit() + conn.close() + break + + else: + + cursor.execute("UPDATE monutil_hosts SET ipAddress = ? WHERE hostname = ?", ipAddress, hostname) + conn.close() + break + + except pyodbc.Error as ex: + + currentAttempts += 1 + print("SQL Error: {}".format(str(ex))) - cursor.execute("INSERT INTO monutil_hosts (hostname, ipAddress) VALUES (?, ?)", hostname, ipAddress) - conn.commit() + if not currentAttempts <= maximumSQLAttempts: + raise - else: + time.sleep(1) - cursor.execute("UPDATE monutil_hosts SET ipAddress = ? 
WHERE hostname = ?", ipAddress, hostname) - - except pyodbc.Error as ex: - - print("Error inserting data: {}".format(str(ex))) def insertHostLog(self, hostname, ipAddress, log_time, cpu, memory): - try: - - self.insertHost(hostname, ipAddress) - self.deleteOldLogs("monutil_hostLogs", "logTime") - - conn = pyodbc.connect(self.conn_str) - cursor = conn.cursor() + currentAttempts = 1 - cursor.execute("INSERT INTO monutil_hostlogs (hostname, logTime, cpu, memory) VALUES (?, ?, ?, ?)", hostname, log_time, cpu, memory) - conn.commit() - conn.close() + self.insertHost(hostname, ipAddress) + self.deleteOldLogs("monutil_hostlogs", "logTime") + + while currentAttempts <= maximumSQLAttempts: - except pyodbc.Error as ex: + try: + + conn = pyodbc.connect(self.conn_str) + cursor = conn.cursor() + + cursor.execute("INSERT INTO monutil_hostlogs (hostname, logTime, cpu, memory) VALUES (?, ?, ?, ?)", hostname, log_time, cpu, memory) + conn.commit() + conn.close() + break + + except pyodbc.Error as ex: - print("Error inserting data: {}".format(str(ex))) + currentAttempts += 1 + print("Error inserting data: {}".format(str(ex))) + + if not currentAttempts <= maximumSQLAttempts: + raise + + time.sleep(1) def insertURLLog(self, hostname, ipAddress, log_time, url, responseTime): - try: + currentAttempts = 1 - self.insertHost(hostname, ipAddress) - self.deleteOldLogs("monutil_urlLogs", "logTime") + self.insertHost(hostname, ipAddress) + self.deleteOldLogs("monutil_urllogs", "logTime") + + while currentAttempts <= maximumSQLAttempts: - conn = pyodbc.connect(self.conn_str) - cursor = conn.cursor() + try: - cursor.execute("INSERT INTO monutil_urlLogs (hostname, url, logTime, responseTime) VALUES (?, ?, ?, ?)", hostname, url, log_time, responseTime) + conn = pyodbc.connect(self.conn_str) + cursor = conn.cursor() - conn.commit() - conn.close() + cursor.execute("INSERT INTO monutil_urlLogs (hostname, url, logTime, responseTime) VALUES (?, ?, ?, ?)", hostname, url, log_time, responseTime) + 
conn.commit() + conn.close() + break - except pyodbc.Error as ex: + except pyodbc.Error as ex: - print("Error inserting data into monutil_urlLogs:", ex) + currentAttempts += 1 + print("Error inserting data into monutil_urlLogs:", ex) + + if not currentAttempts <= maximumSQLAttempts: + raise + + time.sleep(1) def deleteOldLogs(self, tableName, logTimeColumn): + + currentAttempts = 1 - try: - - conn = pyodbc.connect(self.conn_str) - cursor = conn.cursor() + while currentAttempts <= maximumSQLAttempts: - oldestLogQuery = f"SELECT TOP 1 {logTimeColumn} FROM {tableName} ORDER BY {logTimeColumn} ASC" - cursor.execute(oldestLogQuery) - oldestLogTime = cursor.fetchone()[0] + try: - if oldestLogTime is not None and oldestLogTime < datetime.now() - timedelta(days=int(logRetentionDays)): + conn = pyodbc.connect(self.conn_str) + cursor = conn.cursor() - deleteQuery = f"DELETE FROM {tableName} WHERE {logTimeColumn} < ?" - cursor.execute(deleteQuery, datetime.now() - timedelta(days=int(logRetentionDays))) - conn.commit() - except pyodbc.Error as ex: - - print("Error deleting old logs: {}".format(str(ex))) + oldestLogQuery = f"SELECT TOP 1 {logTimeColumn} FROM {tableName} ORDER BY {logTimeColumn} ASC" + cursor.execute(oldestLogQuery) + oldestLogTime = cursor.fetchone()[0] + + if oldestLogTime is not None and oldestLogTime < datetime.now() - timedelta(days=int(logRetentionDays)): + + deleteQuery = f"DELETE FROM {tableName} WHERE {logTimeColumn} < ?" 
+ cursor.execute(deleteQuery, datetime.now() - timedelta(days=int(logRetentionDays))) + conn.commit() + + break + + except pyodbc.Error as ex: + + currentAttempts += 1 + print("Error deleting old logs: {}".format(str(ex))) + + if not currentAttempts <= maximumSQLAttempts: + raise + + time.sleep(1) diff --git a/main.py b/main.py index 735755d..652736a 100755 --- a/main.py +++ b/main.py @@ -160,6 +160,7 @@ def monitorUrls(stop_event): def logHostLog(hostname, logTime, cpu, memory): if not config.loggingMode == 'none': + manager = logsManager(config.sqlServer, config.sqlDatabase, config.sqlUsername, config.sqlPassword) manager.insertHostLog(hostname, socket.gethostbyname(socket.gethostname()), logTime, cpu, memory) @@ -173,7 +174,7 @@ def main(): signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGINT, signal_handler) - + hostMonitorThread = threading.Thread(target=monitorHost, args=(stop_event,)) urlMonitorThread = threading.Thread(target=monitorUrls, args=(stop_event,))