Closes issue #19

parent a677ce90ba
commit dca95f5780

4 changed files with 122 additions and 69 deletions
README.md (25 changes)

@@ -5,15 +5,16 @@ Both CPU/RAM monitoring and URL monitoring can be set on their own monitoring pe
 
 **Configuration options:**
 
-- hostMonitoringPeriod - the delay in between the CPU and RAM usage being probed (defined in seconds).
-- urlMonitoringPeriod - the delay in between monitoring all of the URLs (defined in seconds).
-- urls - the list of URLs to monitor (e.g. ["url1", "url2"]).
-- urlTimeout - the delay before considering a URL to have timed out.
-- maxWorkers - the amount of threads to use when pulling URL resources. Do not set above the maximum number of threads on the host.
-- forceNonPOSIXCPU - For POSIX compatible systems, psutil.getloadavg() is executed which relies on os.getloadavg(). For Windows, this seemingly returns 0 (at least on the version executed during development). For Windows, a custom function has been built to obtain running CPU averages, but you can choose to use this function on POSIX systems by setting this variable to True.
-- loggingMode - Valid options: mssql, mariadb, rabbitmq, none.
-- sqlServer - the address of the SQL server which to write the data.
-- sqlDatabase - the database to write the data.
-- sqlUsername - the username used to authenticate to the SQL server.
-- sqlPassword - the password used to authenticate to the SQL server.
-- logRetentionDays - the maximum age logs should be kept.
+- **hostMonitoringPeriod** - the delay in between the CPU and RAM usage being probed (defined in seconds).
+- **urlMonitoringPeriod** - the delay in between monitoring all of the URLs (defined in seconds).
+- **urls** - the list of URLs to monitor (e.g. ["url1", "url2"]).
+- **urlTimeout** - the delay before considering a URL to have timed out.
+- **maxWorkers** - the amount of threads to use when pulling URL resources. Do not set above the maximum number of threads on the host.
+- **forceNonPOSIXCPU** - For POSIX compatible systems, psutil.getloadavg() is executed which relies on os.getloadavg(). For Windows, this seemingly returns 0 (at least on the version executed during development). For Windows, a custom function has been built to obtain running CPU averages, but you can choose to use this function on POSIX systems by setting this variable to True.
+- **loggingMode** - Valid options: mssql, mariadb, rabbitmq, none.
+- **sqlServer** - the address of the SQL server which to write the data.
+- **sqlDatabase** - the database to write the data.
+- **sqlUsername** - the username used to authenticate to the SQL server.
+- **sqlPassword** - the password used to authenticate to the SQL server.
+- **logRetentionDays** - the maximum age logs should be kept.
+- **maximumSQLAttempts** - the maximum number of attempts to try certain SQL operations
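Of these options, forceNonPOSIXCPU is the least obvious. A minimal sketch of the platform split the README describes, with hypothetical names (nonPOSIXCPUAverage() stands in for the custom Windows routine; its real implementation is not shown in this diff):

```python
# Sketch only: the POSIX/Windows split described in the README.
# nonPOSIXCPUAverage() is a hypothetical stand-in for the repo's custom
# Windows routine; the real function is not part of this diff.
import os
import psutil

def nonPOSIXCPUAverage():
    # Stand-in: average a few instantaneous CPU utilisation samples
    # (a percentage, unlike the load averages returned on POSIX).
    samples = [psutil.cpu_percent(interval=0.1) for _ in range(5)]
    return sum(samples) / len(samples)

def cpuLoad(forceNonPOSIXCPU=False):
    if os.name == 'posix' and not forceNonPOSIXCPU:
        # psutil.getloadavg() relies on os.getloadavg(), which the README
        # notes can report 0 on Windows.
        return psutil.getloadavg()[0]  # 1-minute load average
    return nonPOSIXCPUAverage()
```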
config.py (7 changes)

@@ -2,8 +2,8 @@
 
 # See README before changing any of these options.
 
-hostMonitoringPeriod = 1
-urlMonitoringPeriod = 10
+hostMonitoringPeriod = 15
+urlMonitoringPeriod = 60
 urls = ["https://1.1.1.1"]
 urlTimeout = 10
 maxWorkers = 4
@@ -13,4 +13,5 @@ sqlServer = ''
 sqlDatabase = ''
 sqlUsername = ''
 sqlPassword = ''
-logRetentionDays = '90'
+logRetentionDays = 90
+maximumSQLAttempts = 3
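Two details worth noting here: logRetentionDays changes from the string '90' to the integer 90 (log.py wraps it in int(), so both parse, but the integer is the safer type), and the new maximumSQLAttempts value feeds the retry loops added in log.py below. A hypothetical startup check, not part of this commit, could catch bad values early; the valid loggingMode names are taken from the README:

```python
# Hypothetical startup validation, not part of this commit.
import config

def validateConfig():
    # Valid loggingMode values per the README.
    assert config.loggingMode in ('mssql', 'mariadb', 'rabbitmq', 'none'), \
        "loggingMode must be one of: mssql, mariadb, rabbitmq, none"
    # logRetentionDays was the string '90' before this commit; int() in
    # log.py accepts both, but an integer avoids accidental type drift.
    assert int(config.logRetentionDays) > 0
    # With 0 attempts the new while loops would silently skip every SQL call.
    assert config.maximumSQLAttempts >= 1
    assert config.maxWorkers >= 1
```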
log.py (156 changes)

@@ -23,7 +23,8 @@
 # SOFTWARE.
 
 import pyodbc
-from config import logRetentionDays
+import time
+from config import logRetentionDays, maximumSQLAttempts
 from datetime import datetime, timedelta
 
 class logsManager:
@@ -32,79 +33,128 @@ class logsManager:
         self.conn_str = 'DRIVER={ODBC Driver 17 for SQL Server};SERVER=' + server + ';DATABASE=' + database + ';UID=' + username + ';PWD=' + password
 
     def insertHost(self, hostname, ipAddress):
 
-        try:
-
-            conn = pyodbc.connect(self.conn_str)
-            cursor = conn.cursor()
-            cursor.execute("SELECT COUNT(*) FROM monutil_hosts WHERE hostname = ?", hostname)
-
-            if cursor.fetchone()[0] == 0:
-
-                cursor.execute("INSERT INTO monutil_hosts (hostname, ipAddress) VALUES (?, ?)", hostname, ipAddress)
-                conn.commit()
-
-            else:
-
-                cursor.execute("UPDATE monutil_hosts SET ipAddress = ? WHERE hostname = ?", ipAddress, hostname)
-
-        except pyodbc.Error as ex:
-
-            print("Error inserting data: {}".format(str(ex)))
+        currentAttempts = 1
+
+        while currentAttempts <= maximumSQLAttempts:
+
+            try:
+
+                conn = pyodbc.connect(self.conn_str)
+                cursor = conn.cursor()
+                cursor.execute("SELECT COUNT(*) FROM monutil_hosts WHERE hostname = ?", hostname)
+
+                if cursor.fetchone()[0] == 0:
+
+                    cursor.execute("INSERT INTO monutil_hosts (hostname, ipAddress) VALUES (?, ?)", hostname, ipAddress)
+                    conn.commit()
+                    conn.close()
+                    break
+
+                else:
+
+                    cursor.execute("UPDATE monutil_hosts SET ipAddress = ? WHERE hostname = ?", ipAddress, hostname)
+                    conn.close()
+                    break
+
+            except pyodbc.Error as ex:
+
+                currentAttempts += 1
+                print("SQL Error: {}".format(str(ex)))
+
+                if not currentAttempts <= maximumSQLAttempts:
+                    raise
+
+                time.sleep(1)
 
     def insertHostLog(self, hostname, ipAddress, log_time, cpu, memory):
 
-        try:
-
-            self.insertHost(hostname, ipAddress)
-            self.deleteOldLogs("monutil_hostLogs", "logTime")
-
-            conn = pyodbc.connect(self.conn_str)
-            cursor = conn.cursor()
-
-            cursor.execute("INSERT INTO monutil_hostlogs (hostname, logTime, cpu, memory) VALUES (?, ?, ?, ?)", hostname, log_time, cpu, memory)
-            conn.commit()
-            conn.close()
-
-        except pyodbc.Error as ex:
-
-            print("Error inserting data: {}".format(str(ex)))
+        currentAttempts = 1
+
+        self.insertHost(hostname, ipAddress)
+        self.deleteOldLogs("monutil_hostlogs", "logTime")
+
+        while currentAttempts <= maximumSQLAttempts:
+
+            try:
+
+                conn = pyodbc.connect(self.conn_str)
+                cursor = conn.cursor()
+
+                cursor.execute("INSERT INTO monutil_hostlogs (hostname, logTime, cpu, memory) VALUES (?, ?, ?, ?)", hostname, log_time, cpu, memory)
+                conn.commit()
+                conn.close()
+                break
+
+            except pyodbc.Error as ex:
+
+                currentAttempts += 1
+                print("Error inserting data: {}".format(str(ex)))
+
+                if not currentAttempts <= maximumSQLAttempts:
+                    raise
+
+                time.sleep(1)
 
     def insertURLLog(self, hostname, ipAddress, log_time, url, responseTime):
 
-        try:
-
-            self.insertHost(hostname, ipAddress)
-            self.deleteOldLogs("monutil_urlLogs", "logTime")
-
-            conn = pyodbc.connect(self.conn_str)
-            cursor = conn.cursor()
-
-            cursor.execute("INSERT INTO monutil_urlLogs (hostname, url, logTime, responseTime) VALUES (?, ?, ?, ?)", hostname, url, log_time, responseTime)
-            conn.commit()
-            conn.close()
-
-        except pyodbc.Error as ex:
-
-            print("Error inserting data into monutil_urlLogs:", ex)
+        currentAttempts = 1
+
+        self.insertHost(hostname, ipAddress)
+        self.deleteOldLogs("monutil_urllogs", "logTime")
+
+        while currentAttempts <= maximumSQLAttempts:
+
+            try:
+
+                conn = pyodbc.connect(self.conn_str)
+                cursor = conn.cursor()
+
+                cursor.execute("INSERT INTO monutil_urlLogs (hostname, url, logTime, responseTime) VALUES (?, ?, ?, ?)", hostname, url, log_time, responseTime)
+                conn.commit()
+                conn.close()
+                break
+
+            except pyodbc.Error as ex:
+
+                currentAttempts += 1
+                print("Error inserting data into monutil_urlLogs:", ex)
+
+                if not currentAttempts <= maximumSQLAttempts:
+                    raise
+
+                time.sleep(1)
 
     def deleteOldLogs(self, tableName, logTimeColumn):
 
-        try:
-
-            conn = pyodbc.connect(self.conn_str)
-            cursor = conn.cursor()
-
-            oldestLogQuery = f"SELECT TOP 1 {logTimeColumn} FROM {tableName} ORDER BY {logTimeColumn} ASC"
-            cursor.execute(oldestLogQuery)
-            oldestLogTime = cursor.fetchone()[0]
-
-            if oldestLogTime is not None and oldestLogTime < datetime.now() - timedelta(days=int(logRetentionDays)):
-
-                deleteQuery = f"DELETE FROM {tableName} WHERE {logTimeColumn} < ?"
-                cursor.execute(deleteQuery, datetime.now() - timedelta(days=int(logRetentionDays)))
-                conn.commit()
-
-        except pyodbc.Error as ex:
-
-            print("Error deleting old logs: {}".format(str(ex)))
+        currentAttempts = 1
+
+        while currentAttempts <= maximumSQLAttempts:
+
+            try:
+
+                conn = pyodbc.connect(self.conn_str)
+                cursor = conn.cursor()
+
+                oldestLogQuery = f"SELECT TOP 1 {logTimeColumn} FROM {tableName} ORDER BY {logTimeColumn} ASC"
+                cursor.execute(oldestLogQuery)
+                oldestLogTime = cursor.fetchone()[0]
+
+                if oldestLogTime is not None and oldestLogTime < datetime.now() - timedelta(days=int(logRetentionDays)):
+
+                    deleteQuery = f"DELETE FROM {tableName} WHERE {logTimeColumn} < ?"
+                    cursor.execute(deleteQuery, datetime.now() - timedelta(days=int(logRetentionDays)))
+                    conn.commit()
+
+                break
+
+            except pyodbc.Error as ex:
+
+                currentAttempts += 1
+                print("Error deleting old logs: {}".format(str(ex)))
+
+                if not currentAttempts <= maximumSQLAttempts:
+                    raise
+
+                time.sleep(1)
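The same bounded-retry shape now appears in all four methods: try the operation, count the failure, re-raise once currentAttempts exceeds maximumSQLAttempts, and otherwise sleep a second and try again. If the duplication ever becomes a maintenance burden, it could be factored into one helper; a hypothetical sketch (withSQLRetries is not part of this commit):

```python
# Hypothetical helper, not part of this commit: the retry loop added to
# insertHost, insertHostLog, insertURLLog and deleteOldLogs, in one place.
import time

import pyodbc

from config import maximumSQLAttempts

def withSQLRetries(operation, *args, **kwargs):
    currentAttempts = 1
    while True:
        try:
            return operation(*args, **kwargs)
        except pyodbc.Error as ex:
            currentAttempts += 1
            print("SQL Error: {}".format(str(ex)))
            if not currentAttempts <= maximumSQLAttempts:
                raise  # out of attempts, mirroring the commit's behaviour
            time.sleep(1)  # same one-second pause between attempts
```

Each method could then wrap a single-attempt body, e.g. withSQLRetries(self._insertHostOnce, hostname, ipAddress) (with _insertHostOnce as a hypothetical one-shot version of insertHost), keeping the retry policy in one place.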
main.py (3 changes)

@@ -160,6 +160,7 @@ def monitorUrls(stop_event):
 def logHostLog(hostname, logTime, cpu, memory):
 
     if not config.loggingMode == 'none':
 
         manager = logsManager(config.sqlServer, config.sqlDatabase, config.sqlUsername, config.sqlPassword)
         manager.insertHostLog(hostname, socket.gethostbyname(socket.gethostname()), logTime, cpu, memory)
@@ -173,7 +174,7 @@ def main():
 
     signal.signal(signal.SIGTERM, signal_handler)
     signal.signal(signal.SIGINT, signal_handler)
 
     hostMonitorThread = threading.Thread(target=monitorHost, args=(stop_event,))
     urlMonitorThread = threading.Thread(target=monitorUrls, args=(stop_event,))
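For context, the surrounding code in main() follows the usual threading.Event shutdown pattern: the signal handlers set stop_event and each monitor loop checks it. A minimal self-contained sketch under that assumption; the loop body is a placeholder, not this repo's actual probing code:

```python
# Minimal sketch of the stop_event pattern visible in main(); the worker
# body is a placeholder, not this repo's monitoring code.
import signal
import threading

stop_event = threading.Event()

def signal_handler(signum, frame):
    stop_event.set()  # tell worker threads to wind down

def monitorHost(stop_event):
    while not stop_event.is_set():
        # placeholder: probe CPU/RAM here
        stop_event.wait(1)  # sleep, but wake immediately on shutdown

def main():
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    hostMonitorThread = threading.Thread(target=monitorHost, args=(stop_event,))
    hostMonitorThread.start()
    hostMonitorThread.join()

if __name__ == '__main__':
    main()
```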