This closes issue #20.

This commit is contained in:
Thomas Williams 2024-07-05 13:42:28 +01:00
parent 29546b6280
commit 26d2d9b168
Signed by: thomas
GPG key ID: EB8F975CF60BCBFF
2 changed files with 64 additions and 51 deletions

View file

@ -17,4 +17,9 @@ Both CPU/RAM monitoring and URL monitoring can be set on their own monitoring pe
- **sqlUsername** - the username used to authenticate to the SQL server.
- **sqlPassword** - the password used to authenticate to the SQL server.
- **logRetentionDays** - the maximum age logs should be kept.
- **maximumSQLAttempts** - the maximum number of attempts to try certain SQL operations
- **maximumSQLAttempts** - the maximum number of attempts to try certain SQL operations.
- **hostMonitorStartTime** - the earliest time at which the host monitor should start.
- **hostMonitorEndTime** - the end time at which the host monitor should shut down.
- **urlMonitorStartTime** - the earliest time at which the url monitor should start.
- **urlMonitorEndTime** - the end time at which the url monitor should shut down.

108
main.py
View file

@ -88,80 +88,88 @@ def getNonPOSIXCPUAverage():
return avgLoad
def monitorHost(stop_event):
    """Periodically sample CPU load and memory usage and log them.

    Runs until *stop_event* is set. Sampling only happens while the current
    wall-clock time (as a "%H:%M:%S" string) is inside the configured
    window [config.hostMonitorStartTime, config.hostMonitorEndTime];
    outside the window the thread idles in 1-second sleeps, so it keeps
    running across days instead of exiting (the fix for issue #20).

    NOTE(review): reconstructed from a mangled diff paste — the interleaved
    old/new lines were deduplicated to the post-change version; confirm
    against the actual commit.

    Args:
        stop_event: threading.Event used to request shutdown.
    """
    nonPOSIXCPUStarted = False
    while not stop_event.is_set():
        # Inner loop: only sample while inside the configured time window.
        while not stop_event.is_set() and (
            config.hostMonitorStartTime
            <= time.strftime("%H:%M:%S")
            <= config.hostMonitorEndTime
        ):
            if os.name != 'posix' or config.forceNonPOSIXCPU:
                # os.getloadavg-style data is unreliable off POSIX, so a
                # dedicated sampler thread is started once on demand.
                if not nonPOSIXCPUStarted:
                    nonPOSIXCPUMonitor = threading.Thread(
                        target=nonPOSIXCPULoad, args=(stop_event,)
                    )
                    nonPOSIXCPUMonitor.start()
                    nonPOSIXCPUStarted = True
                loadavg = round(getNonPOSIXCPUAverage(), 2)
            else:
                # this takes time to warm up if not running script on *nix
                load1, load5, load15 = psutil.getloadavg()
                # Normalise 1-minute load to a percentage of available cores.
                loadavg = round((load1 / os.cpu_count()) * 100, 2)
            memory = psutil.virtual_memory().percent
            logHostLog(socket.gethostname(), datetime.now(), loadavg, memory)
            print("CPU %: " + str(loadavg))
            print("Memory %: " + str(memory))
            print()  # new line
            time.sleep(config.hostMonitoringPeriod)
        # Outside the monitoring window: idle cheaply but stay responsive
        # to stop_event so the thread can resume the next day.
        time.sleep(1)
def monitorUrls(stop_event):
    """Periodically fetch each configured URL (plus its images) and log timings.

    Runs until *stop_event* is set. Fetching only happens while the current
    wall-clock time (as a "%H:%M:%S" string) is inside the configured
    window [config.urlMonitorStartTime, config.urlMonitorEndTime]; outside
    the window the thread idles in 1-second sleeps instead of exiting
    (the fix for issue #20).

    NOTE(review): reconstructed from a mangled diff paste — the interleaved
    old/new lines were deduplicated to the post-change version; confirm
    against the actual commit. In particular, urlFail is set but its
    consumer is not visible in this chunk.

    Args:
        stop_event: threading.Event used to request shutdown.
    """
    while not stop_event.is_set():
        # Inner loop: only monitor while inside the configured time window.
        while not stop_event.is_set() and (
            config.urlMonitorStartTime
            <= time.strftime("%H:%M:%S")
            <= config.urlMonitorEndTime
        ):
            for url in config.urls:
                baseUrl = url
                urlFail = False
                startTime = time.time()
                request = loadUrl(url)
                if request.status_code == 200:
                    # Pull every <img> src so page load time includes assets.
                    html = BeautifulSoup(request.content, 'html.parser')
                    imageUrls = [img['src'] for img in html.find_all('img')]
                    # Fetch images concurrently to approximate a browser load.
                    with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
                        responses = [
                            executor.submit(loadUrl, prepareUrl(url, baseUrl))
                            for url in imageUrls
                        ]
                        responses = [
                            future.result() for future in as_completed(responses)
                        ]
                    for response in responses:
                        if not response.status_code == 200:
                            urlFail = True
                    endTime = time.time()
                    timeDiff = endTime - startTime
                    print(baseUrl + " response time: " + str(timeDiff))
                    print()  # new line
                    logURLLog(socket.gethostname(), datetime.now(), baseUrl, timeDiff)
                else:
                    urlFail = True
                time.sleep(config.urlMonitoringPeriod)
        # Outside the monitoring window: idle cheaply but stay responsive
        # to stop_event so the thread can resume the next day.
        time.sleep(1)
def logHostLog(hostname, logTime, cpu, memory):