monutil/main.py

#!/usr/bin/python3
# MIT License
# Copyright (c) 2024 Thomas Williams - https://git.server.wales/thomas
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
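#
# monutil: a small monitoring agent that periodically samples host CPU and
# memory usage and times how long configured URLs (including their images)
# take to load, then prints each reading and forwards it to SQL or RabbitMQ
# according to config.loggingMode.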
import config
import psutil
import os
import time
import log
import requests
import threading
import signal
import socket
from functools import partial
from concurrent.futures import ThreadPoolExecutor, as_completed
from bs4 import BeautifulSoup
from log import logsManager
from datetime import datetime
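# The companion config module supplies every setting referenced below. A minimal
# illustrative sketch (placeholder values only, not the project's actual defaults):
#
#   loggingMode = 'none'                  # 'none', 'rabbitmq', or anything else for SQL logging
#   urls = ['https://example.com/']       # pages to time, including their images
#   urlTimeout = 10                       # per-request timeout in seconds
#   maxWorkers = 4                        # thread pool size for fetching images
#   urlMonitorStartTime = '00:00:00'      # HH:MM:SS window for URL checks
#   urlMonitorEndTime = '23:59:59'
#   urlMonitoringPeriod = 60              # seconds between URL sweeps
#   hostMonitorStartTime = '00:00:00'     # HH:MM:SS window for host checks
#   hostMonitorEndTime = '23:59:59'
#   hostMonitoringPeriod = 60             # seconds between CPU/memory samples
#   forceNonPOSIXCPU = False              # use the sampling thread even on POSIX hosts
#   sqlServer = sqlDatabase = sqlUsername = sqlPassword = ''      # SQL logging mode
#   rabbitmqca = rabbitmqcacert = rabbitmqcakey = ''              # RabbitMQ logging mode
#   rabbitmqHost = ''
#   rabbitmqPort = 5672                   # AMQP port (placeholder)
#   rabbitmqRoutingKey = ''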
if config.loggingMode == 'rabbitmq':
    import rabbitmq
    rabbitmq = rabbitmq.rabbitMQClient(config.rabbitmqca, config.rabbitmqcacert, config.rabbitmqcakey, config.rabbitmqHost, config.rabbitmqPort, config.rabbitmqRoutingKey)
stop_event = threading.Event()
nonPOSIXCPULoads = []
lock = threading.Lock()
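# Flip the shared stop event on SIGINT/SIGTERM so all worker loops can wind down cleanly.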
def signal_handler(sig, frame):
    print('SIGINT/SIGTERM acknowledged. Stopping script gracefully, please wait...')
    stop_event.set()
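# Fetch a URL with the configured timeout, identifying itself as the Monutil monitor.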
def loadUrl(url):
    headers = { 'User-Agent': 'Monutil monitor' }
    response = requests.get(url, timeout=config.urlTimeout, headers=headers)
    return response
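# Resolve a relative resource path (such as an <img> src) against the page's base URL.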
def prepareUrl(src, baseUrl):
    if not src.startswith("http://") and not src.startswith("https://"):
        return baseUrl.rstrip("/") + "/" + src.lstrip("/")
    return src
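# Rolling CPU sampler for platforms where load averages are unreliable or unavailable:
# records one psutil.cpu_percent() reading per second, keeping the most recent 60.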
def nonPOSIXCPULoad(stop_event):
    global nonPOSIXCPULoads
    nonPOSIXCPULoad = 0
    while not stop_event.is_set():
        nonPOSIXCPULoad = psutil.cpu_percent(interval=1)
        with lock:
            nonPOSIXCPULoads.append(nonPOSIXCPULoad)
            if len(nonPOSIXCPULoads) > 60:
                nonPOSIXCPULoads.pop(0)
        time.sleep(0.1)
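# Average of the rolling CPU samples collected above (0 if nothing has been recorded yet).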
def getNonPOSIXCPUAverage():
    global nonPOSIXCPULoads
    with lock:
        if sum(nonPOSIXCPULoads) > 0:
            avgLoad = sum(nonPOSIXCPULoads) / len(nonPOSIXCPULoads)
        else:
            avgLoad = 0
    return avgLoad
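# Sample CPU and memory usage every hostMonitoringPeriod seconds while inside the
# configured monitoring window, logging and printing each reading.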
def monitorHost(stop_event):
    nonPOSIXCPUStarted = False
    while not stop_event.is_set():
        while not stop_event.is_set() and (time.strftime("%H:%M:%S") >= config.hostMonitorStartTime and time.strftime("%H:%M:%S") <= config.hostMonitorEndTime):
            if os.name != 'posix' or config.forceNonPOSIXCPU:
                if not nonPOSIXCPUStarted:
                    nonPOSIXCPUMonitor = threading.Thread(target=nonPOSIXCPULoad, args=(stop_event,))
                    nonPOSIXCPUMonitor.start()
                    nonPOSIXCPUStarted = True
                loadavg = round(getNonPOSIXCPUAverage(), 2)
            else:
                load1, load5, load15 = psutil.getloadavg()  # this takes time to warm up if not running script on *nix
                loadavg = round((load1 / os.cpu_count()) * 100, 2)
            memory = psutil.virtual_memory().percent
            logHostLog(socket.gethostname(), datetime.now(), loadavg, memory)
            print("CPU %: " + str(loadavg))
            print("Memory %: " + str(memory))
            print()  # new line
            time.sleep(config.hostMonitoringPeriod)
        time.sleep(1)
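# Time how long each configured URL and its images take to load every
# urlMonitoringPeriod seconds while inside the configured monitoring window.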
def monitorUrls(stop_event):
    while not stop_event.is_set():
        while not stop_event.is_set() and (time.strftime("%H:%M:%S") >= config.urlMonitorStartTime and time.strftime("%H:%M:%S") <= config.urlMonitorEndTime):
            for url in config.urls:
                baseUrl = url
                urlFail = False
                startTime = time.time()
                try:
                    request = loadUrl(url)
                except requests.RequestException:
                    # A timeout or connection error on one URL should not kill the monitoring thread
                    urlFail = True
                    continue
                if request.status_code == 200:
                    html = BeautifulSoup(request.content, 'html.parser')
                    imageUrls = [img['src'] for img in html.find_all('img')]
                    responses = []
                    with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
                        futures = [executor.submit(loadUrl, prepareUrl(imageUrl, baseUrl)) for imageUrl in imageUrls]
                        for future in as_completed(futures):
                            try:
                                responses.append(future.result())
                            except requests.RequestException:
                                urlFail = True
                    for response in responses:
                        if not response.status_code == 200:
                            urlFail = True
                    endTime = time.time()
                    timeDiff = endTime - startTime
                    print(baseUrl + " response time: " + str(timeDiff))
                    print()  # new line
                    logURLLog(socket.gethostname(), datetime.now(), baseUrl, timeDiff)
                else:
                    urlFail = True
            time.sleep(config.urlMonitoringPeriod)
        time.sleep(1)
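# Record a host reading: SQL via logsManager, a pipe-delimited RabbitMQ message,
# or nothing at all when loggingMode is 'none'.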
def logHostLog(hostname, logTime, cpu, memory):
    if not config.loggingMode == 'none' and not config.loggingMode == 'rabbitmq':
        manager = logsManager(config.sqlServer, config.sqlDatabase, config.sqlUsername, config.sqlPassword)
        manager.insertHostLog(hostname, socket.gethostbyname(socket.gethostname()), logTime, cpu, memory)
    if config.loggingMode == 'rabbitmq':
        rabbitmq.publish(hostname + '|' + socket.gethostbyname(socket.gethostname()) + '|' + str(logTime) + '|' + 'cpumem' + '|' + str(cpu) + '|' + str(memory))
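# Record a URL response time using the same loggingMode rules as logHostLog.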
def logURLLog(hostname, logTime, url, responseTime):
    if not config.loggingMode == 'none' and not config.loggingMode == 'rabbitmq':
        manager = logsManager(config.sqlServer, config.sqlDatabase, config.sqlUsername, config.sqlPassword)
        manager.insertURLLog(hostname, socket.gethostbyname(socket.gethostname()), logTime, url, responseTime)
    if config.loggingMode == 'rabbitmq':
        rabbitmq.publish(hostname + '|' + socket.gethostbyname(socket.gethostname()) + '|' + str(logTime) + '|' + 'url' + '|' + url + '|' + str(responseTime))
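# Install the signal handlers and run the host and URL monitors on separate threads.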
def main():
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    hostMonitorThread = threading.Thread(target=monitorHost, args=(stop_event,))
    urlMonitorThread = threading.Thread(target=monitorUrls, args=(stop_event,))
    hostMonitorThread.start()
    urlMonitorThread.start()
    hostMonitorThread.join()
    urlMonitorThread.join()

if __name__ == "__main__":
    main()