From 39059f342e4af43cb7b9e3d1d3fc21a04041f4b1 Mon Sep 17 00:00:00 2001
From: Thomas Williams
Date: Thu, 27 Jun 2024 18:25:59 +0100
Subject: [PATCH] Resolves issue #4

---
 config.py |  3 +-
 main.py   | 98 +++++++++++++++++++++++++++++++++----------------------
 2 files changed, 61 insertions(+), 40 deletions(-)

diff --git a/config.py b/config.py
index 0158402..0cb6476 100644
--- a/config.py
+++ b/config.py
@@ -1,6 +1,7 @@
 #!/usr/bin/python3
 
-monitoringPeriod = 30
+hostMonitoringPeriod = 5
+urlMonitoringPeriod = 10
 urls = ["https://www.bootlesshacker.com"]
 urlTimeout = 10
 maxWorkers = 4
diff --git a/main.py b/main.py
index 5f45980..e466694 100755
--- a/main.py
+++ b/main.py
@@ -28,12 +28,15 @@ import os
 import time
 import log
 import requests
+import threading
 
 from functools import partial
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from bs4 import BeautifulSoup
 
 def loadUrl(url):
+    headers = { 'User-Agent': 'Monutil monitor' }
+
     response = requests.get(url, timeout=config.urlTimeout, headers=headers)
     return response
 
@@ -43,52 +46,69 @@ def prepareUrl(src, baseUrl):
         return baseUrl.rstrip("/") + "/" + src.lstrip("/")
     return src
 
-while True:
-
-    load1, load5, load15 = psutil.getloadavg() # this takes time to warm up if not running script on *nix
-    loadavg = round((load1/os.cpu_count()) * 100, 2)
+def monitorHost():
 
-    total_memory, used_memory, free_memory = map(
-        int, os.popen('free -t -m').readlines()[-1].split()[1:])
-    memory = round((used_memory/total_memory) * 100, 2)
+    while True:
 
-    print("CPU %: " + str(loadavg))
-    print("Memory %: " + str(memory))
-
-    # Log CPU/Memory
-
-    headers = {
-        'User-Agent': 'Monutil monitor'
-    }
+        load1, load5, load15 = psutil.getloadavg() # this takes time to warm up if not running script on *nix
+        loadavg = round((load1/os.cpu_count()) * 100, 2)
 
-    for url in config.urls:
-
-        baseUrl = url
-        urlFail = False
+        total_memory, used_memory, free_memory = map(
+            int, os.popen('free -t -m').readlines()[-1].split()[1:])
+        memory = round((used_memory/total_memory) * 100, 2)
 
-        startTime = time.time()
-        request = loadUrl(url)
-
-        if request.status_code == 200:
-
-            html = BeautifulSoup(request.content, 'html.parser')
-            imageUrls = [img['src'] for img in html.find_all('img')]
+        print("CPU %: " + str(loadavg))
+        print("Memory %: " + str(memory))
 
-            with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
-                responses = [executor.submit(loadUrl, prepareUrl(url, baseUrl)) for url in imageUrls]
-
-                responses = [future.result() for future in as_completed(responses)]
+        time.sleep(config.hostMonitoringPeriod)
 
-            for response in responses:
-                if not response.status_code == 200:
-                    urlFail = True
+def monitorUrls():
 
-            endTime = time.time()
-            timeDiff = endTime - startTime
-            print(timeDiff)
+    while True:
 
-        else:
+        for url in config.urls:
 
-            urlFail = True
+            baseUrl = url
+            urlFail = False
 
-    time.sleep(config.monitoringPeriod)
+            startTime = time.time()
+            request = loadUrl(url)
+
+            if request.status_code == 200:
+
+                html = BeautifulSoup(request.content, 'html.parser')
+                imageUrls = [img['src'] for img in html.find_all('img')]
+
+                with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
+
+                    responses = [executor.submit(loadUrl, prepareUrl(url, baseUrl)) for url in imageUrls]
+                    responses = [future.result() for future in as_completed(responses)]
+
+                for response in responses:
+
+                    if not response.status_code == 200:
+                        urlFail = True
+
+                endTime = time.time()
+                timeDiff = endTime - startTime
+
+                print(timeDiff)
+
+            else:
+                urlFail = True
+
+        time.sleep(config.urlMonitoringPeriod)
+
+def main():
+
+    hostMonitorThread = threading.Thread(target=monitorHost)
+    urlMonitorThread = threading.Thread(target=monitorUrls)
+
+    hostMonitorThread.start()
+    urlMonitorThread.start()
+
+    hostMonitorThread.join()
+    urlMonitorThread.join()
+
+if __name__ == "__main__":
+    main()