Resolves issue #4

Thomas Williams 2024-06-27 18:25:59 +01:00
parent c4edd9badd
commit 39059f342e
Signed by: thomas
GPG key ID: EB8F975CF60BCBFF
2 changed files with 61 additions and 40 deletions


@@ -1,6 +1,7 @@
 #!/usr/bin/python3
 
-monitoringPeriod = 30
+hostMonitoringPeriod = 5
+urlMonitoringPeriod = 10
 urls = ["https://www.bootlesshacker.com"]
 urlTimeout = 10
 maxWorkers = 4

main.py

@@ -28,12 +28,15 @@ import os
 import time
 import log
 import requests
+import threading
 from functools import partial
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from bs4 import BeautifulSoup
 
 def loadUrl(url):
+    headers = { 'User-Agent': 'Monutil monitor' }
     response = requests.get(url, timeout=config.urlTimeout, headers=headers)
     return response
@@ -43,52 +43,69 @@ def prepareUrl(src, baseUrl):
         return baseUrl.rstrip("/") + "/" + src.lstrip("/")
     return src
 
-while True:
-    load1, load5, load15 = psutil.getloadavg() # this takes time to warm up if not running script on *nix
-    loadavg = round((load1/os.cpu_count()) * 100, 2)
-
-    total_memory, used_memory, free_memory = map(
-        int, os.popen('free -t -m').readlines()[-1].split()[1:])
-    memory = round((used_memory/total_memory) * 100, 2)
-
-    print("CPU %: " + str(loadavg))
-    print("Memory %: " + str(memory))
-
-    # Log CPU/Memory
-
-    headers = {
-        'User-Agent': 'Monutil monitor'
-    }
-
-    for url in config.urls:
-        baseUrl = url
-        urlFail = False
-        startTime = time.time()
-        request = loadUrl(url)
-        if request.status_code == 200:
-            html = BeautifulSoup(request.content, 'html.parser')
-            imageUrls = [img['src'] for img in html.find_all('img')]
-            with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
-                responses = [executor.submit(loadUrl, prepareUrl(url, baseUrl)) for url in imageUrls]
-                responses = [future.result() for future in as_completed(responses)]
-
-                for response in responses:
-                    if not response.status_code == 200:
-                        urlFail = True
-                endTime = time.time()
-                timeDiff = endTime - startTime
-                print(timeDiff)
-        else:
-            urlFail = True
-
-    time.sleep(config.monitoringPeriod)
+def monitorHost():
+    while True:
+        load1, load5, load15 = psutil.getloadavg() # this takes time to warm up if not running script on *nix
+        loadavg = round((load1/os.cpu_count()) * 100, 2)
+
+        total_memory, used_memory, free_memory = map(
+            int, os.popen('free -t -m').readlines()[-1].split()[1:])
+        memory = round((used_memory/total_memory) * 100, 2)
+
+        print("CPU %: " + str(loadavg))
+        print("Memory %: " + str(memory))
+
+        time.sleep(config.hostMonitoringPeriod)
+
+def monitorUrls():
+    while True:
+        for url in config.urls:
+            baseUrl = url
+            urlFail = False
+            startTime = time.time()
+            request = loadUrl(url)
+            if request.status_code == 200:
+                html = BeautifulSoup(request.content, 'html.parser')
+                imageUrls = [img['src'] for img in html.find_all('img')]
+                with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
+                    responses = [executor.submit(loadUrl, prepareUrl(url, baseUrl)) for url in imageUrls]
+                    responses = [future.result() for future in as_completed(responses)]
+
+                    for response in responses:
+                        if not response.status_code == 200:
+                            urlFail = True
+                    endTime = time.time()
+                    timeDiff = endTime - startTime
+                    print(timeDiff)
+            else:
+                urlFail = True
+
+        time.sleep(config.urlMonitoringPeriod)
+
+def main():
+    hostMonitorThread = threading.Thread(target=monitorHost)
+    urlMonitorThread = threading.Thread(target=monitorUrls)
+
+    hostMonitorThread.start()
+    urlMonitorThread.start()
+
+    hostMonitorThread.join()
+    urlMonitorThread.join()
+
+if __name__ == "__main__":
+    main()
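
For anyone reading the diff in isolation, the shape of the change is: the single polling loop is split into two loops that run on their own threads, each sleeping for its own configured period. Below is a minimal, self-contained sketch of that pattern; the check functions are hypothetical placeholders (not the real psutil/requests checks in main.py), and the hard-coded periods stand in for the new hostMonitoringPeriod and urlMonitoringPeriod settings.

import threading
import time

def checkHost():
    # hypothetical stand-in for the CPU/memory sampling in monitorHost()
    print("host check")

def checkUrls():
    # hypothetical stand-in for the URL/image checks in monitorUrls()
    print("url check")

def runEvery(period, check):
    # each monitor is an endless loop: run the check, then sleep for its period
    while True:
        check()
        time.sleep(period)

if __name__ == "__main__":
    # assumed periods mirroring the new config values (5s host, 10s URLs)
    hostThread = threading.Thread(target=runEvery, args=(5, checkHost))
    urlThread = threading.Thread(target=runEvery, args=(10, checkUrls))
    hostThread.start()
    urlThread.start()
    hostThread.join()
    urlThread.join()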