Resolves issue #4

This commit is contained in:
Thomas Williams 2024-06-27 18:25:59 +01:00
parent c4edd9badd
commit 39059f342e
Signed by: thomas
GPG key ID: EB8F975CF60BCBFF
2 changed files with 61 additions and 40 deletions

View file

@@ -1,6 +1,7 @@
#!/usr/bin/python3
# Configuration for the monutil monitoring script.

# Seconds between full monitoring passes.
# NOTE(review): appears superseded by the host/url-specific periods below — confirm.
monitoringPeriod = 30
# Seconds between host CPU/memory samples (used by monitorHost).
hostMonitoringPeriod = 5
# Seconds between URL monitoring passes (used by monitorUrls).
urlMonitoringPeriod = 10
# Pages to monitor; each page's <img> resources are also fetched.
urls = ["https://www.bootlesshacker.com"]
# Per-request timeout in seconds for every HTTP fetch.
urlTimeout = 10
# Thread-pool size for concurrent image fetches.
maxWorkers = 4

84
main.py
View file

@@ -28,12 +28,15 @@ import os
import time
import log
import requests
import threading
from functools import partial
from concurrent.futures import ThreadPoolExecutor, as_completed
from bs4 import BeautifulSoup
def loadUrl(url):
    """Fetch *url* with the monitor's User-Agent and return the Response.

    Uses config.urlTimeout as the per-request timeout; any requests
    exception propagates to the caller.
    """
    return requests.get(
        url,
        timeout=config.urlTimeout,
        headers={'User-Agent': 'Monutil monitor'},
    )
@@ -43,52 +46,69 @@ def prepareUrl(src, baseUrl):
return baseUrl.rstrip("/") + "/" + src.lstrip("/")
return src
def monitorHost():
    """Periodically sample host CPU load and memory usage and print them.

    Runs forever; sleeps config.hostMonitoringPeriod seconds between samples.
    Linux-specific: parses the output of `free -t -m`.
    """
    while True:
        # this takes time to warm up if not running script on *nix
        load1, load5, load15 = psutil.getloadavg()
        loadavg = round((load1 / os.cpu_count()) * 100, 2)

        # Parse the last ("Total:") row of `free -t -m`.
        # Context manager closes the pipe instead of leaking a file handle
        # on every iteration of this infinite loop.
        with os.popen('free -t -m') as freePipe:
            total_memory, used_memory, free_memory = map(
                int, freePipe.readlines()[-1].split()[1:])
        memory = round((used_memory / total_memory) * 100, 2)

        # Log CPU/Memory
        print("CPU %: " + str(loadavg))
        print("Memory %: " + str(memory))

        time.sleep(config.hostMonitoringPeriod)
def monitorUrls():
    """Poll each configured URL and its embedded images, timing each pass.

    For every URL in config.urls: fetch the page; on HTTP 200, fetch all
    <img> resources concurrently (config.maxWorkers threads) and print the
    total elapsed time.  Any non-200 response or request failure marks the
    pass as failed.  Runs forever; sleeps config.urlMonitoringPeriod
    seconds between passes.
    """
    while True:
        for url in config.urls:
            baseUrl = url
            # NOTE(review): urlFail is computed but never consumed here —
            # presumably a hook for future alerting; confirm intent.
            urlFail = False
            startTime = time.time()
            try:
                request = loadUrl(url)
            except requests.RequestException:
                # A timeout/connection error must not kill the monitor thread.
                urlFail = True
                continue
            if request.status_code == 200:
                html = BeautifulSoup(request.content, 'html.parser')
                # .get() guard skips <img> tags with no src attribute,
                # which would otherwise raise KeyError.
                imageUrls = [img['src'] for img in html.find_all('img')
                             if img.get('src')]
                responses = []
                with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
                    futures = [executor.submit(loadUrl, prepareUrl(src, baseUrl))
                               for src in imageUrls]
                    for future in as_completed(futures):
                        try:
                            responses.append(future.result())
                        except requests.RequestException:
                            # A failed image fetch is a failure, not a crash.
                            urlFail = True
                for response in responses:
                    if not response.status_code == 200:
                        urlFail = True
                endTime = time.time()
                timeDiff = endTime - startTime
                print(timeDiff)
            else:
                urlFail = True
        time.sleep(config.urlMonitoringPeriod)
def main():
    """Run the host and URL monitors on parallel threads and wait on both."""
    monitors = [
        threading.Thread(target=monitorHost),
        threading.Thread(target=monitorUrls),
    ]
    for monitor in monitors:
        monitor.start()
    for monitor in monitors:
        monitor.join()


if __name__ == "__main__":
    main()