Resolves issue #4
This commit is contained in:
parent
c4edd9badd
commit
39059f342e
2 changed files with 61 additions and 40 deletions
|
@ -1,6 +1,7 @@
|
||||||
#!/usr/bin/python3
# Monutil configuration module (imported as `config` by main.py).

# Seconds between host CPU/memory samples.
hostMonitoringPeriod = 5

# Seconds to wait after each full pass over the monitored URLs.
urlMonitoringPeriod = 10

# Sites whose page-load time is monitored.
urls = ["https://www.bootlesshacker.com"]

# Per-request timeout in seconds for every HTTP fetch.
urlTimeout = 10

# Thread-pool size used when fetching a page's image assets in parallel.
maxWorkers = 4
|
|
98
main.py
98
main.py
|
@ -28,12 +28,15 @@ import os
|
||||||
import time
|
import time
|
||||||
import log
|
import log
|
||||||
import requests
|
import requests
|
||||||
|
import threading
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
def loadUrl(url):
    """Fetch *url* and return the `requests.Response`.

    Sends a fixed monitor User-Agent and applies ``config.urlTimeout`` so a
    hung server cannot stall the monitoring loop indefinitely.
    """
    # NOTE(review): relies on module-level `config` and `requests` imported
    # above this view — confirm both are in the file's import block.
    headers = {'User-Agent': 'Monutil monitor'}
    return requests.get(url, timeout=config.urlTimeout, headers=headers)
|
||||||
|
|
||||||
|
@ -43,52 +46,69 @@ def prepareUrl(src, baseUrl):
|
||||||
return baseUrl.rstrip("/") + "/" + src.lstrip("/")
|
return baseUrl.rstrip("/") + "/" + src.lstrip("/")
|
||||||
return src
|
return src
|
||||||
|
|
||||||
def monitorHost():
    """Periodically sample host CPU load and memory usage and print them.

    Loops forever; sleeps ``config.hostMonitoringPeriod`` seconds between
    samples. Intended to run on its own thread (see ``main``).
    """
    while True:
        # this takes time to warm up if not running script on *nix
        load1, load5, load15 = psutil.getloadavg()
        loadavg = round((load1 / os.cpu_count()) * 100, 2)

        # Parse the totals row of `free -t -m` for overall memory figures.
        # NOTE(review): Linux-only; `free` is unavailable elsewhere — confirm
        # the deployment target.
        total_memory, used_memory, free_memory = map(
            int, os.popen('free -t -m').readlines()[-1].split()[1:])
        memory = round((used_memory / total_memory) * 100, 2)

        print("CPU %: " + str(loadavg))
        print("Memory %: " + str(memory))

        time.sleep(config.hostMonitoringPeriod)
|
def monitorUrls():
    """Continuously time full page loads (HTML plus images) for config.urls.

    For each URL: fetch the page, scrape its ``<img>`` sources, fetch those
    in parallel, and print the total elapsed time. Loops forever; sleeps
    ``config.urlMonitoringPeriod`` seconds after each pass.
    """
    while True:
        for url in config.urls:
            baseUrl = url
            # NOTE(review): urlFail is computed but never reported anywhere
            # visible — presumably logging is planned; confirm intent.
            urlFail = False

            startTime = time.time()
            request = loadUrl(url)

            if request.status_code == 200:
                html = BeautifulSoup(request.content, 'html.parser')
                imageUrls = [img['src'] for img in html.find_all('img')]

                # Fetch the page's images concurrently, resolving relative
                # src paths against the page URL first.
                with ThreadPoolExecutor(max_workers=config.maxWorkers) as executor:
                    responses = [executor.submit(loadUrl, prepareUrl(url, baseUrl)) for url in imageUrls]
                    responses = [future.result() for future in as_completed(responses)]

                for response in responses:
                    if not response.status_code == 200:
                        urlFail = True

                endTime = time.time()
                timeDiff = endTime - startTime
                print(timeDiff)
            else:
                urlFail = True

            time.sleep(config.urlMonitoringPeriod)
|
def main():
    """Run the host monitor and the URL monitor on separate threads.

    Both workers loop forever, so the joins below only return if a worker
    dies; this keeps the main thread alive alongside them.
    """
    hostMonitorThread = threading.Thread(target=monitorHost)
    urlMonitorThread = threading.Thread(target=monitorUrls)

    hostMonitorThread.start()
    urlMonitorThread.start()

    hostMonitorThread.join()
    urlMonitorThread.join()
|
# Entry point: start both monitoring threads when executed as a script.
if __name__ == "__main__":
    main()
||||||
|
|
Loading…
Reference in a new issue