Commit b9af1dca authored by Rafael Monnerat

Merge branch 'master' into 'master'

Release 0.52 and start new development cycle

See merge request !2
parents dc9a069a d1b4a7fd
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 import glob
 import os
 
-version = '0.52.dev0'
+version = '0.53.dev0'
 name = 'slapos.toolbox'
 long_description = open("README.rst").read() + "\n"
 
@@ -92,4 +92,4 @@ setup(name=name,
         'networkbench = slapos.networkbench:main'
       ]
     },
-    )
\ No newline at end of file
+    )
(Two further file diffs are collapsed in this view.)
@@ -11,6 +11,7 @@ import shutil
 import netifaces
 import random
 import pycurl
+import json
 from StringIO import StringIO
 
 botname = socket.gethostname()
@@ -82,6 +83,10 @@ def _test_url_request(url):
   curl.setopt(curl.CONNECTTIMEOUT, 10)
   curl.setopt(curl.TIMEOUT, 300)
   curl.setopt(curl.WRITEDATA, buffer)
+  curl.setopt(curl.SSL_VERIFYPEER, False)
+  curl.setopt(curl.SSL_VERIFYHOST, False)
+
+  result = "OK"
   try:
     curl.perform()
@@ -89,10 +94,11 @@ def _test_url_request(url):
     import traceback
     traceback.print_exc(file=sys.stderr)
     sys.stderr.flush()
+    result = "FAIL"
 
   body = buffer.getvalue()
 
-  rendering_time = "%s/%s/%s/%s/%s" % \
+  rendering_time = "%s;%s;%s;%s;%s" % \
     (curl.getinfo(curl.NAMELOOKUP_TIME),
      curl.getinfo(curl.CONNECT_TIME),
      curl.getinfo(curl.PRETRANSFER_TIME),
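Note: switching the rendering_time separator from "/" to ";" keeps the five pycurl timings consistent with the rest of the semicolon-separated log fields. An illustrative value (seconds; numbers made up, and the last two getinfo() calls are hidden by the fold above) would look like:

    rendering_time = "0.012;0.054;0.102;0.310;0.545"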
@@ -103,9 +109,42 @@ def _test_url_request(url):
   curl.close()
 
-  info_list = ('GET', url, response_code, rendering_time, "OK")
+  info_list = ('GET', url, response_code, rendering_time, result)
 
   return info_list
 
+def download_external_configuration(url):
+  buffer = StringIO()
+  curl = pycurl.Curl()
+  curl.setopt(curl.URL, url)
+  curl.setopt(curl.CONNECTTIMEOUT, 10)
+  curl.setopt(curl.TIMEOUT, 300)
+  curl.setopt(curl.WRITEDATA, buffer)
+  curl.setopt(curl.SSL_VERIFYPEER, False)
+  curl.setopt(curl.SSL_VERIFYHOST, False)
+
+  try:
+    curl.perform()
+  except:
+    import traceback
+    traceback.print_exc(file=sys.stderr)
+    sys.stderr.flush()
+
+  response_code = curl.getinfo(pycurl.HTTP_CODE)
+
+  curl.close()
+
+  if response_code == 200:
+    try:
+      return json.loads(buffer.getvalue())
+    except ValueError:
+      print "Unable to parse external configuration, error:"
+      import traceback
+      traceback.print_exc(file=sys.stderr)
+      sys.stderr.flush()
+      print "Ignoring external configuration"
+
+  return {}
+
 def is_rotate_log(log_file_path):
   try:
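Note: download_external_configuration() is expected to return a dict parsed from JSON, falling back to {} on any download or parse failure. Based on how main() consumes it below, a hypothetical document served by test_distributor_url would look like:

    {
      "dns": ["example.org"],
      "url": ["https://example.org/"],
      "ping": ["192.0.2.1"],
      "ping6": ["2001:db8::1"]
    }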
@@ -154,7 +193,7 @@ def create_logger(name, log_folder):
                         stdout=subprocess.PIPE, stdin=subprocess.PIPE, shell=True)
   sp.communicate()
 
-  format = "%%(asctime)-16s;%%(levelname)s;%s;%%(message)s" % botname
+  format = "%%(asctime)-16s;%s;%%(message)s" % botname
   handler.setFormatter(logging.Formatter(format))
   new_logger.addHandler(handler)
   return new_logger
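Note: with %(levelname)s dropped from the formatter, every record is now "asctime;botname;message". Assuming a hypothetical bot name COMP-0, an HTTP test result written by the http logger introduced below would render roughly as:

    2017-01-01 00:00:00,000;COMP-0;GET;https://example.org/;200;0.012;0.054;0.102;0.310;0.545;OK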
@@ -174,32 +213,53 @@ def main():
   delay = random.randint(0, 30)
 
-  name_list = config.get("network_bench", "dns")
-  url_list = config.get("network_bench", "url")
-  ping_list = config.get("network_bench", "ping")
-  ping6_list = config.get("network_bench", "ping6")
+  name_list = []
+  url_list = []
+  ping_list = []
+  ping6_list = []
+
+  if config.has_option("network_bench", "dns"):
+    name_list = config.get("network_bench", "dns").split()
+
+  if config.has_option("network_bench", "url"):
+    url_list = config.get("network_bench", "url").split()
+
+  if config.has_option("network_bench", "ping"):
+    ping_list = config.get("network_bench", "ping").split()
+
+  if config.has_option("network_bench", "ping6"):
+    ping6_list = config.get("network_bench", "ping6").split()
+
+  if config.has_option("network_bench", "test_distributor_url"):
+    external_configuration_url = config.get("network_bench", "test_distributor_url")
+    external_config_dict = download_external_configuration(external_configuration_url)
+    name_list.extend(external_config_dict.get("dns", []))
+    url_list.extend(external_config_dict.get("url", []))
+    ping_list.extend(external_config_dict.get("ping", []))
+    ping6_list.extend(external_config_dict.get("ping6", []))
 
-  logger = create_logger("info", log_folder)
-
-  logger.debug('Starting a new test in %s seconds' % delay)
   time.sleep(delay)
 
-  for name in name_list.split():
+  dns_logger = create_logger("dns", log_folder)
+  for name in name_list:
     info_list = _test_dns(name)
-    logger.info(';'.join(str(x) for x in info_list))
+    dns_logger.info(';'.join(str(x) for x in info_list))
 
-  # ping
-  for host in ping_list.split():
+  ping_logger = create_logger("ping", log_folder)
+  for host in ping_list:
     info_list = _test_ping(host)
-    logger.info(';'.join(str(x) for x in info_list))
+    ping_logger.info(';'.join(str(x) for x in info_list))
 
-  for host in ping6_list.split():
+  ping6_logger = create_logger("ping6", log_folder)
+  for host in ping6_list:
     info_list = _test_ping6(host)
-    logger.info(';'.join(str(x) for x in info_list))
+    ping6_logger.info(';'.join(str(x) for x in info_list))
 
-  # http
-  for url in url_list.split():
+  http_logger = create_logger("http", log_folder)
+  for url in url_list:
     info_list = _test_url_request(url)
-    logger.info(';'.join(str(x) for x in info_list))
+    http_logger.info(';'.join(str(x) for x in info_list))
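Note: every [network_bench] option is now optional, and extra test targets can be pulled from a remote distributor. A hypothetical configuration exercising all of the options (all values are placeholders):

    [network_bench]
    dns = example.org example.net
    url = https://example.org/
    ping = 192.0.2.1
    ping6 = 2001:db8::1
    test_distributor_url = https://distributor.example.org/tests.json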