Commit 15832525 authored by Jérome Perrin's avatar Jérome Perrin

Update Release Candidate

parents aa144d6b b7961704
# This is part of the OCEAN project.
# Plugin for fluent-bit to send data to Wendelin:
# https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin
[buildout]
extends =
../golang/buildout.cfg
parts =
fluentbit-plugin-wendelin
[fluentbit-plugin-wendelin]
recipe = slapos.recipe.cmmi
shared = true
url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/v0.1i-dev_buildout/fluentbit-plugin-wendelin-v0.1i-dev_buildout.tar.gz
md5sum = 9ed2ef46b0edfca072255b849ee65249
configure-command = echo "No configure command."
environment =
PATH=${golang1.17:location}/bin:%(PATH)s
PREFIX=@@LOCATION@@
[buildout]
extends =
../cmake/buildout.cfg
../pkgconfig/buildout.cfg
parts =
jsoncpp
[jsoncpp]
recipe = slapos.recipe.cmmi
shared = true
pre-configure =
mkdir jsoncpp-build
configure-command =
cd jsoncpp-build && ${cmake:location}/bin/cmake ..
configure-options =
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
url = https://github.com/open-source-parsers/jsoncpp/archive/refs/tags/1.9.5.tar.gz
md5sum = d6c8c609f2162eff373db62b90a051c7
make-binary =
cd jsoncpp-build && make
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
[buildout]
extends =
../cmake/buildout.cfg
../curl/buildout.cfg
../git/buildout.cfg
../jsoncpp/buildout.cfg
../tinyxml2/buildout.cfg
parts =
mavsdk
[c-astral-headers]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/c-astral-c-library
revision = v0.1
git-executable = ${git:location}/bin/git
[gcc]
min_version = 7.1
[mavsdk-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/mavlink/MAVSDK.git
revision = v0.37.0
git-executable = ${git:location}/bin/git
ignore-cloning-submodules = true
[mavsdk]
recipe = slapos.recipe.cmmi
path = ${mavsdk-source:location}
cmake = ${cmake:location}/bin/cmake
pre-configure =
${git:location}/bin/git submodule update --init --recursive
cp -r ${c-astral-headers:location}/* ${mavsdk-source:location}/src/third_party/mavlink/include/mavlink/v2.0/
sed -i 's#common/mavlink.h#CAstral/mavlink.h#' ${mavsdk-source:location}/src/core/mavlink_include.h
configure-command =
${:cmake}
configure-options =
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_C_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_CXX_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DCMAKE_INSTALL_RPATH=${:CMAKE_LIBRARY_PATH}
-DPKG_CONFIG_EXECUTABLE=${pkgconfig:location}/bin/pkg-config
-DSUPERBUILD=OFF
-Bbuild/default
-H.
make-binary =
${:cmake} --build build/default --target install
environment =
CMAKE_INCLUDE_PATH=${curl:location}/include:${jsoncpp:location}/include:${tinyxml2:location}/include
CMAKE_LIBRARY_PATH=${:CMAKE_LIBRARY_PATH}
CMAKE_PROGRAM_PATH=${cmake:location}/bin
PATH=${pkgconfig:location}/bin/:%(PATH)s
LDFLAGS=-L${jsoncpp:location}/lib -Wl,-rpath=${jsoncpp:location}/lib
CMAKE_CFLAGS=-I${tinyxml2:location}/include
CMAKE_LIBRARY_PATH=${curl:location}/lib:${jsoncpp:location}/lib:${tinyxml2:location}/lib
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
[buildout]
extends =
../../component/defaults.cfg
../../component/fmtlib/buildout.cfg
../../component/openssl/buildout.cfg
parts =
mca
[mca]
recipe = slapos.recipe.cmmi
url = https://lab.nexedi.com/nexedi/metadata-collect-agent/-/archive/v0.2h-dev_buildout/metadata-collect-agent-v0.2h-dev_buildout.tar.gz
md5sum = f394ea9507d13a0b18f9485e70abaf32
configure-command = :
make-targets =
no-dracut
install-no-dracut
environment =
PYTHON_PATH=${python3:location}
PREFIX=@@LOCATION@@
OPENSSL_PATH=${openssl:location}
FMTLIB_PATH=${fmtlib:location}
...@@ -4,14 +4,26 @@
 parts = open62541
 extends =
   ../cmake/buildout.cfg
+  ../patch/buildout.cfg

 [open62541]
 recipe = slapos.recipe.cmmi
 shared = true
-url = https://github.com/open62541/open62541/archive/refs/tags/v1.2.2.tar.gz
-md5sum = 2883bde165bc9bc3d459ccbb47acf7f4
-configure-command = ${cmake:location}/bin/cmake
+url = https://github.com/open62541/open62541/archive/refs/heads/pack/v1.3.zip
+md5sum = c2e9938f082b027110fba538de76684c
+patch-options = -p1
+patches =
+  ${:_profile_base_location_}/ipv6_bugfix.patch#355bec02adee4ea73ff5a56ba6971ad2
+configure-command =
+  ${cmake:location}/bin/cmake
 configure-options =
-  -Bbuild
+  -DBUILD_SHARED_LIBS=ON
+  -DCMAKE_BUILD_TYPE=Release
   -DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-make-options = -C build
+  -DUA_ENABLE_PUBSUB=ON
+  -DUA_ENABLE_SUBSCRIPTIONS=ON
+  -DUA_NAMESPACE_ZERO=REDUCED
+post-install =
+  cp src/pubsub/*.h deps/open62541_queue.h @@LOCATION@@/include
+environment =
+  PATH=${patch:location}/bin:%(PATH)s
commit ccdde2eddfd2e0937ba0b452063c60c214f5f2f5 (HEAD -> master)
Author: Thomas Gambier <thomas.gambier@nexedi.com>
Date: Sat May 28 19:18:02 2022 +0200
BUGFIX: UA_PubSubChannelUDPMC_regist supports different interface in IPv6
diff --git a/plugins/ua_pubsub_udp.c b/plugins/ua_pubsub_udp.c
index 2c6ea480..f66f5860 100644
--- a/plugins/ua_pubsub_udp.c
+++ b/plugins/ua_pubsub_udp.c
@@ -319,6 +319,7 @@ UA_PubSubChannelUDPMC_open(const UA_PubSubConnectionConfig *connectionConfig) {
"Interface configuration preparation failed.");
goto cleanup;
}
+ memcpy(&channelDataUDPMC->intf_addr, &group.ipv6.ipv6mr_interface, sizeof(group.ipv6.ipv6mr_interface));
}
#endif
}
@@ -432,6 +433,7 @@ UA_PubSubChannelUDPMC_regist(UA_PubSubChannel *channel, UA_ExtensionObject *tran
memcpy(&groupV6.ipv6mr_multiaddr,
&((const struct sockaddr_in6 *) &connectionConfig->ai_addr)->sin6_addr,
sizeof(struct in6_addr));
+ memcpy(&groupV6.ipv6mr_interface, &connectionConfig->intf_addr, sizeof(int));
if(UA_setsockopt(channel->sockfd,
connectionConfig->ai_family == PF_INET6 ? IPPROTO_IPV6 : IPPROTO_IP,
[buildout]
extends =
../mavsdk/buildout.cfg
../open62541/buildout.cfg
../quickjs/buildout.cfg
parts = qjs-wrapper
[qjs-wrapper]
recipe = slapos.recipe.cmmi
shared = true
configure-command = true
url = https://lab.nexedi.com/nexedi/qjs-wrapper/-/archive/v0.1/qjs-wrapper-v0.1.tar.gz
md5sum = 4f0eb6f3bc5719a8ec043ce4f4d20747
environment =
C_INCLUDE_PATH=include:${open62541:location}/include:${open62541:location}/deps:${open62541:location}/src/pubsub:${quickjs:location}/include
CPLUS_INCLUDE_PATH=include:${mavsdk:location}/include:${mavsdk:location}/include/mavsdk
LDFLAGS=-L${open62541:location}/lib -Wl,-rpath=${open62541:location}/lib -L${mavsdk:location}/lib -Wl,-rpath=${mavsdk:location}/lib
...@@ -5,8 +5,9 @@ parts = quickjs
 [quickjs]
 recipe = slapos.recipe.cmmi
+shared = true
 configure-command = true
-url = https://bellard.org/quickjs/quickjs-2020-09-06.tar.xz
-md5sum = 47cb6def3263d6a631b647b268b1c554
+url = https://bellard.org/quickjs/quickjs-2021-03-27.tar.xz
+md5sum = 135182a626aa0c87a49aa2bf58fd39bf
 environment =
   PATH=${xz-utils:location}/bin:%(PATH)s
[buildout]
extends =
../cmake/buildout.cfg
parts =
tinyxml2
[tinyxml2]
recipe = slapos.recipe.cmmi
shared = true
configure-command =
${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-Dtinyxml2_SHARED_LIBS=ON
url = https://github.com/leethomason/tinyxml2/archive/refs/tags/9.0.0.tar.gz
md5sum = afecd941107a8e74d3d1b4363cf52bd7
...@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
 import glob
 import os

-version = '1.0.253'
+version = '1.0.272'
 name = 'slapos.cookbook'
 long_description = open("README.rst").read()
...
...@@ -36,6 +36,15 @@ class ServerHandler(SimpleHTTPRequestHandler):
     SimpleHTTPRequestHandler.do_GET(self)

   def do_POST(self):
+    """Write to a file on the server.
+
+    request keys:
+      path: the path of the file
+      content: content of the file
+      clear: (0|1 default 1) overwrite the file if 1
+
+    request can be encoded as application/x-www-form-urlencoded or multipart/form-data
+    """
     logging.info('%s - POST: %s \n%s' % (self.client_address[0], self.path, self.headers))
     if self.restrictedRootAccess():
       return
...@@ -46,14 +55,20 @@ class ServerHandler(SimpleHTTPRequestHandler):
       environ={'REQUEST_METHOD': 'POST',
                'CONTENT_TYPE': self.headers['Content-Type']}
     )
-    name = form['path'].value.decode('utf-8')
-    content = form['content'].value
-    method = 'ab'
-    if 'clear' in form and form['clear'].value == '1':
-      method = 'wb'
-    self.writeFile(name, content, method)
+    file_content = form['content'].value
+    file_path = form['path'].value
+    if form['content'].file:
+      # post data as multipart/form-data , values are bytes
+      file_path = file_path.decode('utf-8')
+    else:
+      # application/x-www-form-urlencoded , values are str
+      file_content = file_content.encode('utf-8')
+    file_open_mode = 'wb' if ('clear' in form and form['clear'].value in ('1', b'1')) else 'ab'
+    self.writeFile(file_path, file_content, file_open_mode)
     self.respond(200, type=self.headers['Content-Type'])
-    self.wfile.write(b"Content written to %s" % str2bytes(name))
+    self.wfile.write(b"Content written to %s" % str2bytes(file_path))

   def writeFile(self, filename, content, method='ab'):
     file_path = os.path.abspath(os.path.join(self.document_path, filename))
...
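The `clear` key is not exercised by the tests that follow. A minimal sketch of a client using it to append rather than overwrite, assuming a server instance is already running; the address and file name are illustrative, not taken from the recipe:

import requests

server_base_url = 'http://[::1]:9999'  # hypothetical address of a running instance

# clear=1 opens the target in 'wb' mode, so the file is (re)created.
requests.post(server_base_url, data={
    'path': 'log.txt', 'content': 'first line\n', 'clear': '1'})

# clear=0 falls through to 'ab' mode, so the second request appends.
requests.post(server_base_url, data={
    'path': 'log.txt', 'content': 'second line\n', 'clear': '0'})

print(requests.get(server_base_url + '/log.txt').text)
# first line
# second line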
...@@ -71,22 +71,43 @@ class SimpleHTTPServerTest(unittest.TestCase):
         'server did not start.\nout: %s error: %s' % self.process.communicate())
     self.assertIn('Directory listing for /', resp.text)

+    # post with multipart/form-data encoding
     resp = requests.post(
         server_base_url,
         files={
-            'path': 'hello.txt',
-            'content': b'hello',
+            'path': 'hello-form-data.txt',
+            'content': 'hello-form-data',
         },
     )
     self.assertEqual(resp.status_code, requests.codes.ok)
+    self.assertEqual(resp.text, 'Content written to hello-form-data.txt')
     with open(
         os.path.join(self.base_path, self.recipe.options['path'],
-                     'hello.txt')) as f:
-      self.assertEqual(f.read(), 'hello')
-    self.assertIn('hello.txt', requests.get(server_base_url).text)
+                     'hello-form-data.txt')) as f:
+      self.assertEqual(f.read(), 'hello-form-data')
+    self.assertIn('hello-form-data.txt', requests.get(server_base_url).text)
     self.assertEqual(
-        requests.get(server_base_url + '/hello.txt').text, 'hello')
+        requests.get(server_base_url + '/hello-form-data.txt').text, 'hello-form-data')
+
+    # post as application/x-www-form-urlencoded
+    resp = requests.post(
+        server_base_url,
+        data={
+            'path': 'hello-form-urlencoded.txt',
+            'content': 'hello-form-urlencoded',
+        },
+    )
+    self.assertEqual(resp.status_code, requests.codes.ok)
+    with open(
+        os.path.join(self.base_path, self.recipe.options['path'],
+                     'hello-form-urlencoded.txt')) as f:
+      self.assertEqual(f.read(), 'hello-form-urlencoded')
+    self.assertIn('hello-form-urlencoded.txt', requests.get(server_base_url).text)
+    self.assertEqual(resp.text, 'Content written to hello-form-urlencoded.txt')
+    self.assertEqual(
+        requests.get(server_base_url + '/hello-form-urlencoded.txt').text, 'hello-form-urlencoded')
+
     # incorrect paths are refused
     for path in '/hello.txt', '../hello.txt':
...
...@@ -45,7 +45,7 @@ class TestBackupServer(InstanceTestCase):
     # Check that there is a RSS feed
     self.assertTrue('rss' in parameter_dict)
     self.assertTrue(parameter_dict['rss'].startswith(
-      'https://[%s]:9443/' % (self._ipv6_address, )
+      f'https://[{self._ipv6_address}]:9443/'
     ))
     result = requests.get(
...
...@@ -24,7 +24,6 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
-from __future__ import unicode_literals

 import os
 import requests
...
...@@ -51,7 +51,6 @@ setup(name=name,
       # caucase needed to connect to the KeDiFa caucase
       'caucase',
       'cryptography',
-      'backports.lzma',
     ],
     zip_safe=True,
     test_suite='test',
...
...@@ -33,7 +33,7 @@ from requests_toolbelt.adapters import source
 import json
 import multiprocessing
 import subprocess
-from unittest import skip
+from unittest import skip, expectedFailure
 import ssl
 from http.server import HTTPServer
 from http.server import BaseHTTPRequestHandler
...@@ -51,18 +51,13 @@
 import socket
 import sys
 import logging
+import lzma
 import random
 import string

 from slapos.slap.standalone import SlapOSNodeInstanceError
 import caucase.client
 import caucase.utils
-try:
-  import lzma
-except ImportError:
-  from backports import lzma
 import datetime

 from cryptography import x509
...@@ -307,7 +302,8 @@ class TestDataMixin(object):
     except IOError:
       test_data = ''
-    for replacement, value in list(data_replacement_dict.items()):
+    for replacement in sorted(data_replacement_dict.keys()):
+      value = data_replacement_dict[replacement]
       runtime_data = runtime_data.replace(value, replacement)
     maxDiff = self.maxDiff
...@@ -448,7 +444,8 @@ class TestDataMixin(object):
         self.another_server_ca.certificate_pem.decode()),
       '@@another_server_ca.certificate_pem_double@@': unicode_escape(
         unicode_escape(self.another_server_ca.certificate_pem.decode())),
-      '@@getSoftwareURL@@': self.getSoftwareURL(),
+      # self.getSoftwareURL can contain other replacements so do it first
+      '@@00getSoftwareURL@@': self.getSoftwareURL(),
       '@@test_server_ca.certificate_pem@@': unicode_escape(
         self.test_server_ca.certificate_pem.decode()),
       '@@test_server_ca.certificate_pem_double@@': unicode_escape(
...@@ -4451,6 +4448,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
         curl_command, out, err))
     return out, err

+  @expectedFailure
   def test_disabled_cookie_list(self):
     parameter_dict = self.assertSlaveBase('disabled-cookie-list')
     out, err = self._curl(
...
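The `00` prefix only matters because the replacement loop above now iterates `sorted(data_replacement_dict.keys())`. A minimal standalone sketch of that ordering, with illustrative placeholder values rather than the real test fixtures:

# Keys are processed in sorted order; digits sort before letters, so the
# software URL value is turned back into a placeholder before any other
# replacement whose value may be a substring of it gets a chance to mangle it.
data_replacement_dict = {
    '@@00getSoftwareURL@@': '/srv/slapgrid/soft/abc/software.cfg',  # illustrative
    '@@software_dir@@': '/srv/slapgrid/soft',                       # illustrative
}
runtime_data = 'url = /srv/slapgrid/soft/abc/software.cfg'

for replacement in sorted(data_replacement_dict.keys()):
    value = data_replacement_dict[replacement]
    runtime_data = runtime_data.replace(value, replacement)

print(runtime_data)  # url = @@00getSoftwareURL@@  (not @@software_dir@@/abc/software.cfg)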
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -93,7 +93,7 @@ ...@@ -93,7 +93,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -130,7 +130,7 @@ ...@@ -130,7 +130,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -94,7 +94,7 @@ ...@@ -94,7 +94,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -132,7 +132,7 @@ ...@@ -132,7 +132,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -94,7 +94,7 @@ ...@@ -94,7 +94,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -132,7 +132,7 @@ ...@@ -132,7 +132,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -95,7 +95,7 @@ ...@@ -95,7 +95,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -134,7 +134,7 @@ ...@@ -134,7 +134,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -52,7 +52,7 @@ ...@@ -52,7 +52,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -88,7 +88,7 @@ ...@@ -88,7 +88,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -50,7 +50,7 @@ ...@@ -50,7 +50,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -86,7 +86,7 @@ ...@@ -86,7 +86,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -51,7 +51,7 @@ ...@@ -51,7 +51,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -88,7 +88,7 @@ ...@@ -88,7 +88,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -65,7 +65,7 @@ ...@@ -65,7 +65,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -102,7 +102,7 @@ ...@@ -102,7 +102,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -65,7 +65,7 @@ ...@@ -65,7 +65,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -102,7 +102,7 @@ ...@@ -102,7 +102,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -24,7 +24,7 @@ ...@@ -24,7 +24,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -69,7 +69,7 @@ ...@@ -69,7 +69,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -106,7 +106,7 @@ ...@@ -106,7 +106,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -143,7 +143,7 @@ ...@@ -143,7 +143,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-3", "slap_computer_partition_id": "T-3",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -763,7 +763,7 @@ ...@@ -763,7 +763,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -802,7 +802,7 @@ ...@@ -802,7 +802,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -81,7 +81,7 @@ ...@@ -81,7 +81,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -120,7 +120,7 @@ ...@@ -120,7 +120,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -764,7 +764,7 @@ ...@@ -764,7 +764,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -804,7 +804,7 @@ ...@@ -804,7 +804,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -269,7 +269,7 @@ ...@@ -269,7 +269,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -308,7 +308,7 @@ ...@@ -308,7 +308,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -79,7 +79,7 @@ ...@@ -79,7 +79,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -118,7 +118,7 @@ ...@@ -118,7 +118,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -228,7 +228,7 @@ ...@@ -228,7 +228,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -268,7 +268,7 @@ ...@@ -268,7 +268,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -68,7 +68,7 @@ ...@@ -68,7 +68,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -108,7 +108,7 @@ ...@@ -108,7 +108,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-0", "slap_computer_partition_id": "T-0",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "RootSoftwareInstance", "slap_software_type": "RootSoftwareInstance",
"slave_instance_list": [ "slave_instance_list": [
{ {
...@@ -68,7 +68,7 @@ ...@@ -68,7 +68,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "kedifa", "slap_software_type": "kedifa",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
...@@ -108,7 +108,7 @@ ...@@ -108,7 +108,7 @@
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
"slap_software_release_url": "@@getSoftwareURL@@", "slap_software_release_url": "@@00getSoftwareURL@@",
"slap_software_type": "single-custom-personal", "slap_software_type": "single-custom-personal",
"slave_instance_list": [], "slave_instance_list": [],
"timestamp": "@@TIMESTAMP@@" "timestamp": "@@TIMESTAMP@@"
......
...@@ -45,7 +45,6 @@ setup(name=name,
       'slapos.cookbook',
       'slapos.libnetworkcache',
       'requests',
-      'six',
       'PyPDF2',
     ],
     zip_safe=True,
...
 ##############################################################################
-# coding: utf-8
 #
 # Copyright (c) 2020 Nexedi SA and Contributors. All Rights Reserved.
 #
...@@ -31,8 +30,8 @@ import csv
 import multiprocessing
 import os
 import json
-import six.moves.xmlrpc_client as xmlrpclib
-import six.moves.urllib.parse as urllib_parse
+import xmlrpc.client as xmlrpclib
+import urllib.parse as urllib_parse
 import ssl
 import base64
 import io
...@@ -63,6 +62,7 @@ class CloudOooTestCase(_CloudOooTestCase):
         context=ssl_context,
         allow_none=True,
     )
+    self.addCleanup(self.server('close'))


 def normalizeFontName(font_name):
...@@ -111,16 +111,16 @@ class HTMLtoPDFConversionFontTestMixin:
   def test(self):
     actual_font_mapping_mapping = {}
     for font in self.expected_font_mapping:
-      src_html = '''
+      src_html = f'''
       <style>
         p {{ font-family: "{font}"; font-size: 20pt; }}
       </style>
       <p>the quick brown fox jumps over the lazy dog.</p>
       <p>THE QUICK BROWN FOX JUMPS OVER THE LAZY DOG.</p>
-      '''.format(**locals())
+      '''
       pdf_data = self._convert_html_to_pdf(src_html)
-      pdf_reader = PyPDF2.PdfFileReader(io.BytesIO((pdf_data)))
+      pdf_reader = PyPDF2.PdfFileReader(io.BytesIO(pdf_data))
       self.assertEqual(
           self.pdf_producer,
           pdf_reader.getDocumentInfo()['/Producer'])
...@@ -165,7 +165,7 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
       'Liberation Sans Narrow': 'LiberationSansNarrow',
       'Liberation Serif': 'LiberationSerif',
       'Linux LibertineG': 'LiberationSans',
-      'OpenSymbol': set(['DejaVuSans', 'OpenSymbol']),
+      'OpenSymbol': {'DejaVuSans', 'OpenSymbol'},
       'Palatino': 'LiberationSans',
       'Roboto Black': 'LiberationSans',
       'Roboto Condensed Light': 'LiberationSans',
...@@ -180,9 +180,9 @@ class TestWkhtmlToPDF(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
   }

   def _convert_html_to_pdf(self, src_html):
-    return base64.decodestring(
+    return base64.decodebytes(
         self.server.convertFile(
-            base64.encodestring(src_html.encode()).decode(),
+            base64.encodebytes(src_html.encode()).decode(),
             'html',
             'pdf',
             False,
...@@ -238,9 +238,9 @@ class TestLibreoffice(HTMLtoPDFConversionFontTestMixin, CloudOooTestCase):
   }

   def _convert_html_to_pdf(self, src_html):
-    return base64.decodestring(
+    return base64.decodebytes(
         self.server.convertFile(
-            base64.encodestring(src_html.encode()).decode(),
+            base64.encodebytes(src_html.encode()).decode(),
             'html',
             'pdf',
         ).encode())
...@@ -251,10 +251,10 @@ class TestLibreOfficeTextConversion(CloudOooTestCase):
   def test_html_to_text(self):
     self.assertEqual(
-        base64.decodestring(
+        base64.decodebytes(
             self.server.convertFile(
-                base64.encodestring(
-                    u'<html>héhé</html>'.encode('utf-8')).decode(),
+                base64.encodebytes(
+                    '<html>héhé</html>'.encode()).decode(),
                 'html',
                 'txt',
             ).encode()),
...@@ -274,19 +274,18 @@ class TestLibreOfficeCluster(CloudOooTestCase):
     global _convert_html_to_text

     def _convert_html_to_text(src_html):
-      return base64.decodestring(
+      return base64.decodebytes(
           self.server.convertFile(
-              base64.encodestring(src_html.encode()).decode(),
+              base64.encodebytes(src_html.encode()).decode(),
               'html',
               'txt',
           ).encode())

     pool = multiprocessing.Pool(5)
-    # TODO py3: use with pool
-    converted = pool.map(_convert_html_to_text,
-                         ['<html><body>hello</body></html>'] * 100)
-    pool.terminate()
-    pool.join()
+    with pool:
+      converted = pool.map(
+          _convert_html_to_text,
+          ['<html><body>hello</body></html>'] * 100)
     self.assertEqual(converted, [codecs.BOM_UTF8 + b'hello\n'] * 100)
...@@ -294,9 +293,8 @@ class TestLibreOfficeCluster(CloudOooTestCase):
     res = requests.get(
         urllib_parse.urljoin(self.url, '/haproxy;csv'),
         verify=False,
-        stream=True,
     )
-    reader = csv.DictReader(res.raw)
+    reader = csv.DictReader(io.StringIO(res.text))
     line_list = list(reader)
     # requests have been balanced
     total_hrsp_2xx = {
...@@ -309,8 +307,8 @@ class TestLibreOfficeCluster(CloudOooTestCase):
     # ideally there should be 25% of requests on each backend, because we use
     # round robin scheduling, but it can happen that some backend take longer
     # to start, so we are tolerant here and just check that each backend
-    # process at least 15% of requests.
-    self.assertGreater(total_hrsp_2xx[backend], 15)
+    # process at least one request.
+    self.assertGreater(total_hrsp_2xx[backend], 0)
     # no errors
     total_eresp = {
         line['svname']: int(line['eresp'] or 0)
...
 ##############################################################################
-# coding: utf-8
 #
 # Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
 #
...
...@@ -24,7 +24,6 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
-from __future__ import absolute_import
 from setuptools import setup, find_packages

 version = '0.0.1.dev0'
...@@ -49,7 +48,6 @@ setup(name=name,
       'psutil',
       'requests',
       'mysqlclient',
-      'backports.lzma',
       'cryptography',
       'pexpect',
       'pyOpenSSL',
...
...@@ -25,7 +25,6 @@
 #
 ##############################################################################
-from __future__ import absolute_import

 import json
 import os
...
-from __future__ import absolute_import
 import glob
 import hashlib
 import json
...@@ -9,10 +8,10 @@ import shutil
 import subprocess
 import tempfile
 import time
-import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error
-from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
-import mock
+import urllib.parse
+from http.server import BaseHTTPRequestHandler
+from unittest import mock
 import OpenSSL.SSL
 import pexpect
 import psutil
...@@ -27,7 +26,6 @@ from slapos.testing.utils import (CrontabMixin, ManagedHTTPServer,
                                   findFreeTCPPort)

 from . import ERP5InstanceTestCase, setUpModule
-from six.moves import range

 setUpModule  # pyflakes
...@@ -100,7 +98,7 @@ class CaucaseService(ManagedResource):
     os.mkdir(os.path.join(caucased_dir, 'user'))
     os.mkdir(os.path.join(caucased_dir, 'service'))

-    backend_caucased_netloc = '%s:%s' % (self._cls._ipv4_address, findFreeTCPPort(self._cls._ipv4_address))
+    backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
     self.url = 'http://' + backend_caucased_netloc
     self._caucased_process = subprocess.Popen(
         [
...@@ -213,7 +211,7 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
   @classmethod
   def _getInstanceParameterDict(cls):
     # type: () -> dict
-    parameter_dict = super(TestTimeout, cls)._getInstanceParameterDict()
+    parameter_dict = super()._getInstanceParameterDict()
     # use a slow server instead
     parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
     # and set timeout of 1 second
...@@ -224,12 +222,12 @@ class TestTimeout(BalancerTestCase, CrontabMixin):
     # type: () -> None
     self.assertEqual(
         requests.get(
-            six.moves.urllib.parse.urljoin(self.default_balancer_url, '/1'),
+            urllib.parse.urljoin(self.default_balancer_url, '/1'),
             verify=False).status_code,
         requests.codes.ok)
     self.assertEqual(
         requests.get(
-            six.moves.urllib.parse.urljoin(self.default_balancer_url, '/5'),
+            urllib.parse.urljoin(self.default_balancer_url, '/5'),
             verify=False).status_code,
         requests.codes.gateway_timeout)
...@@ -241,7 +239,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
   @classmethod
   def _getInstanceParameterDict(cls):
     # type: () -> dict
-    parameter_dict = super(TestLog, cls)._getInstanceParameterDict()
+    parameter_dict = super()._getInstanceParameterDict()
     # use a slow server instead
     parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
     return parameter_dict
...@@ -249,7 +247,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
   def test_access_log_format(self):
     # type: () -> None
     requests.get(
-        six.moves.urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
+        urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
         verify=False,
     )
     time.sleep(.5)  # wait a bit more until access is logged
...@@ -288,7 +286,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
         'apachedex',
         'ApacheDex-*.html',
     ))
-    with open(apachedex_report, 'r') as f:
+    with open(apachedex_report) as f:
       report_text = f.read()
     self.assertIn('APacheDEX', report_text)
     # having this table means that apachedex could parse some lines.
...@@ -336,7 +334,7 @@
error_line = error_log_file.read().splitlines()[-1] error_line = error_log_file.read().splitlines()[-1]
self.assertIn('proxy family_default has no server available!', error_line) self.assertIn('proxy family_default has no server available!', error_line)
# this log also include a timestamp # this log also include a timestamp
self.assertRegexpMatches(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}') self.assertRegex(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
class BalancerCookieHTTPServer(ManagedHTTPServer): class BalancerCookieHTTPServer(ManagedHTTPServer):
...@@ -377,7 +375,7 @@ class TestBalancer(BalancerTestCase): ...@@ -377,7 +375,7 @@ class TestBalancer(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> dict # type: () -> dict
parameter_dict = super(TestBalancer, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use two backend servers # use two backend servers
parameter_dict['dummy_http_server'] = [ parameter_dict['dummy_http_server'] = [
...@@ -409,7 +407,7 @@ class TestBalancer(BalancerTestCase): ...@@ -409,7 +407,7 @@ class TestBalancer(BalancerTestCase):
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the # if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm # backend selected by balancing algorithm
self.assertIn( self.assertIn(
requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'], requests.get(urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'), ('default-0', 'default-1'),
) )
...@@ -457,10 +455,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -457,10 +455,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> dict # type: () -> dict
parameter_dict = super( parameter_dict = super()._getInstanceParameterDict()
TestTestRunnerEntryPoints,
cls,
)._getInstanceParameterDict()
parameter_dict['dummy_http_server-test-runner-address-list'] = [ parameter_dict['dummy_http_server-test-runner-address-list'] = [
[ [
...@@ -485,18 +480,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -485,18 +480,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
)['default-test-runner-url-list'] )['default-test-runner-url-list']
url_0, url_1, url_2 = test_runner_url_list url_0, url_1, url_2 = test_runner_url_list
self.assertEqual( self.assertEqual(
six.moves.urllib.parse.urlparse(url_0).netloc, urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_1).netloc) urllib.parse.urlparse(url_1).netloc)
self.assertEqual( self.assertEqual(
six.moves.urllib.parse.urlparse(url_0).netloc, urllib.parse.urlparse(url_0).netloc,
six.moves.urllib.parse.urlparse(url_2).netloc) urllib.parse.urlparse(url_2).netloc)
path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format( path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format( path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format( path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
netloc=six.moves.urllib.parse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
self.assertEqual( self.assertEqual(
{ {
...@@ -534,7 +529,7 @@ class TestHTTP(BalancerTestCase): ...@@ -534,7 +529,7 @@ class TestHTTP(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> dict # type: () -> dict
parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a HTTP/1.1 server instead # use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
return parameter_dict return parameter_dict
...@@ -574,7 +569,7 @@ class TestHTTP(BalancerTestCase): ...@@ -574,7 +569,7 @@ class TestHTTP(BalancerTestCase):
session.get(self.default_balancer_url).raise_for_status() session.get(self.default_balancer_url).raise_for_status()
new_conn.assert_not_called() new_conn.assert_not_called()
parsed_url = six.moves.urllib.parse.urlparse(self.default_balancer_url) parsed_url = urllib.parse.urlparse(self.default_balancer_url)
# check that we have an open file for the ip connection # check that we have an open file for the ip connection
self.assertTrue([ self.assertTrue([
...@@ -617,7 +612,7 @@ class TestContentEncoding(BalancerTestCase): ...@@ -617,7 +612,7 @@ class TestContentEncoding(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> dict # type: () -> dict
parameter_dict = super(TestContentEncoding, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [ parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False], [cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
] ]
...@@ -644,17 +639,17 @@ class TestContentEncoding(BalancerTestCase): ...@@ -644,17 +639,17 @@ class TestContentEncoding(BalancerTestCase):
'application/font-woff2', 'application/font-woff2',
'application/x-font-opentype', 'application/x-font-opentype',
'application/wasm',): 'application/wasm',):
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False) resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, content_type), verify=False)
self.assertEqual(resp.headers['Content-Type'], content_type) self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual( self.assertEqual(
resp.headers.get('Content-Encoding'), resp.headers.get('Content-Encoding'),
'gzip', 'gzip',
'%s uses wrong encoding: %s' % (content_type, resp.headers.get('Content-Encoding'))) '{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding')))
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self): def test_no_gzip_encoding(self):
# type: () -> None # type: () -> None
resp = requests.get(six.moves.urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False) resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers) self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
...@@ -777,9 +772,9 @@ class TestFrontendXForwardedFor(BalancerTestCase): ...@@ -777,9 +772,9 @@ class TestFrontendXForwardedFor(BalancerTestCase):
# type: () -> dict # type: () -> dict
frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService) frontend_caucase = cls.getManagedResource('frontend_caucase', CaucaseService)
certificate = cls.getManagedResource('client_certificate', CaucaseCertificate) certificate = cls.getManagedResource('client_certificate', CaucaseCertificate)
certificate.request(u'shared frontend', frontend_caucase) certificate.request('shared frontend', frontend_caucase)
parameter_dict = super(TestFrontendXForwardedFor, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# add another "-auth" backend, that will have ssl-authentication enabled # add another "-auth" backend, that will have ssl-authentication enabled
parameter_dict['zope-family-dict']['default-auth'] = ['dummy_http_server'] parameter_dict['zope-family-dict']['default-auth'] = ['dummy_http_server']
parameter_dict['backend-path-dict']['default-auth'] = '/' parameter_dict['backend-path-dict']['default-auth'] = '/'
...@@ -815,7 +810,7 @@ class TestFrontendXForwardedFor(BalancerTestCase): ...@@ -815,7 +810,7 @@ class TestFrontendXForwardedFor(BalancerTestCase):
).json() ).json()
self.assertNotEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4') self.assertNotEqual(result['Incoming Headers'].get('x-forwarded-for', '').split(', ')[0], '1.2.3.4')
balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default-auth'] balancer_url = json.loads(self.computer_partition.getConnectionParameterDict()['_'])['default-auth']
with self.assertRaisesRegexp(Exception, "certificate required"): with self.assertRaisesRegex(Exception, "certificate required"):
requests.get( requests.get(
balancer_url, balancer_url,
headers={'X-Forwarded-For': '1.2.3.4'}, headers={'X-Forwarded-For': '1.2.3.4'},
...@@ -833,8 +828,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase): ...@@ -833,8 +828,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
# type: () -> dict # type: () -> dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService) server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate) server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(six.ensure_text(cls._ipv4_address), server_caucase) server_certificate.request(cls._ipv4_address, server_caucase)
parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
with open(server_certificate.cert_file) as f: with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read() parameter_dict['ssl']['cert'] = f.read()
with open(server_certificate.key_file) as f: with open(server_certificate.key_file) as f:
...@@ -855,13 +850,13 @@ class TestClientTLS(BalancerTestCase): ...@@ -855,13 +850,13 @@ class TestClientTLS(BalancerTestCase):
# type: () -> dict # type: () -> dict
frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService) frontend_caucase1 = cls.getManagedResource('frontend_caucase1', CaucaseService)
certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate) certificate1 = cls.getManagedResource('client_certificate1', CaucaseCertificate)
certificate1.request(u'client_certificate1', frontend_caucase1) certificate1.request('client_certificate1', frontend_caucase1)
frontend_caucase2 = cls.getManagedResource('frontend_caucase2', CaucaseService) frontend_caucase2 = cls.getManagedResource('frontend_caucase2', CaucaseService)
certificate2 = cls.getManagedResource('client_certificate2', CaucaseCertificate) certificate2 = cls.getManagedResource('client_certificate2', CaucaseCertificate)
certificate2.request(u'client_certificate2', frontend_caucase2) certificate2.request('client_certificate2', frontend_caucase2)
parameter_dict = super(TestClientTLS, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['ssl-authentication-dict'] = { parameter_dict['ssl-authentication-dict'] = {
'default': True, 'default': True,
} }
...@@ -936,11 +931,11 @@ class TestClientTLS(BalancerTestCase): ...@@ -936,11 +931,11 @@ class TestClientTLS(BalancerTestCase):
process = pexpect.spawnu("faketime +1day %s" % caucase_updater) process = pexpect.spawnu("faketime +1day %s" % caucase_updater)
process.logfile = DebugLogFile() process.logfile = DebugLogFile()
process.expect(u"Got new CRL.*Next wake-up at.*") process.expect("Got new CRL.*Next wake-up at.*")
process.terminate() process.terminate()
process.wait() process.wait()
with self.assertRaisesRegexp(Exception, 'certificate revoked'): with self.assertRaisesRegex(Exception, 'certificate revoked'):
_make_request() _make_request()
...@@ -952,10 +947,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -952,10 +947,7 @@ class TestPathBasedRouting(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> dict # type: () -> dict
parameter_dict = super( parameter_dict = super()._getInstanceParameterDict()
TestPathBasedRouting,
cls,
)._getInstanceParameterDict()
parameter_dict['zope-family-dict'][ parameter_dict['zope-family-dict'][
'second' 'second'
] = parameter_dict['zope-family-dict'][ ] = parameter_dict['zope-family-dict'][
...@@ -985,7 +977,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -985,7 +977,7 @@ class TestPathBasedRouting(BalancerTestCase):
published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_']) published_dict = json.loads(self.computer_partition.getConnectionParameterDict()['_'])
scheme = 'scheme' scheme = 'scheme'
netloc = 'example.com:8080' netloc = 'example.com:8080'
prefix = '/VirtualHostBase/' + scheme + '//' + six.moves.urllib.parse.quote( prefix = '/VirtualHostBase/' + scheme + '//' + urllib.parse.quote(
netloc, netloc,
safe='', safe='',
) )
...@@ -1009,7 +1001,7 @@ class TestPathBasedRouting(BalancerTestCase): ...@@ -1009,7 +1001,7 @@ class TestPathBasedRouting(BalancerTestCase):
# test will need to be updated accordingly. # test will need to be updated accordingly.
self.assertEqual( self.assertEqual(
requests.get( requests.get(
six.moves.urllib.parse.urljoin(published_dict[family], prefix + vhr + path), urllib.parse.urljoin(published_dict[family], prefix + vhr + path),
verify=False, verify=False,
).json()['Path'], ).json()['Path'],
expected_path, expected_path,
......
...@@ -25,7 +25,6 @@ ...@@ -25,7 +25,6 @@
# #
############################################################################## ##############################################################################
from __future__ import absolute_import
import contextlib import contextlib
import glob import glob
...@@ -42,9 +41,8 @@ import unittest ...@@ -42,9 +41,8 @@ import unittest
import psutil import psutil
import requests import requests
import six import urllib.parse
import six.moves.urllib.parse import xmlrpc.client
import six.moves.xmlrpc_client
import urllib3 import urllib3
from slapos.testing.utils import CrontabMixin from slapos.testing.utils import CrontabMixin
...@@ -53,7 +51,7 @@ from . import ERP5InstanceTestCase, setUpModule ...@@ -53,7 +51,7 @@ from . import ERP5InstanceTestCase, setUpModule
setUpModule # pyflakes setUpModule # pyflakes
class TestPublishedURLIsReachableMixin(object): class TestPublishedURLIsReachableMixin:
"""Mixin that checks that default page of ERP5 is reachable. """Mixin that checks that default page of ERP5 is reachable.
""" """
...@@ -61,7 +59,7 @@ class TestPublishedURLIsReachableMixin(object): ...@@ -61,7 +59,7 @@ class TestPublishedURLIsReachableMixin(object):
# We access ERP5 through a "virtual host", which should make # We access ERP5 through a "virtual host", which should make
# ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root # ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
# as base. # as base.
virtual_host_url = six.moves.urllib.parse.urljoin( virtual_host_url = urllib.parse.urljoin(
base_url, base_url,
'/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/' '/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
.format(site_id)) .format(site_id))
...@@ -89,7 +87,7 @@ class TestPublishedURLIsReachableMixin(object): ...@@ -89,7 +87,7 @@ class TestPublishedURLIsReachableMixin(object):
# login page can be rendered and contain the text "ERP5" # login page can be rendered and contain the text "ERP5"
r = session.get( r = session.get(
six.moves.urllib.parse.urljoin(base_url, '{}/login_form'.format(site_id)), urllib.parse.urljoin(base_url, f'{site_id}/login_form'),
verify=verify, verify=verify,
allow_redirects=False, allow_redirects=False,
) )
...@@ -184,7 +182,7 @@ class TestBalancerPorts(ERP5InstanceTestCase): ...@@ -184,7 +182,7 @@ class TestBalancerPorts(ERP5InstanceTestCase):
} }
def checkValidHTTPSURL(self, url): def checkValidHTTPSURL(self, url):
parsed = six.moves.urllib.parse.urlparse(url) parsed = urllib.parse.urlparse(url)
self.assertEqual(parsed.scheme, 'https') self.assertEqual(parsed.scheme, 'https')
self.assertTrue(parsed.hostname) self.assertTrue(parsed.hostname)
self.assertTrue(parsed.port) self.assertTrue(parsed.port)
...@@ -194,16 +192,16 @@ class TestBalancerPorts(ERP5InstanceTestCase): ...@@ -194,16 +192,16 @@ class TestBalancerPorts(ERP5InstanceTestCase):
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'): for family_name in ('family1', 'family2'):
self.checkValidHTTPSURL( self.checkValidHTTPSURL(
param_dict['family-{family_name}'.format(family_name=family_name)]) param_dict[f'family-{family_name}'])
self.checkValidHTTPSURL( self.checkValidHTTPSURL(
param_dict['family-{family_name}-v6'.format(family_name=family_name)]) param_dict[f'family-{family_name}-v6'])
def test_published_test_runner_url(self): def test_published_test_runner_url(self):
# each family also has a list of test runner URLs, by default 3 per family # each family also has a list of test runner URLs, by default 3 per family
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'): for family_name in ('family1', 'family2'):
family_test_runner_url_list = param_dict[ family_test_runner_url_list = param_dict[
'{family_name}-test-runner-url-list'.format(family_name=family_name)] f'{family_name}-test-runner-url-list']
self.assertEqual(3, len(family_test_runner_url_list)) self.assertEqual(3, len(family_test_runner_url_list))
for url in family_test_runner_url_list: for url in family_test_runner_url_list:
self.checkValidHTTPSURL(url) self.checkValidHTTPSURL(url)
...@@ -221,16 +219,16 @@ class TestBalancerPorts(ERP5InstanceTestCase): ...@@ -221,16 +219,16 @@ class TestBalancerPorts(ERP5InstanceTestCase):
# normal access on ipv4 and ipv6 and test runner access on ipv4 only # normal access on ipv4 and ipv6 and test runner access on ipv4 only
with self.slap.instance_supervisor_rpc as supervisor: with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo() all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'].startswith('haproxy-')] process_info, = (p for p in all_process_info if p['name'].startswith('haproxy-'))
haproxy_master_process = psutil.Process(process_info['pid']) haproxy_master_process = psutil.Process(process_info['pid'])
haproxy_worker_process, = haproxy_master_process.children() haproxy_worker_process, = haproxy_master_process.children()
self.assertEqual( self.assertEqual(
sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2), sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2),
sorted([ sorted(
c.family c.family
for c in haproxy_worker_process.connections() for c in haproxy_worker_process.connections()
if c.status == 'LISTEN' if c.status == 'LISTEN'
])) ))
class TestSeleniumTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin): class TestSeleniumTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
...@@ -296,7 +294,7 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix ...@@ -296,7 +294,7 @@ class TestDisableTestRunner(ERP5InstanceTestCase, TestPublishedURLIsReachableMix
# Haproxy only listen on two ports, there is no haproxy ports allocated for test runner # Haproxy only listen on two ports, there is no haproxy ports allocated for test runner
with self.slap.instance_supervisor_rpc as supervisor: with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo() all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'].startswith('haproxy')] process_info, = (p for p in all_process_info if p['name'].startswith('haproxy'))
haproxy_master_process = psutil.Process(process_info['pid']) haproxy_master_process = psutil.Process(process_info['pid'])
haproxy_worker_process, = haproxy_master_process.children() haproxy_worker_process, = haproxy_master_process.children()
self.assertEqual( self.assertEqual(
...@@ -366,7 +364,7 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac ...@@ -366,7 +364,7 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
zodb["pool-timeout"] = "10m" zodb["pool-timeout"] = "10m"
storage["storage"] = "root" storage["storage"] = "root"
storage["server"] = zeo_addr storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f: with open(f'{partition}/etc/zope-{zope}.conf') as f:
conf = list(map(str.strip, f.readlines())) conf = list(map(str.strip, f.readlines()))
i = conf.index("<zodb_db root>") + 1 i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)]) conf = iter(conf[i:conf.index("</zodb_db>", i)])
...@@ -376,12 +374,12 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac ...@@ -376,12 +374,12 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
if line == '</zeoclient>': if line == '</zeoclient>':
break break
checkParameter(line, storage) checkParameter(line, storage)
for k, v in six.iteritems(storage): for k, v in storage.items():
self.assertIsNone(v, k) self.assertIsNone(v, k)
del storage del storage
else: else:
checkParameter(line, zodb) checkParameter(line, zodb)
for k, v in six.iteritems(zodb): for k, v in zodb.items():
self.assertIsNone(v, k) self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a') partition = self.getComputerPartitionPath('zope-a')
...@@ -436,19 +434,19 @@ class TestWatchActivities(ERP5InstanceTestCase): ...@@ -436,19 +434,19 @@ class TestWatchActivities(ERP5InstanceTestCase):
env=dict(os.environ, env=dict(os.environ,
PATH=os.pathsep.join([tmpdir, os.environ['PATH']])), PATH=os.pathsep.join([tmpdir, os.environ['PATH']])),
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
universal_newlines=True, text=True,
) )
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
self.fail(e.output) self.fail(e.output)
self.assertIn(' dict ', output) self.assertIn(' dict ', output)
class ZopeSkinsMixin(object): class ZopeSkinsMixin:
"""Mixins with utility methods to test zope behaviors. """Mixins with utility methods to test zope behaviors.
""" """
@classmethod @classmethod
def _setUpClass(cls): def _setUpClass(cls):
super(ZopeSkinsMixin, cls)._setUpClass() super()._setUpClass()
param_dict = cls.getRootPartitionConnectionParameterDict() param_dict = cls.getRootPartitionConnectionParameterDict()
with cls.getXMLRPCClient() as erp5_xmlrpc_client: with cls.getXMLRPCClient() as erp5_xmlrpc_client:
# wait for ERP5 to be ready (TODO: this should probably be a promise) # wait for ERP5 to be ready (TODO: this should probably be a promise)
...@@ -456,8 +454,8 @@ class ZopeSkinsMixin(object): ...@@ -456,8 +454,8 @@ class ZopeSkinsMixin(object):
time.sleep(1) time.sleep(1)
try: try:
erp5_xmlrpc_client.getTitle() erp5_xmlrpc_client.getTitle()
except (six.moves.xmlrpc_client.ProtocolError, except (xmlrpc.client.ProtocolError,
six.moves.xmlrpc_client.Fault): xmlrpc.client.Fault):
pass pass
else: else:
break break
...@@ -470,7 +468,7 @@ class ZopeSkinsMixin(object): ...@@ -470,7 +468,7 @@ class ZopeSkinsMixin(object):
path is joined with urllib.parse.urljoin to the URL of the portal. path is joined with urllib.parse.urljoin to the URL of the portal.
""" """
param_dict = cls.getRootPartitionConnectionParameterDict() param_dict = cls.getRootPartitionConnectionParameterDict()
parsed = six.moves.urllib.parse.urlparse(param_dict['family-' + family_name]) parsed = urllib.parse.urlparse(param_dict['family-' + family_name])
base_url = parsed._replace( base_url = parsed._replace(
netloc='{}:{}@{}:{}'.format( netloc='{}:{}@{}:{}'.format(
param_dict['inituser-login'], param_dict['inituser-login'],
...@@ -480,7 +478,7 @@ class ZopeSkinsMixin(object): ...@@ -480,7 +478,7 @@ class ZopeSkinsMixin(object):
), ),
path=param_dict['site-id'] + '/', path=param_dict['site-id'] + '/',
).geturl() ).geturl()
return six.moves.urllib_parse.urljoin(base_url, path) return urllib.parse.urljoin(base_url, path)
@classmethod @classmethod
@contextlib.contextmanager @contextlib.contextmanager
...@@ -489,16 +487,12 @@ class ZopeSkinsMixin(object): ...@@ -489,16 +487,12 @@ class ZopeSkinsMixin(object):
ssl_context = ssl.create_default_context() ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE ssl_context.verify_mode = ssl.CERT_NONE
erp5_xmlrpc_client = six.moves.xmlrpc_client.ServerProxy( erp5_xmlrpc_client = xmlrpc.client.ServerProxy(
cls._getAuthenticatedZopeUrl(''), cls._getAuthenticatedZopeUrl(''),
context=ssl_context, context=ssl_context,
) )
# BBB use as a context manager only on python3 with erp5_xmlrpc_client:
if sys.version_info < (3, ):
yield erp5_xmlrpc_client yield erp5_xmlrpc_client
else:
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
@classmethod @classmethod
def _addPythonScript(cls, script_id, params, body): def _addPythonScript(cls, script_id, params, body):
...@@ -507,7 +501,7 @@ class ZopeSkinsMixin(object): ...@@ -507,7 +501,7 @@ class ZopeSkinsMixin(object):
try: try:
custom.manage_addProduct.PythonScripts.manage_addPythonScript( custom.manage_addProduct.PythonScripts.manage_addPythonScript(
script_id) script_id)
except six.moves.xmlrpc_client.ProtocolError as e: except xmlrpc.client.ProtocolError as e:
if e.errcode != 302: if e.errcode != 302:
raise raise
getattr(custom, script_id).ZPythonScriptHTML_editAction( getattr(custom, script_id).ZPythonScriptHTML_editAction(
...@@ -542,7 +536,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin): ...@@ -542,7 +536,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
@classmethod @classmethod
def _setUpClass(cls): def _setUpClass(cls):
super(ZopeTestMixin, cls)._setUpClass() super()._setUpClass()
cls.zope_base_url = cls._getAuthenticatedZopeUrl('') cls.zope_base_url = cls._getAuthenticatedZopeUrl('')
param_dict = cls.getRootPartitionConnectionParameterDict() param_dict = cls.getRootPartitionConnectionParameterDict()
cls.zope_deadlock_debugger_url = cls._getAuthenticatedZopeUrl( cls.zope_deadlock_debugger_url = cls._getAuthenticatedZopeUrl(
...@@ -565,7 +559,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin): ...@@ -565,7 +559,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
raise ValueError("Unknown mode: %s" % mode) raise ValueError("Unknown mode: %s" % mode)
''', ''',
) )
cls.zope_verify_activity_processing_url = six.moves.urllib_parse.urljoin( cls.zope_verify_activity_processing_url = urllib.parse.urljoin(
cls.zope_base_url, cls.zope_base_url,
'ERP5Site_verifyActivityProcessing', 'ERP5Site_verifyActivityProcessing',
) )
...@@ -578,7 +572,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin): ...@@ -578,7 +572,7 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
return log("hello %s" % name) return log("hello %s" % name)
''', ''',
) )
cls.zope_log_message_url = six.moves.urllib_parse.urljoin( cls.zope_log_message_url = urllib.parse.urljoin(
cls.zope_base_url, cls.zope_base_url,
'ERP5Site_logMessage', 'ERP5Site_logMessage',
) )
...@@ -593,18 +587,18 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin): ...@@ -593,18 +587,18 @@ class ZopeTestMixin(ZopeSkinsMixin, CrontabMixin):
return "done" return "done"
''', ''',
) )
cls.zope_long_request_url = six.moves.urllib_parse.urljoin( cls.zope_long_request_url = urllib.parse.urljoin(
cls.zope_base_url, cls.zope_base_url,
'ERP5Site_executeLongRequest', 'ERP5Site_executeLongRequest',
) )
def setUp(self): def setUp(self):
super(ZopeTestMixin, self).setUp() super().setUp()
# run logrotate a first time so that it create state files # run logrotate a first time so that it create state files
self._executeCrontabAtDate('logrotate', '2000-01-01') self._executeCrontabAtDate('logrotate', '2000-01-01')
def tearDown(self): def tearDown(self):
super(ZopeTestMixin, self).tearDown() super().tearDown()
# reset logrotate status # reset logrotate status
logrotate_status = os.path.join( logrotate_status = os.path.join(
self.getComputerPartitionPath('zope-default'), self.getComputerPartitionPath('zope-default'),
...@@ -859,11 +853,11 @@ class TestZopeWSGI(ZopeTestMixin, ERP5InstanceTestCase): ...@@ -859,11 +853,11 @@ class TestZopeWSGI(ZopeTestMixin, ERP5InstanceTestCase):
@unittest.expectedFailure @unittest.expectedFailure
def test_long_request_log_rotation(self): def test_long_request_log_rotation(self):
super(TestZopeWSGI, self).test_long_request_log_rotation(self) super().test_long_request_log_rotation()
@unittest.expectedFailure @unittest.expectedFailure
def test_basic_authentication_user_in_access_log(self): def test_basic_authentication_user_in_access_log(self):
super(TestZopeWSGI, self).test_basic_authentication_user_in_access_log(self) super().test_basic_authentication_user_in_access_log()
class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase): class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase):
...@@ -902,7 +896,7 @@ class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase): ...@@ -902,7 +896,7 @@ class TestZopePublisherTimeout(ZopeSkinsMixin, ERP5InstanceTestCase):
@classmethod @classmethod
def _setUpClass(cls): def _setUpClass(cls):
super(TestZopePublisherTimeout, cls)._setUpClass() super()._setUpClass()
cls._addPythonScript( cls._addPythonScript(
'ERP5Site_doSlowRequest', 'ERP5Site_doSlowRequest',
'', '',
......
############################################################################## ##############################################################################
# coding: utf-8
# #
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved. # Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
# #
...@@ -26,11 +25,10 @@ ...@@ -26,11 +25,10 @@
# #
############################################################################## ##############################################################################
from __future__ import absolute_import
import os import os
import json import json
import glob import glob
import six.moves.urllib.parse import urllib.parse
import socket import socket
import sys import sys
import time import time
...@@ -39,7 +37,7 @@ import datetime ...@@ -39,7 +37,7 @@ import datetime
import subprocess import subprocess
import gzip import gzip
from backports import lzma import lzma
import MySQLdb import MySQLdb
from slapos.testing.utils import CrontabMixin from slapos.testing.utils import CrontabMixin
...@@ -84,7 +82,7 @@ class MariaDBTestCase(ERP5InstanceTestCase): ...@@ -84,7 +82,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
# type: () -> MySQLdb.connections.Connection # type: () -> MySQLdb.connections.Connection
connection_parameter_dict = json.loads( connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_']) self.computer_partition.getConnectionParameterDict()['_'])
db_url = six.moves.urllib.parse.urlparse(connection_parameter_dict['database-list'][0]) db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
self.assertEqual('mysql', db_url.scheme) self.assertEqual('mysql', db_url.scheme)
self.assertTrue(db_url.path.startswith('/')) self.assertTrue(db_url.path.startswith('/'))
...@@ -208,7 +206,7 @@ class TestMariaDB(MariaDBTestCase): ...@@ -208,7 +206,7 @@ class TestMariaDB(MariaDBTestCase):
""" """
select * from test_utf8_collation where col1 = "a" select * from test_utf8_collation where col1 = "a"
""") """)
self.assertEqual(((u'à',),), cnx.store_result().fetch_row(maxrows=2)) self.assertEqual((('à',),), cnx.store_result().fetch_row(maxrows=2))
class TestMroonga(MariaDBTestCase): class TestMroonga(MariaDBTestCase):
...@@ -232,7 +230,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -232,7 +230,7 @@ class TestMroonga(MariaDBTestCase):
SELECT mroonga_normalize("ABCDあぃうぇ㍑") SELECT mroonga_normalize("ABCDあぃうぇ㍑")
""") """)
# XXX this is returned as bytes by mroonga/mariadb (this might be a bug) # XXX this is returned as bytes by mroonga/mariadb (this might be a bug)
self.assertEqual(((u'abcdあぃうぇリットル'.encode('utf-8'),),), self.assertEqual((('abcdあぃうぇリットル'.encode(),),),
cnx.store_result().fetch_row(maxrows=2)) cnx.store_result().fetch_row(maxrows=2))
if 0: if 0:
...@@ -245,7 +243,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -245,7 +243,7 @@ class TestMroonga(MariaDBTestCase):
""" """
SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark") SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark")
""") """)
self.assertEqual(((u'ABCDあぃうぇ㍑'.encode('utf-8'),),), self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),),
cnx.store_result().fetch_row(maxrows=2)) cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self): def test_mroonga_full_text_normalizer(self):
...@@ -282,7 +280,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -282,7 +280,7 @@ class TestMroonga(MariaDBTestCase):
WHERE MATCH (content) AGAINST ("+ブラック" IN BOOLEAN MODE) WHERE MATCH (content) AGAINST ("+ブラック" IN BOOLEAN MODE)
""") """)
self.assertEqual( self.assertEqual(
((datetime.date(2013, 4, 23), u'ブラックコーヒーを飲んだ。'),), ((datetime.date(2013, 4, 23), 'ブラックコーヒーを飲んだ。'),),
cnx.store_result().fetch_row(maxrows=2), cnx.store_result().fetch_row(maxrows=2),
) )
......
...@@ -16,7 +16,6 @@ ...@@ -16,7 +16,6 @@
# See COPYING file for full licensing terms. # See COPYING file for full licensing terms.
# See https://www.nexedi.com/licensing for rationale and options. # See https://www.nexedi.com/licensing for rationale and options.
from __future__ import absolute_import
import json import json
import os.path import os.path
import unittest import unittest
...@@ -76,5 +75,5 @@ def lookupMount(zurl): ...@@ -76,5 +75,5 @@ def lookupMount(zurl):
# readfile returns content of file @path. # readfile returns content of file @path.
def readfile(path): def readfile(path):
with open(path, 'r') as f: with open(path) as f:
return f.read() return f.read()
...@@ -44,7 +44,6 @@ setup(name=name, ...@@ -44,7 +44,6 @@ setup(name=name,
'slapos.core', 'slapos.core',
'supervisor', 'supervisor',
'slapos.libnetworkcache', 'slapos.libnetworkcache',
'typing; python_version<"3"',
], ],
test_suite='test', test_suite='test',
) )
...@@ -35,8 +35,8 @@ import tempfile ...@@ -35,8 +35,8 @@ import tempfile
import time import time
import requests import requests
import six.moves.urllib as urllib import urllib.parse
import six.moves.xmlrpc_client import xmlrpc.client
import urllib3 import urllib3
from slapos.grid.utils import md5digest from slapos.grid.utils import md5digest
...@@ -83,8 +83,8 @@ class ERP5UpgradeTestCase(SlapOSInstanceTestCase): ...@@ -83,8 +83,8 @@ class ERP5UpgradeTestCase(SlapOSInstanceTestCase):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
# request and instanciate with old software url # request and instantiate with old software url
super(ERP5UpgradeTestCase, cls).setUpClass() super().setUpClass()
cls.setUpOldInstance() cls.setUpOldInstance()
...@@ -155,7 +155,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase): ...@@ -155,7 +155,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
# wait for old site creation # wait for old site creation
cls.session.get( cls.session.get(
'{zope_base_url}/person_module'.format(zope_base_url=cls.zope_base_url), f'{cls.zope_base_url}/person_module',
auth=requests.auth.HTTPBasicAuth( auth=requests.auth.HTTPBasicAuth(
username=param_dict['inituser-login'], username=param_dict['inituser-login'],
password=param_dict['inituser-password'], password=param_dict['inituser-password'],
...@@ -171,16 +171,12 @@ class TestERP5Upgrade(ERP5UpgradeTestCase): ...@@ -171,16 +171,12 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
ssl_context = ssl.create_default_context() ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE ssl_context.verify_mode = ssl.CERT_NONE
erp5_xmlrpc_client = six.moves.xmlrpc_client.ServerProxy( erp5_xmlrpc_client = xmlrpc.client.ServerProxy(
cls.authenticated_zope_base_url, cls.authenticated_zope_base_url,
context=ssl_context, context=ssl_context,
) )
# BBB use as a context manager only on python3 with erp5_xmlrpc_client:
if sys.version_info < (3, ):
yield erp5_xmlrpc_client yield erp5_xmlrpc_client
else:
with erp5_xmlrpc_client:
yield erp5_xmlrpc_client
def addPythonScript(script_id, params, body): def addPythonScript(script_id, params, body):
with getXMLRPCClient() as erp5_xmlrpc_client: with getXMLRPCClient() as erp5_xmlrpc_client:
...@@ -188,7 +184,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase): ...@@ -188,7 +184,7 @@ class TestERP5Upgrade(ERP5UpgradeTestCase):
try: try:
custom.manage_addProduct.PythonScripts.manage_addPythonScript( custom.manage_addProduct.PythonScripts.manage_addPythonScript(
script_id) script_id)
except six.moves.xmlrpc_client.ProtocolError as e: except xmlrpc.client.ProtocolError as e:
if e.errcode != 302: if e.errcode != 302:
raise raise
getattr(custom, script_id).ZPythonScriptHTML_editAction( getattr(custom, script_id).ZPythonScriptHTML_editAction(
......
...@@ -18,4 +18,4 @@ md5sum = 8357771b70efd0740561b1cb46f6955e ...@@ -18,4 +18,4 @@ md5sum = 8357771b70efd0740561b1cb46f6955e
[template-deploy-test] [template-deploy-test]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = c5db797980951b764b69aaa4b60a0380 md5sum = 062e677990ca0cd0c0396993c58b46a2
...@@ -22,7 +22,7 @@ parts = ...@@ -22,7 +22,7 @@ parts =
[request-kvm] [request-kvm]
<= slap-connection <= slap-connection
recipe = slapos.cookbook:request recipe = slapos.cookbook:request.serialised
software-url = ${slap-connection:software-release-url} software-url = ${slap-connection:software-release-url}
software-type = kvm software-type = kvm
name = test-kvm name = test-kvm
......
...@@ -25,24 +25,22 @@ command = ...@@ -25,24 +25,22 @@ command =
[runTestSuite] [runTestSuite]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
rendered = $${buildout:directory}/bin/$${:_buildout_section_name_} output = $${buildout:directory}/bin/$${:_buildout_section_name_}
template = inline: inline =
#!/bin/sh #!/bin/sh
export PATH=${python2.7-with-eggs:location}:$PATH
exec ${buildout:bin-directory}/${runTestSuite_py:interpreter} ${:_profile_base_location_}/runTestSuite.py --partition_ipv4 {{ list(partition_ipv4)[0] }} --partition_path $${buildout:directory} --test_reference "{{ slapparameter_dict.get('image-to-test-url') }} {{ slapparameter_dict.get('script-to-test-url')}}" --test_location "${test-location:base}/{{ slapparameter_dict.get('test-relative-directory', 'playbook/roles/standalone-shared') }}" --python_interpreter=${buildout:bin-directory}/${runTestSuite_py:interpreter} "$@" exec ${buildout:bin-directory}/${runTestSuite_py:interpreter} ${:_profile_base_location_}/runTestSuite.py --partition_ipv4 {{ list(partition_ipv4)[0] }} --partition_path $${buildout:directory} --test_reference "{{ slapparameter_dict.get('image-to-test-url') }} {{ slapparameter_dict.get('script-to-test-url')}}" --test_location "${test-location:base}/{{ slapparameter_dict.get('test-relative-directory', 'playbook/roles/standalone-shared') }}" --python_interpreter=${buildout:bin-directory}/${runTestSuite_py:interpreter} "$@"
mode = 0755
context = context =
key slapparameter_dict slap-configuration:configuration key slapparameter_dict slap-configuration:configuration
key partition_ipv4 slap-configuration:ipv4 key partition_ipv4 slap-configuration:ipv4
[switch_softwaretype] [switch_softwaretype]
default = $${:deploy-test} default = $${:deploy-test}
deploy-test = dynamic-template-deploy-test:rendered deploy-test = dynamic-template-deploy-test:output
[dynamic-template-deploy-test] [dynamic-template-deploy-test]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/instance-deploy-test.cfg.jinja2 url = ${:_profile_base_location_}/instance-deploy-test.cfg.jinja2
rendered = $${buildout:directory}/template-deploy-test.cfg output = $${buildout:directory}/template-deploy-test.cfg
context = context =
key develop_eggs_directory buildout:develop-eggs-directory key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory key eggs_directory buildout:eggs-directory
...@@ -52,4 +50,3 @@ context = ...@@ -52,4 +50,3 @@ context =
raw in_vm_test_script_md5 ${deploy-script-controller-script:md5sum} raw in_vm_test_script_md5 ${deploy-script-controller-script:md5sum}
raw waittime ${deploy-script-controller-script:waittime} raw waittime ${deploy-script-controller-script:waittime}
raw tries ${deploy-script-controller-script:tries} raw tries ${deploy-script-controller-script:tries}
mode = 0644
[buildout] [buildout]
extends = extends =
buildout.hash.cfg buildout.hash.cfg
https://lab.nexedi.com/nexedi/slapos/raw/1.0.264/software/kvm/software.cfg https://lab.nexedi.com/nexedi/slapos/raw/1.0.271/software/kvm/software.cfg
parts = parts =
python-with-eggs python-with-eggs
template-deploy-test template-deploy-test
...@@ -15,23 +15,8 @@ eggs = ...@@ -15,23 +15,8 @@ eggs =
requests requests
interpreter = ${:_buildout_section_name_} interpreter = ${:_buildout_section_name_}
[python2.7-with-eggs]
# create interpreters named "python" and "python2.7" so that
# instance profile can use them in $PATH
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
command =
rm -fr ${:location} &&
mkdir -p ${:location} &&
ln -s ${buildout:bin-directory}/${python-with-eggs:interpreter} ${:location}/python &&
ln -s ${buildout:bin-directory}/${python-with-eggs:interpreter} ${:location}/python2.7
update-command = ${:command}
[playbook] [playbook]
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = true stop-on-error = true
environment = export PATH=${tar:location}/bin:${gzip:location}/bin:$PATH environment = export PATH=${tar:location}/bin:${gzip:location}/bin:$PATH
location = ${buildout:parts-directory}/${:_buildout_section_name_} location = ${buildout:parts-directory}/${:_buildout_section_name_}
...@@ -70,7 +55,6 @@ location = ${:_profile_base_location_}/${:filename} ...@@ -70,7 +55,6 @@ location = ${:_profile_base_location_}/${:filename}
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
mode = 0644
[versions] [versions]
erp5.util = 0.4.69 erp5.util = 0.4.69
[buildout]
extends =
software.cfg
[python]
part = python2.7
...@@ -46,7 +46,6 @@ setup(name=name, ...@@ -46,7 +46,6 @@ setup(name=name,
'slapos.libnetworkcache', 'slapos.libnetworkcache',
'erp5.util', 'erp5.util',
'supervisor', 'supervisor',
'six',
], ],
zip_safe=True, zip_safe=True,
test_suite='test', test_suite='test',
......
...@@ -34,11 +34,10 @@ import struct ...@@ -34,11 +34,10 @@ import struct
import subprocess import subprocess
import tempfile import tempfile
import time import time
import six
import sys import sys
from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler from http.server import SimpleHTTPRequestHandler
from six.moves.socketserver import StreamRequestHandler, TCPServer from socketserver import StreamRequestHandler, TCPServer
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
from slapos.testing.utils import findFreeTCPPort from slapos.testing.utils import findFreeTCPPort
...@@ -49,8 +48,8 @@ FLUSH_INTERVAL = 1 ...@@ -49,8 +48,8 @@ FLUSH_INTERVAL = 1
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath( os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
'software%s.cfg' % ("-py2" if six.PY2 else ""))))
class FluentdTestCase(SlapOSInstanceTestCase): class FluentdTestCase(SlapOSInstanceTestCase):
__partition_reference__ = 'fluentd' __partition_reference__ = 'fluentd'
...@@ -135,11 +134,11 @@ class WendelinTutorialTestCase(FluentdTestCase): ...@@ -135,11 +134,11 @@ class WendelinTutorialTestCase(FluentdTestCase):
return subprocess.check_output( return subprocess.check_output(
[self._fluentd_bin, '-c', conf_path, '--dry-run'], [self._fluentd_bin, '-c', conf_path, '--dry-run'],
env={'GEM_PATH': self._gem_path}, env={'GEM_PATH': self._gem_path},
universal_newlines=True, text=True,
) )
def _test_configuration(self, expected_str): def _test_configuration(self, expected_str):
self.assertRegexpMatches( self.assertRegex(
self.read_fluentd_conf(self._conf), self.read_fluentd_conf(self._conf),
expected_str, expected_str,
) )
...@@ -168,12 +167,12 @@ class SensorConfTestCase(WendelinTutorialTestCase): ...@@ -168,12 +167,12 @@ class SensorConfTestCase(WendelinTutorialTestCase):
@classmethod @classmethod
def sensor_conf(cls, script_path): def sensor_conf(cls, script_path):
return '''\ return f'''\
<source> <source>
@type exec @type exec
tag tag.name tag tag.name
command %s %s command {sys.executable} {script_path}
run_interval %ss run_interval {FLUSH_INTERVAL}s
<parse> <parse>
keys pressure, humidity, temperature keys pressure, humidity, temperature
</parse> </parse>
...@@ -182,25 +181,25 @@ class SensorConfTestCase(WendelinTutorialTestCase): ...@@ -182,25 +181,25 @@ class SensorConfTestCase(WendelinTutorialTestCase):
@type forward @type forward
<server> <server>
name myserver1 name myserver1
host %s host {cls._ipv6_address}
</server> </server>
<buffer> <buffer>
flush_mode immediate flush_mode immediate
</buffer> </buffer>
</match>''' % (sys.executable, script_path, FLUSH_INTERVAL, cls._ipv6_address) </match>'''
@classmethod @classmethod
def sensor_script(cls, measurementList): def sensor_script(cls, measurementList):
return '''\ measurement_text = "\t".join(measurementList)
#!/usr/bin/python return f'''\
#!{sys.executable}
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
print("%s")''' % "\t".join(measurementList) print("{measurement_text}")'''
def test_configuration(self): def test_configuration(self):
self._test_configuration( self._test_configuration(
r'adding forwarding server \'myserver1\' host="%s" port=%s weight=60' fr'adding forwarding server \'myserver1\' host="{self._ipv6_address}" port={FLUENTD_PORT} weight=60'
% (self._ipv6_address, FLUENTD_PORT)
) )
def test_send_data(self): def test_send_data(self):
...@@ -229,25 +228,24 @@ class GatewayConfTestCase(WendelinTutorialTestCase): ...@@ -229,25 +228,24 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
@classmethod @classmethod
def gateway_conf(cls, fluentd_port, wendelin_port): def gateway_conf(cls, fluentd_port, wendelin_port):
return '''\ return f'''\
<source> <source>
@type forward @type forward
port %s port {fluentd_port}
bind %s bind {cls._ipv6_address}
</source> </source>
<match tag.name> <match tag.name>
@type wendelin @type wendelin
streamtool_uri http://[%s]:%s/erp5/portal_ingestion_policies/default streamtool_uri http://[{cls._ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
user foo user foo
password bar password bar
<buffer> <buffer>
flush_mode interval flush_mode interval
@type file @type file
path fluentd-buffer-file/ path fluentd-buffer-file/
flush_interval %ss flush_interval {FLUSH_INTERVAL}s
</buffer> </buffer>
</match>''' % (fluentd_port, cls._ipv6_address, cls._ipv6_address, </match>'''
wendelin_port, FLUSH_INTERVAL)
@classmethod @classmethod
def get_configuration(cls): def get_configuration(cls):
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
import os import os
import logging import logging
from six.moves.urllib.parse import urlparse from urllib.parse import urlparse
import requests import requests
......
# Javascript drone #
## Presentation ##
* Deploys 3 different scripts (`cli.js`, `demo.js` and `manual-flight.js`) on a drone to fly it
* Compiles all the libraries required to run the flight scripts
## Parameters ##
* autopilot-ip: IPv4 address used to identify the autopilot from the companion board
* id: User-chosen ID for the drone (must be unique in a swarm, will be used as an identifier in multicast communications)
* is-a-simulation: Must be set to 'true' to automatically take off during simulation
* leader-id: ID of the drone chosen to be the leader of the swarm
* multicast-ipv6: IPv6 address of the multicast group of the swarm
* drone-id-list: Comma-separated list of the drone IDs of the swarm (it is recommended to include the current drone ID)
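For illustration only, a hypothetical parameter set for one drone of a three-drone swarm could look like this (the parameter names come from the list above; every value is made up):

    autopilot-ip    = 192.168.1.42
    id              = 10
    is-a-simulation = false
    leader-id       = 10
    multicast-ipv6  = ff15::1
    drone-id-list   = 10,11,12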
## How it works ##
Run `<quickjs binary location> <desired script location>` to start one of the flight scripts.
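A minimal sketch of such an invocation, using placeholder paths (the real locations of the QuickJS binary and of the deployed scripts depend on the software release and instance layout):

    # placeholder paths, replace with the actual locations on the drone
    /path/to/quickjs/qjs /path/to/scripts/demo.js

`cli.js` and `manual-flight.js` are launched the same way.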
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in one of the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[instance-profile]
filename = instance.cfg
md5sum = 99017d061d3be30746f6daef3a7bd8c4
[cli]
filename = cli.js
md5sum = 33271aeec124301604fdd406f0b339d1
[common]
filename = common.js
md5sum = bacc70f2683c279ba4e0751de616d4ff
[demo]
filename = demo.js
md5sum = 31d8511e6d297643e65febe9a3ed2428
[manual-flight]
filename = manual-flight.js
md5sum = 175813fc8b2f19f91dae27ad4e14ab03
[pubsub]
filename = pubsub.js
md5sum = d949c9a6cdaaa94e7bdd22df5e52fbf4
/*jslint indent2 */
/*global console, std */
import {
loiter,
setAirspeed,
setAltitude,
setTargetLatLong,
reboot
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {
connect,
displayDronePositions,
land,
quit,
startPubsub,
takeOff
} from "{{ common }}"; //jslint-quiet
/*jslint-disable*/
import * as std from "std";
/*jslint-enable*/
var running = false;
const wrongParameters = displayMessage.bind(null, "Wrong parameters");
function checkNumber(value, toExecute) {
return (
Number.isNaN(value)
? wrongParameters
: toExecute.bind(null, value)
);
}
function displayMessage(message) {
console.log(message);
return 0;
}
function exit() {
running = false;
quit();
return 0;
}
function getInput() {
let undefined_cmd;
let altitude;
let cmd;
let latitude;
let longitude;
let s;
let speed;
const help = `
connect
takeoff
land
goto(point)
gotoCoord(latitude, longitude)
altitude(altitude)
speed(speed)
positions
reboot
exit
help
`;
const f = std.fdopen(std.in, "r");
running = true;
while (running) {
std.printf("> ");
s = f.getline();
undefined_cmd = false;
switch (s) {
case "altitude":
std.printf("Altitude: ");
altitude = parseFloat(f.getline());
cmd = checkNumber(altitude, setAltitude);
break;
case "connect":
cmd = connect;
startPubsub();
break;
case "exit":
cmd = exit;
break;
case "gotoCoord":
std.printf("Latitude: ");
latitude = parseFloat(f.getline());
std.printf("Longitude: ");
longitude = parseFloat(f.getline());
cmd = checkNumber(longitude, checkNumber(latitude, setTargetLatLong));
break;
case "help":
cmd = displayMessage.bind(null, help);
break;
case "land":
cmd = land;
break;
case "loiter":
cmd = loiter;
break;
case "positions":
cmd = displayDronePositions;
break;
case "reboot":
cmd = reboot;
break;
case "speed":
std.printf("Speed: ");
speed = parseFloat(f.getline());
cmd = checkNumber(speed, setAirspeed);
break;
case "takeoff":
cmd = takeOff.bind(null, 60);
break;
default:
undefined_cmd = true;
cmd = displayMessage.bind(null, " Undefined command");
}
let ret = cmd();
if (ret) {
console.log(" [ERROR] function:\n", cmd, "\nreturn value:", ret);
}
else if (s !== "help" && !undefined_cmd) {
console.log(" Command successful");
}
};
f.close();
}
getInput();
/*jslint-disable*/
{% set comma_separated_drone_id_list = ', '.join(drone_id_list.split()) -%}
/*jslint-enable*/
import {
arm,
doParachute,
getAltitude,
getLatitude,
getLongitude,
getYaw,
initPubsub,
setAltitude,
setTargetLatLong,
start,
stop,
stopPubsub,
takeOffAndWait,
Drone
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {exit} from "std";
import {sleep, Worker} from "os";
const IP = "{{ autopilot_ip }}";
const PORT = "7909";
export const IS_LEADER = {{ is_leader }};
export const LEADER_ID = {{ leader_id }};
export const SIMULATION = {{ is_a_simulation }};
export const EPSILON = 105;
const EPSILON_YAW = 6;
const EPSILON_ALTITUDE = 5;
const TARGET_YAW = 0;
export const ALTITUDE_DIFF = 30;
const URL = "udp://" + IP + ":" + PORT;
const LOG_FILE = "{{ log_dir }}/mavsdk-log";
const droneIdList = [{{ comma_separated_drone_id_list }}];
const droneDict = {};
var pubsubRunning = false;
var pubsubWorker;
export function connect() {
console.log("Will connect to", URL);
exit_on_fail(start(URL, LOG_FILE, 60), "Failed to connect to " + URL);
}
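// Great-circle distance in meters between two latitude/longitude pairs
// (haversine formula on a spherical Earth of radius 6371 km).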
export function distance(lat1, lon1, lat2, lon2) {
const R = 6371e3; // meters
const la1 = lat1 * Math.PI/180; // la, lo in radians
const la2 = lat2 * Math.PI/180;
const lo1 = lon1 * Math.PI/180;
const lo2 = lon2 * Math.PI/180;
//haversine formula
const sinLat = Math.sin((la2 - la1)/2);
const sinLon = Math.sin((lo2 - lo1)/2);
const h = sinLat*sinLat + Math.cos(la1)*Math.cos(la2)*sinLon*sinLon;
return 2*R*Math.asin(Math.sqrt(h));
}
export function displayDronePositions() {
if(!pubsubRunning)
console.log("You must start pubsub first !");
else {
for (const [id, drone] of Object.entries(droneDict)) {
console.log(id, drone.latitude, drone.longitude, drone.altitudeAbs, drone.altitudeRel);
}
}
return 0;
}
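// Log the message and abort the script (after stopping MAVSDK and pubsub)
// when a wrapper call returns a non-zero status.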
function exit_on_fail(ret, msg) {
if(ret) {
console.log(msg);
quit();
exit(-1);
}
}
export function quit() {
stop();
if(pubsubRunning) {
stopPubsub();
}
}
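// Command a new altitude when "go" is true and/or block until it is reached when "wait" is true.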
export function goToAltitude(target_altitude, wait, go) {
if(go) {
exit_on_fail(
setAltitude(target_altitude),
`Failed to go to altitude ${target_altitude} m`
);
}
if(wait) {
waitForAltitude(target_altitude);
}
}
export function land() {
var yaw;
while(true) {
yaw = getYaw();
console.log(`[DEMO] Waiting for yaw... (${yaw} , ${TARGET_YAW})`);
if(Math.abs(yaw - TARGET_YAW) < EPSILON_YAW) {
break;
}
sleep(250);
}
console.log("[DEMO] Deploying parachute...");
exit_on_fail(doParachute(2), "Failed to deploy parachute");
}
export function setLatLong(latitude, longitude, target_altitude) {
var cur_latitude;
var cur_longitude;
var d;
if(target_altitude !== 0) {
setAltitude(target_altitude, false, true);
}
console.log(`Going to (${latitude}, ${longitude}) from
(${getLatitude()}, ${getLongitude()})`);
exit_on_fail(
setTargetLatLong(latitude, longitude),
`Failed to go to (${latitude}, ${longitude})`
);
sleep(500);
while(true) {
cur_latitude = getLatitude();
cur_longitude = getLongitude();
d = distance(cur_latitude, cur_longitude, latitude, longitude);
console.log(`Waiting for drone to get to destination (${d} m),
(${cur_latitude} , ${cur_longitude}), (${latitude}, ${longitude})`);
if(d < EPSILON) {
sleep(6000);
return;
}
sleep(1000);
}
}
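// Spawn the pubsub worker, register every drone listed in droneIdList and
// return the dictionary of Drone objects indexed by id.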
export function startPubsub() {
pubsubWorker = new Worker("{{ pubsub_script }}");
pubsubWorker.onmessage = function(e) {
if (!e.data.publishing)
pubsubWorker.onmessage = null;
}
initPubsub(droneIdList.length);
for (let i = 0; i < droneIdList.length; i++) {
let id = droneIdList[i];
droneDict[id] = new Drone(id);
droneDict[id].init(i);
}
pubsubWorker.postMessage({ action: "run" });
pubsubRunning = true;
return droneDict;
}
export function takeOff(altitude) {
exit_on_fail(arm(), "Failed to arm");
takeOffAndWait();
goToAltitude(altitude, true, true);
}
function waitForAltitude(target_altitude) {
var altitude = getAltitude();
while(Math.abs(altitude - target_altitude) > EPSILON_ALTITUDE) {
console.log(
`[DEMO] Waiting for altitude... (${altitude} , ${target_altitude})`);
sleep(1000);
altitude = getAltitude();
}
}
/*jslint indent2 */
/*global console */
import {
getAltitude,
getAltitudeRel,
getInitialAltitude,
getLatitude,
getLongitude,
landed,
loiter,
setCheckpoint,
setTargetCoordinates
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {sleep} from "os";
import {
connect,
distance,
goToAltitude,
land,
quit,
setLatLong,
startPubsub,
takeOff,
ALTITUDE_DIFF,
IS_LEADER,
LEADER_ID,
SIMULATION
} from "{{ common }}"; //jslint-quiet
const FLIGHT_ALTITUDE = 100;
const PARACHUTE_ALTITUDE = 35;
const checkpointList = [
{
"latitude": 45.64492790560583,
"longitude": 14.25334942966329,
"altitude": 585.1806861589965
},
{
"latitude": 45.64316335436476,
"longitude": 14.26332880184475,
"altitude": 589.8802607573035
},
{
"latitude": 45.64911917196595,
"longitude": 14.26214792790128,
"altitude": 608.6648153348965
},
{
"latitude": 45.64122685351364,
"longitude": 14.26590493128597,
"altitude": 606.1448368129072
},
{
"latitude": 45.64543355564817,
"longitude": 14.27242391207985,
"altitude": 630.0829598206344
},
{
"latitude": 45.6372792927328,
"longitude": 14.27533492411138,
"altitude": 616.1839898415284
},
{
"latitude": 45.64061299543953,
"longitude": 14.26161958465814,
"altitude": 598.0603137354178
},
{
"latitude": 45.64032340702919,
"longitude": 14.2682896662383,
"altitude": 607.1243119862851
}
];
const landingPoint = [
{
"latitude": 45.6398451,
"longitude": 14.2699217
}
];
let INITIAL_ALTITUDE;
let START_ALTITUDE;
var nextCheckpoint = 0;
var distanceToLandingPoint = 100;
var leaderAltitudeAbs;
var leaderAltitudeRel;
var leaderLatitude;
var leaderLongitude;
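// Follower behaviour: climb ALTITUDE_DIFF above the start altitude, wait for the
// leader's telemetry, mirror its position (offset by altitudeDiff) until the leader
// descends below PARACHUTE_ALTITUDE, then resume from its last checkpoint.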
function followLeader(leaderId, initialAltitude, altitudeDiff) {
goToAltitude(START_ALTITUDE + ALTITUDE_DIFF, false, true);
while(droneDict[leaderId].altitudeAbs == 0) {
console.log("[DEMO] Waiting for leader to send its altitude");
sleep(1000);
}
while(droneDict[leaderId].altitudeAbs < initialAltitude) {
console.log(`[DEMO] Waiting for leader to reach altitude ${initialAltitude} (currently ${droneDict[leaderId].altitudeAbs})`);
sleep(1000);
}
console.log("[DEMO] Switching to following mode...\n");
do {
leaderAltitudeAbs = droneDict[leaderId].altitudeAbs;
leaderAltitudeRel = droneDict[leaderId].altitudeRel;
leaderLatitude = droneDict[leaderId].latitude;
leaderLongitude = droneDict[leaderId].longitude;
setTargetCoordinates(
leaderLatitude,
leaderLongitude,
leaderAltitudeAbs + altitudeDiff,
0
);
sleep(500);
} while(leaderAltitudeRel > PARACHUTE_ALTITUDE);
console.log("[DEMO] Stop following...\n");
nextCheckpoint = droneDict[leaderId].lastCheckpoint + 1;
}
function waitForAltitude(altitude) {
var curAltitude;
do {
sleep(1000);
curAltitude = getAltitude();
console.log(
`[DEMO] Waiting for altitude... (${curAltitude} , ${altitude})`);
}
while(curAltitude < altitude);
}
function waitForLanding() {
while(!landed()) {
sleep(1000);
}
}
console.log("[DEMO] Connecting...\n");
connect();
const droneDict = startPubsub();
INITIAL_ALTITUDE = getInitialAltitude();
START_ALTITUDE = INITIAL_ALTITUDE + FLIGHT_ALTITUDE;
if(SIMULATION) {
takeOff(START_ALTITUDE + 1);
}
waitForAltitude(START_ALTITUDE);
console.log("[DEMO] Setting loiter mode...\n");
loiter();
sleep(3000);
if(!IS_LEADER) {
followLeader(LEADER_ID, START_ALTITUDE, ALTITUDE_DIFF);
}
for (let i = nextCheckpoint; i < checkpointList.length; i++) {
console.log(`[DEMO] Going to Checkpoint ${i}\n`);
setLatLong(checkpointList[i].latitude, checkpointList[i].longitude, checkpointList[i].altitude + FLIGHT_ALTITUDE);
console.log(`[DEMO] Reached Checkpoint ${i}\n`);
setCheckpoint(i);
sleep(30000);
}
console.log("[DEMO] Setting altitude...\n");
goToAltitude(getAltitude() - getAltitudeRel() + PARACHUTE_ALTITUDE, true, true);
if(!IS_LEADER) {
setLatLong(
checkpointList[checkpointList.length - 1].latitude,
checkpointList[checkpointList.length - 1].longitude,
0
);
}
while(distanceToLandingPoint > 20) {
console.log(`[DEMO] Waiting to reach landing point (current distance is ${distanceToLandingPoint})`);
distanceToLandingPoint = distance(getLatitude(), getLongitude(), landingPoint[0].latitude, landingPoint[0].longitude);
sleep(1000); // poll once per second instead of busy-looping on telemetry reads
}
console.log("[DEMO] Landing...\n");
land();
waitForLanding();
quit();
[buildout]
parts =
cli
demo
manual-flight
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
home = $${buildout:directory}
etc = $${:home}/etc
var = $${:home}/var
log = $${:var}/log
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = $${slap_connection:computer_id}
partition = $${slap_connection:partition_id}
url = $${slap_connection:server_url}
key = $${slap_connection:key_file}
cert = $${slap_connection:cert_file}
[drone]
recipe = slapos.recipe.build
slapparameter-dict = $${slap-configuration:configuration}
init =
options['autopilot-ip'] = options['slapparameter-dict'].get('autopilot_ip', '192.168.27.1')
options['id'] = options['slapparameter-dict'].get('id', '1')
options['is-a-simulation'] = options['slapparameter-dict'].get('is_a_simulation', 'false')
options['leader-id'] = options['slapparameter-dict'].get('leader_id', '1')
options['is-leader'] = 'true' if options['id'] == options['leader-id'] else 'false'
options['multicast-ipv6'] = options['slapparameter-dict'].get('multicast_ip', 'ff15::1111')
options['drone-id-list'] = options['slapparameter-dict'].get('drone_id_list', '')
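# Each *.js file below is rendered with jinja2 so it can embed the qjs wrapper
# library path and the per-drone parameters computed in the [drone] section.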
[js-dynamic-template]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:etc}/$${:_buildout_section_name_}
template = ${buildout:directory}/$${:_buildout_section_name_}.js
extra-context =
context =
raw qjs_wrapper ${qjs-wrapper:location}/lib/libqjswrapper.so
$${:extra-context}
[common]
<= js-dynamic-template
extra-context =
key autopilot_ip drone:autopilot-ip
key drone_id_list drone:drone-id-list
key is_a_simulation drone:is-a-simulation
key is_leader drone:is-leader
key leader_id drone:leader-id
key log_dir directory:log
key pubsub_script pubsub:rendered
[cli]
<= js-dynamic-template
extra-context =
key common common:rendered
[demo]
<= js-dynamic-template
extra-context =
key common common:rendered
[manual-flight]
<= js-dynamic-template
extra-context =
key common common:rendered
[pubsub]
<= js-dynamic-template
extra-context =
key id drone:id
key ipv6 drone:multicast-ipv6
/*jslint indent2 */
/*global console */
import {
getAltitude,
getInitialAltitude,
landed,
loiter,
setTargetCoordinates
} from "{{ qjs_wrapper }}"; //jslint-quiet
import {sleep} from "os";
import {
connect,
goToAltitude,
quit,
startPubsub,
takeOff,
ALTITUDE_DIFF,
IS_LEADER,
LEADER_ID,
SIMULATION
} from "{{ common }}"; //jslint-quiet
const FLIGHT_ALTITUDE = 100;
const PARACHUTE_ALTITUDE = 35;
let INITIAL_ALTITUDE;
let START_ALTITUDE;
var leaderAltitudeAbs;
var leaderAltitudeRel;
var leaderLatitude;
var leaderLongitude;
function followLeader(leaderId, initialAltitude, altitudeDiff) {
goToAltitude(START_ALTITUDE + ALTITUDE_DIFF, false, true);
while(droneDict[leaderId].altitudeAbs == 0) {
console.log("[DEMO] Waiting for leader to send its altitude");
sleep(1000);
}
while(droneDict[leaderId].altitudeAbs < initialAltitude) {
console.log(`[DEMO] Waiting for leader to reach altitude ${initialAltitude} (currently ${droneDict[leaderId].altitudeAbs})`);
sleep(1000);
}
console.log("[DEMO] Switching to following mode...\n");
do {
leaderAltitudeAbs = droneDict[leaderId].altitudeAbs;
leaderAltitudeRel = droneDict[leaderId].altitudeRel;
leaderLatitude = droneDict[leaderId].latitude;
leaderLongitude = droneDict[leaderId].longitude;
setTargetCoordinates(
leaderLatitude,
leaderLongitude,
leaderAltitudeAbs + altitudeDiff,
0
);
sleep(500);
} while(leaderAltitudeRel > PARACHUTE_ALTITUDE);
console.log("[DEMO] Stop following...\n");
}
function waitForAltitude(altitude) {
var curAltitude;
do {
sleep(1000);
curAltitude = getAltitude();
console.log(
`[DEMO] Waiting for altitude... (${curAltitude} , ${altitude})`);
}
while(curAltitude < altitude);
}
function waitForLanding() {
while(!landed()) {
sleep(1000);
}
}
const droneDict = startPubsub();
console.log("[DEMO] Connecting...\n");
connect();
while(getInitialAltitude() == 0) {
console.log("[DEMO] Waiting for first telemetry\n");
sleep(1000);
}
INITIAL_ALTITUDE = getInitialAltitude();
START_ALTITUDE = INITIAL_ALTITUDE + FLIGHT_ALTITUDE;
if(SIMULATION) {
takeOff(START_ALTITUDE + 1);
}
waitForAltitude(START_ALTITUDE);
console.log("[DEMO] Setting loiter mode...\n");
loiter();
sleep(3000);
if(!IS_LEADER) {
followLeader(LEADER_ID, START_ALTITUDE, ALTITUDE_DIFF);
}
console.log("[DEMO] Loitering until manual intructions are given\n")
waitForLanding();
quit();
import {runPubsub} from "{{ qjs_wrapper }}";
import {Worker} from "os";
const PORT = "4840";
const IPV6 = "{{ ipv6 }}";
var parent = Worker.parent;
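// Run the pubsub loop when the parent worker requests it, then tell the parent
// publishing has stopped and stop listening for further messages.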
function handle_msg(e) {
switch(e.data.action) {
case "run":
runPubsub(IPV6, PORT, "eth0", {{ id }});
parent.postMessage({running: false});
parent.onmessage = null;
break;
default:
console.log("Undefined action from parent: ", e.data.action);
}
}
parent.onmessage = handle_msg;
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/qjs-wrapper/buildout.cfg
parts =
instance-profile
common
cli
demo
manual-flight
pubsub
slapos-cookbook
[download-file-base]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
destination = ${buildout:directory}/${:filename}
[instance-profile]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg
[common]
<= download-file-base
[cli]
<= download-file-base
[demo]
<= download-file-base
[manual-flight]
<= download-file-base
[pubsub]
<= download-file-base
...@@ -15,11 +15,11 @@ ...@@ -15,11 +15,11 @@
[template] [template]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 7e90da1f6dac4233e1aa3248f48e357c md5sum = a7978940fb9cdcc4e1ec33015ba640ba
[template-kvm] [template-kvm]
filename = instance-kvm.cfg.jinja2 filename = instance-kvm.cfg.jinja2
md5sum = ff9fb2378a48d1ca8c72f6a87d3a0221 md5sum = 150285957daa256420d2d73519277420
[template-kvm-cluster] [template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in filename = instance-kvm-cluster.cfg.jinja2.in
...@@ -51,7 +51,7 @@ md5sum = e041e8011ad2ec7f104be173ef76f5e9 ...@@ -51,7 +51,7 @@ md5sum = e041e8011ad2ec7f104be173ef76f5e9
[template-ansible-promise] [template-ansible-promise]
filename = template/ansible-promise.in filename = template/ansible-promise.in
md5sum = a8cf453d20f01c707f02c4b4014580d8 md5sum = 6328f99728284847b8dd1146aadeae1b
[template-kvm-run] [template-kvm-run]
filename = template/template-kvm-run.in filename = template/template-kvm-run.in
...@@ -78,16 +78,16 @@ filename = instance-kvm-http.cfg.in ...@@ -78,16 +78,16 @@ filename = instance-kvm-http.cfg.in
md5sum = 438192aab9f11e40dc521b46a4854dcf md5sum = 438192aab9f11e40dc521b46a4854dcf
[image-download-controller] [image-download-controller]
filename = template/image-download-controller.py filename = template/image-download-controller.py.in
md5sum = 3cc10323fd4d2db4cfbac536b66eae7c md5sum = dc822ddd7dc987bf808a64a18ba11fd0
[image-download-config-creator] [image-download-config-creator]
filename = template/image-download-config-creator.py filename = template/image-download-config-creator.py.in
md5sum = 22ed19d9b8f7b983c97c52caa686bcd7 md5sum = b74b9d4829fe2c2f7122125cf87d29cd
[whitelist-firewall-download-controller] [whitelist-firewall-download-controller]
filename = template/whitelist-firewall-download-controller.py filename = template/whitelist-firewall-download-controller.py.in
md5sum = bc64e29546833817636261d1b28aa6dc md5sum = b4f6ffef08685bace1b9c01a3bd2620d
[whitelist-domains-default] [whitelist-domains-default]
filename = template/whitelist-domains-default filename = template/whitelist-domains-default
......
...@@ -1026,6 +1026,7 @@ extensions = jinja2.ext.do ...@@ -1026,6 +1026,7 @@ extensions = jinja2.ext.do
context = context =
raw logs ${directory:public}/ansible raw logs ${directory:public}/ansible
raw name {{ name }} raw name {{ name }}
raw python_executable ${buildout:executable}
[ansible-vm-promise] [ansible-vm-promise]
<= monitor-promise-base <= monitor-promise-base
......
...@@ -83,9 +83,9 @@ extra-context = ...@@ -83,9 +83,9 @@ extra-context =
raw dcron_executable_location ${dcron:location}/sbin/crond raw dcron_executable_location ${dcron:location}/sbin/crond
raw debian_amd64_netinst_location ${debian-amd64-bullseye-netinst.iso:target} raw debian_amd64_netinst_location ${debian-amd64-bullseye-netinst.iso:target}
raw whitelist_domains_default ${whitelist-domains-default:target} raw whitelist_domains_default ${whitelist-domains-default:target}
raw whitelist_firewall_download_controller ${whitelist-firewall-download-controller:target} raw whitelist_firewall_download_controller ${whitelist-firewall-download-controller:output}
raw image_download_controller ${image-download-controller:target} raw image_download_controller ${image-download-controller:output}
raw image_download_config_creator ${image-download-config-creator:target} raw image_download_config_creator ${image-download-config-creator:output}
raw logrotate_cfg ${template-logrotate-base:output} raw logrotate_cfg ${template-logrotate-base:output}
raw novnc_location ${noVNC:location} raw novnc_location ${noVNC:location}
raw netcat_bin ${netcat:location}/bin/netcat raw netcat_bin ${netcat:location}/bin/netcat
......
...@@ -61,9 +61,16 @@ scripts = ...@@ -61,9 +61,16 @@ scripts =
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
[template] [template-base]
recipe = slapos.recipe.template recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
[template-base-python]
<= template-base
output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:_buildout_section_name_}.py
[template]
<= template-base
output = ${buildout:directory}/template.cfg output = ${buildout:directory}/template.cfg
[template-kvm] [template-kvm]
...@@ -118,13 +125,13 @@ context = ...@@ -118,13 +125,13 @@ context =
key template_apache_conf template-apache-conf:target key template_apache_conf template-apache-conf:target
[image-download-controller] [image-download-controller]
<= download-base <= template-base-python
[image-download-config-creator] [image-download-config-creator]
<= download-base <= template-base-python
[whitelist-firewall-download-controller] [whitelist-firewall-download-controller]
<= download-base <= template-base-python
[whitelist-domains-default] [whitelist-domains-default]
<= download-base <= download-base
......
#!/usr/bin/env python #!{{ python_executable }}
# Parse Ansible result log file and define if execution succeed or not # Parse Ansible result log file and define if execution succeed or not
......
#!/usr/bin/env python #!${buildout:executable}
import hashlib import hashlib
import json import json
......
#!/usr/bin/env python #!${buildout:executable}
import hashlib import hashlib
import json import json
......
#!/usr/bin/env python #!${buildout:executable}
import os import os
import subprocess import subprocess
......
...@@ -48,7 +48,6 @@ setup(name=name, ...@@ -48,7 +48,6 @@ setup(name=name,
'erp5.util', 'erp5.util',
'supervisor', 'supervisor',
'psutil', 'psutil',
'six',
], ],
zip_safe=True, zip_safe=True,
test_suite='test', test_suite='test',
......
...@@ -25,7 +25,8 @@ ...@@ -25,7 +25,8 @@
# #
############################################################################## ##############################################################################
import six.moves.http_client as httplib import http.client as httplib
import http.server
import json import json
import os import os
import glob import glob
...@@ -33,15 +34,13 @@ import hashlib ...@@ -33,15 +34,13 @@ import hashlib
import psutil import psutil
import re import re
import requests import requests
import six
import slapos.util import slapos.util
import sqlite3 import sqlite3
from six.moves.urllib.parse import parse_qs, urlparse from urllib.parse import parse_qs, urlparse
import unittest import unittest
import subprocess import subprocess
import tempfile import tempfile
import six.moves.socketserver as SocketServer import socketserver as SocketServer
from six.moves import SimpleHTTPServer
import multiprocessing import multiprocessing
import time import time
import shutil import shutil
...@@ -117,7 +116,7 @@ bootstrap_machine_param_dict = { ...@@ -117,7 +116,7 @@ bootstrap_machine_param_dict = {
} }
class KvmMixin(object): class KvmMixin:
def getConnectionParameterDictJson(self): def getConnectionParameterDictJson(self):
return json.loads( return json.loads(
self.computer_partition.getConnectionParameterDict()['_']) self.computer_partition.getConnectionParameterDict()['_'])
...@@ -142,10 +141,10 @@ class KvmMixin(object): ...@@ -142,10 +141,10 @@ class KvmMixin(object):
for hash_file in hash_file_list for hash_file in hash_file_list
]) ])
with self.slap.instance_supervisor_rpc as supervisor: with self.slap.instance_supervisor_rpc as supervisor:
running_process_info = '\n'.join(sorted([ running_process_info = '\n'.join(sorted(
'%(group)s:%(name)s %(statename)s' % q for q '%(group)s:%(name)s %(statename)s' % q for q
in supervisor.getAllProcessInfo() in supervisor.getAllProcessInfo()
if q['name'] != 'watchdog' and q['group'] != 'watchdog'])) if q['name'] != 'watchdog' and q['group'] != 'watchdog'))
return running_process_info.replace( return running_process_info.replace(
hash_value, '{hash}').replace(kvm_hash_value, '{kvm-hash-value}') hash_value, '{hash}').replace(kvm_hash_value, '{kvm-hash-value}')
...@@ -164,14 +163,14 @@ class KvmMixin(object): ...@@ -164,14 +163,14 @@ class KvmMixin(object):
state=state) state=state)
class KvmMixinJson(object): class KvmMixinJson:
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return { return {
'_': json.dumps(super(KvmMixinJson, cls).getInstanceParameterDict())} '_': json.dumps(super().getInstanceParameterDict())}
def rerequestInstance(self, parameter_dict, *args, **kwargs): def rerequestInstance(self, parameter_dict, *args, **kwargs):
return super(KvmMixinJson, self).rerequestInstance( return super().rerequestInstance(
parameter_dict={'_': json.dumps(parameter_dict)}, parameter_dict={'_': json.dumps(parameter_dict)},
*args, **kwargs *args, **kwargs
) )
...@@ -196,10 +195,10 @@ class TestInstance(InstanceTestCase, KvmMixin): ...@@ -196,10 +195,10 @@ class TestInstance(InstanceTestCase, KvmMixin):
{ {
'ipv6': self._ipv6_address, 'ipv6': self._ipv6_address,
'maximum-extra-disk-amount': '0', 'maximum-extra-disk-amount': '0',
'monitor-base-url': 'https://[%s]:8026' % (self._ipv6_address,), 'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': '%s : 10022' % (self._ipv6_address,), 'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': '%s : 10443' % (self._ipv6_address,), 'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': '%s : 10080' % (self._ipv6_address,), 'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
} }
) )
self.assertEqual(set(present_key_list), set(assert_key_list)) self.assertEqual(set(present_key_list), set(assert_key_list))
...@@ -275,8 +274,8 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin): ...@@ -275,8 +274,8 @@ class TestMemoryManagement(InstanceTestCase, KvmMixin):
qemu_wrapper = QemuQMPWrapper(os.path.join( qemu_wrapper = QemuQMPWrapper(os.path.join(
self.computer_partition_root_path, 'var', 'qmp_socket')) self.computer_partition_root_path, 'var', 'qmp_socket'))
ram_mb = sum( ram_mb = sum(
[q['size'] q['size']
for q in qemu_wrapper.getMemoryInfo()['hotplugged']]) / 1024 / 1024 for q in qemu_wrapper.getMemoryInfo()['hotplugged']) / 1024 / 1024
cpu_count = len( cpu_count = len(
[q['CPU'] for q in qemu_wrapper.getCPUInfo()['hotplugged']]) [q['CPU'] for q in qemu_wrapper.getCPUInfo()['hotplugged']])
return {'cpu_count': cpu_count, 'ram_mb': ram_mb} return {'cpu_count': cpu_count, 'ram_mb': ram_mb}
...@@ -374,8 +373,7 @@ class MonitorAccessMixin(KvmMixin): ...@@ -374,8 +373,7 @@ class MonitorAccessMixin(KvmMixin):
connection_xml = partition_information.get('connection_xml') connection_xml = partition_information.get('connection_xml')
if not connection_xml: if not connection_xml:
continue continue
connection_dict = json.loads(slapos.util.xml2dict( connection_dict = json.loads(slapos.util.xml2dict(connection_xml)['_'])
connection_xml if six.PY3 else connection_xml.encode('utf-8'))['_'])
monitor_base_url = connection_dict.get('monitor-base-url') monitor_base_url = connection_dict.get('monitor-base-url')
if not monitor_base_url: if not monitor_base_url:
continue continue
...@@ -660,22 +658,22 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin): ...@@ -660,22 +658,22 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
connection_parameter_dict.pop(k) connection_parameter_dict.pop(k)
self.assertIn('feed-url-kvm-1-pull', connection_parameter_dict) self.assertIn('feed-url-kvm-1-pull', connection_parameter_dict)
feed_pull = connection_parameter_dict.pop('feed-url-kvm-1-pull') feed_pull = connection_parameter_dict.pop('feed-url-kvm-1-pull')
self.assertRegexpMatches( self.assertRegex(
feed_pull, feed_pull,
'http://\\[%s\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull' % ( 'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull'.format(
self._ipv6_address,)) self._ipv6_address))
feed_push = connection_parameter_dict.pop('feed-url-kvm-1-push') feed_push = connection_parameter_dict.pop('feed-url-kvm-1-push')
self.assertRegexpMatches( self.assertRegex(
feed_push, feed_push,
'http://\\[%s\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push' % ( 'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push'.format(
self._ipv6_address,)) self._ipv6_address))
self.assertEqual( self.assertEqual(
connection_parameter_dict, connection_parameter_dict,
{ {
'ipv6': self._ipv6_address, 'ipv6': self._ipv6_address,
'monitor-base-url': 'https://[%s]:8160' % (self._ipv6_address,), 'monitor-base-url': f'https://[{self._ipv6_address}]:8160',
'monitor-user': 'admin', 'monitor-user': 'admin',
'takeover-kvm-1-url': 'http://[%s]:9263/' % (self._ipv6_address,), 'takeover-kvm-1-url': f'http://[{self._ipv6_address}]:9263/',
} }
) )
self.assertEqual(set(present_key_list), set(assert_key_list)) self.assertEqual(set(present_key_list), set(assert_key_list))
...@@ -823,10 +821,10 @@ class TestInstanceNbdServerJson( ...@@ -823,10 +821,10 @@ class TestInstanceNbdServerJson(
pass pass
class HttpHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): class HttpHandler(http.server.SimpleHTTPRequestHandler):
def log_message(self, *args): def log_message(self, *args):
if os.environ.get('SLAPOS_TEST_DEBUG'): if os.environ.get('SLAPOS_TEST_DEBUG'):
return SimpleHTTPServer.SimpleHTTPRequestHandler.log_message(self, *args) return http.server.SimpleHTTPRequestHandler.log_message(self, *args)
else: else:
return return
...@@ -873,18 +871,19 @@ class FakeImageServerMixin(KvmMixin): ...@@ -873,18 +871,19 @@ class FakeImageServerMixin(KvmMixin):
cls.server_process = multiprocessing.Process( cls.server_process = multiprocessing.Process(
target=server.serve_forever, name='FakeImageHttpServer') target=server.serve_forever, name='FakeImageHttpServer')
cls.server_process.start() cls.server_process.start()
server.socket.close()
finally: finally:
os.chdir(old_dir) os.chdir(old_dir)
@classmethod @classmethod
def stopImageHttpServer(cls): def stopImageHttpServer(cls):
cls.logger.debug('Stopping process %s' % (cls.server_process,)) cls.logger.debug('Stopping process %s', cls.server_process)
cls.server_process.join(10) cls.server_process.join(10)
cls.server_process.terminate() cls.server_process.terminate()
time.sleep(0.1) time.sleep(0.1)
if cls.server_process.is_alive(): if cls.server_process.is_alive():
cls.logger.warning( cls.logger.warning(
'Process %s still alive' % (cls.server_process, )) 'Process %s still alive', cls.server_process)
shutil.rmtree(cls.image_source_directory) shutil.rmtree(cls.image_source_directory)
...@@ -932,11 +931,11 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin): ...@@ -932,11 +931,11 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
cls.startImageHttpServer() cls.startImageHttpServer()
super(TestBootImageUrlList, cls).setUpClass() super().setUpClass()
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
super(TestBootImageUrlList, cls).tearDownClass() super().tearDownClass()
cls.stopImageHttpServer() cls.stopImageHttpServer()
def tearDown(self): def tearDown(self):
...@@ -947,7 +946,7 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin): ...@@ -947,7 +946,7 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
# 2nd ...move instance to "default" state # 2nd ...move instance to "default" state
self.rerequestInstance({}) self.rerequestInstance({})
self.slap.waitForInstance(max_retry=10) self.slap.waitForInstance(max_retry=10)
super(TestBootImageUrlList, self).tearDown() super().tearDown()
def getRunningImageList( def getRunningImageList(
self, kvm_instance_partition, self, kvm_instance_partition,
...@@ -993,8 +992,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin): ...@@ -993,8 +992,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
self.assertEqual( self.assertEqual(
[ [
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image_md5sum), f'${{inst}}/srv/{self.image_directory}/{self.fake_image_md5sum}',
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum), f'${{inst}}/srv/{self.image_directory}/{self.fake_image2_md5sum}',
'${shared}/debian-${ver}-amd64-netinst.iso', '${shared}/debian-${ver}-amd64-netinst.iso',
], ],
self.getRunningImageList(kvm_instance_partition) self.getRunningImageList(kvm_instance_partition)
...@@ -1014,8 +1013,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin): ...@@ -1014,8 +1013,8 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
self.assertEqual( self.assertEqual(
[ [
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image3_md5sum), f'${{inst}}/srv/{self.image_directory}/{self.fake_image3_md5sum}',
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum), f'${{inst}}/srv/{self.image_directory}/{self.fake_image2_md5sum}',
'${shared}/debian-${ver}-amd64-netinst.iso', '${shared}/debian-${ver}-amd64-netinst.iso',
], ],
self.getRunningImageList(kvm_instance_partition) self.getRunningImageList(kvm_instance_partition)
...@@ -1159,9 +1158,9 @@ class TestBootImageUrlSelect(TestBootImageUrlList): ...@@ -1159,9 +1158,9 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
def test_together(self): def test_together(self):
partition_parameter_kw = { partition_parameter_kw = {
'boot-image-url-list': "%s#%s" % ( 'boot-image-url-list': "{}#{}".format(
self.fake_image, self.fake_image_md5sum), self.fake_image, self.fake_image_md5sum),
'boot-image-url-select': '["%s#%s"]' % ( 'boot-image-url-select': '["{}#{}"]'.format(
self.fake_image, self.fake_image_md5sum) self.fake_image, self.fake_image_md5sum)
} }
self.rerequestInstance(partition_parameter_kw) self.rerequestInstance(partition_parameter_kw)
...@@ -1183,10 +1182,10 @@ class TestBootImageUrlSelect(TestBootImageUrlList): ...@@ -1183,10 +1182,10 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
self.assertEqual( self.assertEqual(
[ [
'${inst}/srv/boot-image-url-select-repository/%s' % ( '${{inst}}/srv/boot-image-url-select-repository/{}'.format(
self.fake_image_md5sum,), self.fake_image_md5sum),
'${inst}/srv/boot-image-url-list-repository/%s' % ( '${{inst}}/srv/boot-image-url-list-repository/{}'.format(
self.fake_image_md5sum,), self.fake_image_md5sum),
'${shared}/debian-${ver}-amd64-netinst.iso', '${shared}/debian-${ver}-amd64-netinst.iso',
], ],
self.getRunningImageList(kvm_instance_partition) self.getRunningImageList(kvm_instance_partition)
...@@ -1258,12 +1257,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin): ...@@ -1258,12 +1257,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
config_file_name = 'boot-image-url-list.conf' config_file_name = 'boot-image-url-list.conf'
def setUp(self): def setUp(self):
super(TestBootImageUrlListKvmCluster, self).setUp() super().setUp()
self.startImageHttpServer() self.startImageHttpServer()
def tearDown(self): def tearDown(self):
self.stopImageHttpServer() self.stopImageHttpServer()
super(TestBootImageUrlListKvmCluster, self).tearDown() super().tearDown()
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
...@@ -1302,12 +1301,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin): ...@@ -1302,12 +1301,12 @@ class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
KVM1_config = os.path.join( KVM1_config = os.path.join(
self.slap.instance_directory, self.__partition_reference__ + '2', 'etc', self.slap.instance_directory, self.__partition_reference__ + '2', 'etc',
self.config_file_name) self.config_file_name)
with open(KVM0_config, 'r') as fh: with open(KVM0_config) as fh:
self.assertEqual( self.assertEqual(
self.input_value % (self.fake_image, self.fake_image_md5sum), self.input_value % (self.fake_image, self.fake_image_md5sum),
fh.read().strip() fh.read().strip()
) )
with open(KVM1_config, 'r') as fh: with open(KVM1_config) as fh:
self.assertEqual( self.assertEqual(
self.input_value % (self.fake_image2, self.fake_image2_md5sum), self.input_value % (self.fake_image2, self.fake_image2_md5sum),
fh.read().strip() fh.read().strip()
...@@ -1340,11 +1339,11 @@ class TestNatRules(KvmMixin, InstanceTestCase): ...@@ -1340,11 +1339,11 @@ class TestNatRules(KvmMixin, InstanceTestCase):
self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict) self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict)
self.assertEqual( self.assertEqual(
'%s : 10100' % (self._ipv6_address,), f'{self._ipv6_address} : 10100',
connection_parameter_dict['nat-rule-port-tcp-100'] connection_parameter_dict['nat-rule-port-tcp-100']
) )
self.assertEqual( self.assertEqual(
'%s : 10200' % (self._ipv6_address,), f'{self._ipv6_address} : 10200',
connection_parameter_dict['nat-rule-port-tcp-200'] connection_parameter_dict['nat-rule-port-tcp-200']
) )
...@@ -1388,13 +1387,13 @@ class TestNatRulesKvmCluster(InstanceTestCase): ...@@ -1388,13 +1387,13 @@ class TestNatRulesKvmCluster(InstanceTestCase):
def test(self): def test(self):
host_fwd_entry = self.getRunningHostFwd() host_fwd_entry = self.getRunningHostFwd()
self.assertIn( self.assertIn(
'hostfwd=tcp:%s:10100-:100' % (self._ipv4_address,), f'hostfwd=tcp:{self._ipv4_address}:10100-:100',
host_fwd_entry) host_fwd_entry)
self.assertIn( self.assertIn(
'hostfwd=tcp:%s:10200-:200' % (self._ipv4_address,), f'hostfwd=tcp:{self._ipv4_address}:10200-:200',
host_fwd_entry) host_fwd_entry)
self.assertIn( self.assertIn(
'hostfwd=tcp:%s:10300-:300' % (self._ipv4_address,), f'hostfwd=tcp:{self._ipv4_address}:10300-:300',
host_fwd_entry) host_fwd_entry)
...@@ -1419,10 +1418,10 @@ class TestWhitelistFirewall(InstanceTestCase): ...@@ -1419,10 +1418,10 @@ class TestWhitelistFirewall(InstanceTestCase):
try: try:
self.content_json = json.loads(content) self.content_json = json.loads(content)
except ValueError: except ValueError:
self.fail('Failed to parse json of %r' % (content,)) self.fail(f'Failed to parse json of {content!r}')
self.assertTrue(isinstance(self.content_json, list)) self.assertTrue(isinstance(self.content_json, list))
# check /etc/resolv.conf # check /etc/resolv.conf
with open('/etc/resolv.conf', 'r') as f: with open('/etc/resolv.conf') as f:
resolv_conf_ip_list = [] resolv_conf_ip_list = []
for line in f.readlines(): for line in f.readlines():
if line.startswith('nameserver'): if line.startswith('nameserver'):
...@@ -1451,7 +1450,7 @@ class TestWhitelistFirewallRequest(TestWhitelistFirewall): ...@@ -1451,7 +1450,7 @@ class TestWhitelistFirewallRequest(TestWhitelistFirewall):
} }
def test(self): def test(self):
super(TestWhitelistFirewallRequest, self).test() super().test()
self.assertIn('2.2.2.2', self.content_json) self.assertIn('2.2.2.2', self.content_json)
self.assertIn('3.3.3.3', self.content_json) self.assertIn('3.3.3.3', self.content_json)
self.assertIn('4.4.4.4', self.content_json) self.assertIn('4.4.4.4', self.content_json)
...@@ -1573,7 +1572,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin): ...@@ -1573,7 +1572,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
maxDiff = None maxDiff = None
def setUp(self): def setUp(self):
super(TestImageDownloadController, self).setUp() super().setUp()
self.working_directory = tempfile.mkdtemp() self.working_directory = tempfile.mkdtemp()
self.destination_directory = os.path.join( self.destination_directory = os.path.join(
self.working_directory, 'destination') self.working_directory, 'destination')
...@@ -1595,7 +1594,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin): ...@@ -1595,7 +1594,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
def tearDown(self): def tearDown(self):
self.stopImageHttpServer() self.stopImageHttpServer()
shutil.rmtree(self.working_directory) shutil.rmtree(self.working_directory)
super(TestImageDownloadController, self).tearDown() super().tearDown()
def callImageDownloadController(self, *args): def callImageDownloadController(self, *args):
call_list = [sys.executable, self.image_download_controller] + list(args) call_list = [sys.executable, self.image_download_controller] + list(args)
...@@ -1618,7 +1617,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin): ...@@ -1618,7 +1617,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
def assertFileContent(self, path, content): def assertFileContent(self, path, content):
self.assertTrue(os.path.exists, path) self.assertTrue(os.path.exists, path)
with open(path, 'r') as fh: with open(path) as fh:
self.assertEqual( self.assertEqual(
fh.read(), fh.read(),
content) content)
...@@ -1788,7 +1787,7 @@ class TestParameterDefault(InstanceTestCase, KvmMixin): ...@@ -1788,7 +1787,7 @@ class TestParameterDefault(InstanceTestCase, KvmMixin):
self.slap.instance_directory, '*', 'bin', 'kvm_raw')) self.slap.instance_directory, '*', 'bin', 'kvm_raw'))
self.assertEqual(len(kvm_raw), 1) self.assertEqual(len(kvm_raw), 1)
kvm_raw = kvm_raw[0] kvm_raw = kvm_raw[0]
with open(kvm_raw, 'r') as fh: with open(kvm_raw) as fh:
kvm_raw = fh.read() kvm_raw = fh.read()
self.assertIn(expected, kvm_raw) self.assertIn(expected, kvm_raw)
...@@ -1929,7 +1928,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin): ...@@ -1929,7 +1928,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
continue continue
partition_store_list = [] partition_store_list = []
for number in range(10): for number in range(10):
storage = os.path.join(external_storage_path, 'data%s' % (number,)) storage = os.path.join(external_storage_path, f'data{number}')
if not os.path.exists(storage): if not os.path.exists(storage):
os.mkdir(storage) os.mkdir(storage)
partition_store = os.path.join(storage, partition) partition_store = os.path.join(storage, partition)
...@@ -1949,7 +1948,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin): ...@@ -1949,7 +1948,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
if line.strip() == '[slapos]': if line.strip() == '[slapos]':
slapos_config.append('[slapos]\n') slapos_config.append('[slapos]\n')
slapos_config.append( slapos_config.append(
'instance_storage_home = %s\n' % (external_storage_path,)) f'instance_storage_home = {external_storage_path}\n')
else: else:
slapos_config.append(line) slapos_config.append(line)
with open(cls.slap._slapos_config, 'w') as fh: with open(cls.slap._slapos_config, 'w') as fh:
...@@ -1968,7 +1967,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin): ...@@ -1968,7 +1967,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
@classmethod @classmethod
def _setUpClass(cls): def _setUpClass(cls):
super(TestExternalDisk, cls)._setUpClass() super()._setUpClass()
cls.working_directory = tempfile.mkdtemp() cls.working_directory = tempfile.mkdtemp()
# setup the external_storage_list, to mimic part of slapformat # setup the external_storage_list, to mimic part of slapformat
cls._prepareExternalStorageList() cls._prepareExternalStorageList()
...@@ -1978,7 +1977,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin): ...@@ -1978,7 +1977,7 @@ class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
cls._dropExternalStorageList() cls._dropExternalStorageList()
super(TestExternalDisk, cls).tearDownClass() super().tearDownClass()
shutil.rmtree(cls.working_directory) shutil.rmtree(cls.working_directory)
def test(self): def test(self):
...@@ -2026,7 +2025,7 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin): ...@@ -2026,7 +2025,7 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
super(TestExternalDiskModern, cls).setUpClass() super().setUpClass()
def getExternalDiskInstanceParameterDict( def getExternalDiskInstanceParameterDict(
self, first, second, third, update_dict=None): self, first, second, third, update_dict=None):
...@@ -2090,11 +2089,11 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin): ...@@ -2090,11 +2089,11 @@ class TestExternalDiskModern(InstanceTestCase, ExternalDiskMixin):
[ [
'file=${partition}/srv/virtual.qcow2,if=virtio,discard=on,' 'file=${partition}/srv/virtual.qcow2,if=virtio,discard=on,'
'format=qcow2', 'format=qcow2',
'file=%s/first_disk,if=virtio,cache=writeback,format=qcow' % ( 'file={}/first_disk,if=virtio,cache=writeback,format=qcow'.format(
self.working_directory,), self.working_directory),
'file=${partition}/second_disk,if=virtio,cache=writeback', 'file=${partition}/second_disk,if=virtio,cache=writeback',
'file=%s/third_disk,if=virtio,cache=none' % ( 'file={}/third_disk,if=virtio,cache=none'.format(
self.working_directory,) self.working_directory)
] ]
) )
update_dict = { update_dict = {
...@@ -2128,9 +2127,7 @@ class TestExternalDiskModernCluster(TestExternalDiskModern): ...@@ -2128,9 +2127,7 @@ class TestExternalDiskModernCluster(TestExternalDiskModern):
return 'kvm-cluster' return 'kvm-cluster'
def getExternalDiskInstanceParameterDict(self, *args, **kwargs): def getExternalDiskInstanceParameterDict(self, *args, **kwargs):
partition_dict = super( partition_dict = super().getExternalDiskInstanceParameterDict(*args, **kwargs)
TestExternalDiskModernCluster, self
).getExternalDiskInstanceParameterDict(*args, **kwargs)
partition_dict.update({"disable-ansible-promise": True}) partition_dict.update({"disable-ansible-promise": True})
return { return {
"kvm-partition-dict": { "kvm-partition-dict": {
...@@ -2150,7 +2147,7 @@ class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin): ...@@ -2150,7 +2147,7 @@ class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
super(TestExternalDiskModernIndexRequired, cls).setUpClass() super().setUpClass()
def getExternalDiskInstanceParameterDict(self, first, second, third): def getExternalDiskInstanceParameterDict(self, first, second, third):
return { return {
...@@ -2234,31 +2231,31 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin): ...@@ -2234,31 +2231,31 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
@classmethod @classmethod
def stopHttpServer(cls): def stopHttpServer(cls):
cls.logger.debug('Stopping process %s' % (cls.server_process,)) cls.logger.debug(f'Stopping process {cls.server_process}')
cls.server_process.join(10) cls.server_process.join(10)
cls.server_process.terminate() cls.server_process.terminate()
time.sleep(0.1) time.sleep(0.1)
if cls.server_process.is_alive(): if cls.server_process.is_alive():
cls.logger.warning( cls.logger.warning(
'Process %s still alive' % (cls.server_process, )) f'Process {cls.server_process} still alive')
shutil.rmtree(cls.http_directory) shutil.rmtree(cls.http_directory)
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
cls.startHttpServer() cls.startHttpServer()
super(TestInstanceHttpServer, cls).setUpClass() super().setUpClass()
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
super(TestInstanceHttpServer, cls).tearDownClass() super().tearDownClass()
cls.stopHttpServer() cls.stopHttpServer()
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return { return {
'enable-http-server': True, 'enable-http-server': True,
'bootstrap-script-url': '%s#%s' % ( 'bootstrap-script-url': '{}#{}'.format(
cls.bootstrap_script_url, cls.bootstrap_script_md5sum), cls.bootstrap_script_url, cls.bootstrap_script_md5sum),
'data-to-vm': """data 'data-to-vm': """data
to to
...@@ -2280,10 +2277,10 @@ vm""", ...@@ -2280,10 +2277,10 @@ vm""",
{ {
'ipv6': self._ipv6_address, 'ipv6': self._ipv6_address,
'maximum-extra-disk-amount': '0', 'maximum-extra-disk-amount': '0',
'monitor-base-url': 'https://[%s]:8026' % (self._ipv6_address,), 'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': '%s : 10022' % (self._ipv6_address,), 'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': '%s : 10443' % (self._ipv6_address,), 'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': '%s : 10080' % (self._ipv6_address,), 'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
} }
) )
self.assertEqual(set(present_key_list), set(assert_key_list)) self.assertEqual(set(present_key_list), set(assert_key_list))
...@@ -2311,11 +2308,11 @@ ihs0:whitelist-firewall-{hash} RUNNING""", ...@@ -2311,11 +2308,11 @@ ihs0:whitelist-firewall-{hash} RUNNING""",
'ipv6_config.sh', 'netmask', 'network', 'vm-bootstrap'], 'ipv6_config.sh', 'netmask', 'network', 'vm-bootstrap'],
sorted(os.listdir(public_dir)) sorted(os.listdir(public_dir))
) )
with open(os.path.join(public_dir, 'data'), 'r') as fh: with open(os.path.join(public_dir, 'data')) as fh:
self.assertEqual("""data self.assertEqual("""data
to to
vm""", fh.read()) vm""", fh.read())
with open(os.path.join(public_dir, 'vm-bootstrap'), 'r') as fh: with open(os.path.join(public_dir, 'vm-bootstrap')) as fh:
self.assertEqual('bootstrap_script', fh.read()) self.assertEqual('bootstrap_script', fh.read())
......
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# "dep--" means it does not compile all the building dependencies
# "--static" indicates that it is the statically linked version of it
[buildout]
# delete slapos.rebootstrap as we use python from the host
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
mca--static.cfg
parts =
fluentbit-plugin-wendelin
mca
[python2.7]
recipe =
environment =
location = /usr
[python3]
recipe =
environment =
location = /usr
[golang1.17]
recipe = plone.recipe.command
command = ${:location}/bin/go env -w GOPATH=${buildout:directory}/go
environment =
location = /usr
[cmake]
recipe =
environment =
location = /usr
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# "dep--" means it does not compile all the building dependencies
[buildout]
# delete slapos.rebootstrap as we use python from the host
extensions =
slapos.extension.strip
slapos.extension.shared
extends =
software.cfg
parts =
fluentbit-plugin-wendelin
mca
[python2.7]
recipe =
environment =
location = /usr
[python3]
recipe =
environment =
location = /usr
[golang1.17]
recipe = plone.recipe.command
command = ${:location}/bin/go env -w GOPATH=${buildout:directory}/go
environment =
location = /usr
[cmake]
recipe =
environment =
location = /usr
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
# "--static" indicates that it is the statically linked version of it
[buildout]
extends =
software.cfg
parts =
fluentbit-plugin-wendelin
mca
# ENABLE STATIC LINKING
# note: as a general matter, one has to tweak each dependency by hand to get a static executable
[mca]
# static version:
# WARNING: merely defining LINK_STATIC enables static linking (even if its value is "NO", "OFF" or "FALSE"). Leave it undefined to deactivate the option.
environment +=
ZLIB_PATH=${zlib:location}
LINK_STATIC=dumb_value
[fmtlib]
# static version:
shared = false
configure-options =
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DFMT_TEST=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=TRUE
# This is part of the OCEAN project.
# MCA stands for Metadata Collect Agent
# https://lab.nexedi.com/nexedi/metadata-collect-agent
[buildout]
extends =
../../component/fluentbit-plugin-wendelin/buildout.cfg
../../component/mca/buildout.cfg
parts =
fluentbit-plugin-wendelin
mca
[golang1.17]
# Using "./make.bash" instead of "./all.bash" disables golang tests. Some of these tests attempt to use the network, which fails on OBS' VM.
# A less radical way to fix the issue may be investigated in the future.
make-targets = cd src && unset GOBIN && ./make.bash && cp -alf .. ${:location}
post-install =
${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true
# This line relocates the golang cached modules directory so that the modules are sent to OBS along with the code to compile.
${:location}/bin/go env -w GOPATH=${buildout:directory}/go
...@@ -45,7 +45,6 @@ setup(name=name, ...@@ -45,7 +45,6 @@ setup(name=name,
'slapos.cookbook', 'slapos.cookbook',
'slapos.libnetworkcache', 'slapos.libnetworkcache',
'supervisor', 'supervisor',
'six',
'requests' 'requests'
], ],
zip_safe=True, zip_safe=True,
......
############################################################################## ##############################################################################
# coding: utf-8
# #
# Copyright (c) 2020 Nexedi SA and Contributors. All Rights Reserved. # Copyright (c) 2020 Nexedi SA and Contributors. All Rights Reserved.
# #
...@@ -28,7 +27,7 @@ ...@@ -28,7 +27,7 @@
import os import os
import json import json
from six.moves.urllib import parse from urllib import parse
import requests import requests
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
......
...@@ -8,12 +8,11 @@ import shutil ...@@ -8,12 +8,11 @@ import shutil
import subprocess import subprocess
import tempfile import tempfile
import time import time
import urllib import urllib.parse
import urlparse from http.server import BaseHTTPRequestHandler
from BaseHTTPServer import BaseHTTPRequestHandler
from typing import Dict from typing import Dict
import mock from unittest import mock
import OpenSSL.SSL import OpenSSL.SSL
import pexpect import pexpect
import psutil import psutil
...@@ -44,10 +43,10 @@ class EchoHTTPServer(ManagedHTTPServer): ...@@ -44,10 +43,10 @@ class EchoHTTPServer(ManagedHTTPServer):
response = json.dumps( response = json.dumps(
{ {
'Path': self.path, 'Path': self.path,
'Incoming Headers': self.headers.dict 'Incoming Headers': dict(self.headers.items()),
}, },
indent=2, indent=2,
) ).encode('utf-8')
self.end_headers() self.end_headers()
self.wfile.write(response) self.wfile.write(response)
...@@ -67,11 +66,11 @@ class EchoHTTP11Server(ManagedHTTPServer): ...@@ -67,11 +66,11 @@ class EchoHTTP11Server(ManagedHTTPServer):
response = json.dumps( response = json.dumps(
{ {
'Path': self.path, 'Path': self.path,
'Incoming Headers': self.headers.dict 'Incoming Headers': dict(self.headers.items()),
}, },
indent=2, indent=2,
) ).encode('utf-8')
self.send_header("Content-Length", len(response)) self.send_header("Content-Length", str(len(response)))
self.end_headers() self.end_headers()
self.wfile.write(response) self.wfile.write(response)
...@@ -100,7 +99,7 @@ class CaucaseService(ManagedResource): ...@@ -100,7 +99,7 @@ class CaucaseService(ManagedResource):
os.mkdir(os.path.join(caucased_dir, 'user')) os.mkdir(os.path.join(caucased_dir, 'user'))
os.mkdir(os.path.join(caucased_dir, 'service')) os.mkdir(os.path.join(caucased_dir, 'service'))
backend_caucased_netloc = '%s:%s' % (self._cls._ipv4_address, findFreeTCPPort(self._cls._ipv4_address)) backend_caucased_netloc = f'{self._cls._ipv4_address}:{findFreeTCPPort(self._cls._ipv4_address)}'
self.url = 'http://' + backend_caucased_netloc self.url = 'http://' + backend_caucased_netloc
self._caucased_process = subprocess.Popen( self._caucased_process = subprocess.Popen(
[ [
...@@ -110,6 +109,7 @@ class CaucaseService(ManagedResource): ...@@ -110,6 +109,7 @@ class CaucaseService(ManagedResource):
'--netloc', backend_caucased_netloc, '--netloc', backend_caucased_netloc,
'--service-auto-approve-count', '1', '--service-auto-approve-count', '1',
], ],
# capture subprocess output not to pollute test's own stdout
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
) )
...@@ -127,6 +127,7 @@ class CaucaseService(ManagedResource): ...@@ -127,6 +127,7 @@ class CaucaseService(ManagedResource):
# type: () -> None # type: () -> None
self._caucased_process.terminate() self._caucased_process.terminate()
self._caucased_process.wait() self._caucased_process.wait()
self._caucased_process.stdout.close()
shutil.rmtree(self.directory) shutil.rmtree(self.directory)
...@@ -166,10 +167,11 @@ class BalancerTestCase(ERP5InstanceTestCase): ...@@ -166,10 +167,11 @@ class BalancerTestCase(ERP5InstanceTestCase):
'backend-path-dict': { 'backend-path-dict': {
'default': '', 'default': '',
}, },
'ssl-authentication-dict': {}, 'ssl-authentication-dict': {'default': False},
'ssl': { 'ssl': {
'caucase-url': cls.getManagedResource("caucase", CaucaseService).url, 'caucase-url': cls.getManagedResource("caucase", CaucaseService).url,
}, },
'timeout-dict': {'default': None},
'family-path-routing-dict': {}, 'family-path-routing-dict': {},
'path-routing-list': [], 'path-routing-list': [],
} }
...@@ -185,18 +187,27 @@ class BalancerTestCase(ERP5InstanceTestCase): ...@@ -185,18 +187,27 @@ class BalancerTestCase(ERP5InstanceTestCase):
class SlowHTTPServer(ManagedHTTPServer): class SlowHTTPServer(ManagedHTTPServer):
"""An HTTP Server which reply after 2 seconds. """An HTTP Server which reply after a timeout.
Timeout is 2 seconds by default, and can be specified in the path of the URL
""" """
class RequestHandler(BaseHTTPRequestHandler): class RequestHandler(BaseHTTPRequestHandler):
def do_GET(self): def do_GET(self):
# type: () -> None # type: () -> None
self.send_response(200) self.send_response(200)
self.send_header("Content-Type", "text/plain") self.send_header("Content-Type", "text/plain")
time.sleep(2) timeout = 2
try:
timeout = int(self.path[1:])
except ValueError:
pass
time.sleep(timeout)
self.end_headers() self.end_headers()
self.wfile.write("OK\n") self.wfile.write(b"OK\n")
log_message = logging.getLogger(__name__ + '.SlowHTTPServer').info
log_message = logging.getLogger(__name__ + '.SlowHandler').info
class TestLog(BalancerTestCase, CrontabMixin): class TestLog(BalancerTestCase, CrontabMixin):
...@@ -206,7 +217,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -206,7 +217,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> Dict # type: () -> Dict
parameter_dict = super(TestLog, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a slow server instead # use a slow server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("slow_web_server", SlowHTTPServer).netloc, 1, False]]
return parameter_dict return parameter_dict
...@@ -214,7 +225,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -214,7 +225,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
def test_access_log_format(self): def test_access_log_format(self):
# type: () -> None # type: () -> None
requests.get( requests.get(
urlparse.urljoin(self.default_balancer_url, '/url_path'), urllib.parse.urljoin(self.default_balancer_url, '/url_path'),
verify=False, verify=False,
) )
time.sleep(.5) # wait a bit more until access is logged time.sleep(.5) # wait a bit more until access is logged
...@@ -254,7 +265,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -254,7 +265,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
'apachedex', 'apachedex',
'ApacheDex-*.html', 'ApacheDex-*.html',
)) ))
with open(apachedex_report, 'r') as f: with open(apachedex_report) as f:
report_text = f.read() report_text = f.read()
self.assertIn('APacheDEX', report_text) self.assertIn('APacheDEX', report_text)
# having this table means that apachedex could parse some lines. # having this table means that apachedex could parse some lines.
...@@ -301,7 +312,7 @@ class TestLog(BalancerTestCase, CrontabMixin): ...@@ -301,7 +312,7 @@ class TestLog(BalancerTestCase, CrontabMixin):
error_line = error_log_file.read().splitlines()[-1] error_line = error_log_file.read().splitlines()[-1]
self.assertIn('apache.conf -D FOREGROUND', error_line) self.assertIn('apache.conf -D FOREGROUND', error_line)
# this log also includes a timestamp
# This regex is for haproxy mostly, so keep it commented for now, until we can # This regex is for haproxy mostly, so keep it commented for now, until we can
# Merge the slapos-master setup and erp5. # Merge the slapos-master setup and erp5.
# self.assertRegexpMatches(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}') # self.assertRegexpMatches(error_line, r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}')
...@@ -331,7 +342,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer): ...@@ -331,7 +342,7 @@ class BalancerCookieHTTPServer(ManagedHTTPServer):
# The name of this cookie is SERVERID # The name of this cookie is SERVERID
assert self.headers['X-Balancer-Current-Cookie'] == 'SERVERID' assert self.headers['X-Balancer-Current-Cookie'] == 'SERVERID'
self.end_headers() self.end_headers()
self.wfile.write(server._name) self.wfile.write(server._name.encode('utf-8'))
log_message = logging.getLogger(__name__ + '.BalancerCookieHTTPServer').info log_message = logging.getLogger(__name__ + '.BalancerCookieHTTPServer').info
return RequestHandler return RequestHandler
...@@ -344,7 +355,7 @@ class TestBalancer(BalancerTestCase): ...@@ -344,7 +355,7 @@ class TestBalancer(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> Dict # type: () -> Dict
parameter_dict = super(TestBalancer, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use two backend servers # use two backend servers
parameter_dict['dummy_http_server'] = [ parameter_dict['dummy_http_server'] = [
...@@ -373,7 +384,7 @@ class TestBalancer(BalancerTestCase): ...@@ -373,7 +384,7 @@ class TestBalancer(BalancerTestCase):
# if backend provides a "SERVERID" cookie, balancer will overwrite it with the # if backend provides a "SERVERID" cookie, balancer will overwrite it with the
# backend selected by balancing algorithm # backend selected by balancing algorithm
self.assertIn( self.assertIn(
requests.get(urlparse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'], requests.get(urllib.parse.urljoin(self.default_balancer_url, '/set_cookie'), verify=False).cookies['SERVERID'],
('default-0', 'default-1'), ('default-0', 'default-1'),
) )
...@@ -400,10 +411,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -400,10 +411,7 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> Dict # type: () -> Dict
parameter_dict = super( parameter_dict = super()._getInstanceParameterDict()
TestTestRunnerEntryPoints,
cls,
)._getInstanceParameterDict()
parameter_dict['dummy_http_server-test-runner-address-list'] = [ parameter_dict['dummy_http_server-test-runner-address-list'] = [
[ [
...@@ -427,18 +435,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase): ...@@ -427,18 +435,18 @@ class TestTestRunnerEntryPoints(BalancerTestCase):
)['default-test-runner-url-list'] )['default-test-runner-url-list']
url_0, url_1, url_2 = test_runner_url_list url_0, url_1, url_2 = test_runner_url_list
self.assertEqual( self.assertEqual(
urlparse.urlparse(url_0).netloc, urllib.parse.urlparse(url_0).netloc,
urlparse.urlparse(url_1).netloc) urllib.parse.urlparse(url_1).netloc)
self.assertEqual( self.assertEqual(
urlparse.urlparse(url_0).netloc, urllib.parse.urlparse(url_0).netloc,
urlparse.urlparse(url_2).netloc) urllib.parse.urlparse(url_2).netloc)
path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format( path_0 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_0/something'.format(
netloc=urlparse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format( path_1 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_1/something'.format(
netloc=urlparse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format( path_2 = '/VirtualHostBase/https/{netloc}/VirtualHostRoot/_vh_unit_test_2/something'.format(
netloc=urlparse.urlparse(url_0).netloc) netloc=urllib.parse.urlparse(url_0).netloc)
self.assertEqual( self.assertEqual(
{ {
...@@ -476,7 +484,7 @@ class TestHTTP(BalancerTestCase): ...@@ -476,7 +484,7 @@ class TestHTTP(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> Dict # type: () -> Dict
parameter_dict = super(TestHTTP, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
# use a HTTP/1.1 server instead # use a HTTP/1.1 server instead
parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]] parameter_dict['dummy_http_server'] = [[cls.getManagedResource("HTTP/1.1 Server", EchoHTTP11Server).netloc, 1, False]]
return parameter_dict return parameter_dict
...@@ -497,32 +505,33 @@ class TestHTTP(BalancerTestCase): ...@@ -497,32 +505,33 @@ class TestHTTP(BalancerTestCase):
        '%{http_version}',
        self.default_balancer_url,
      ]),
      b'1.1',
    )

  def test_keep_alive(self):
    # type: () -> None
    # when doing two requests, connection is established only once
    with requests.Session() as session:
      session.verify = False

      # do a first request, which establishes a first connection
      session.get(self.default_balancer_url).raise_for_status()

      # "break" new connection method and check we can make another request
      with mock.patch(
          "requests.packages.urllib3.connectionpool.HTTPSConnectionPool._new_conn",
      ) as new_conn:
        session.get(self.default_balancer_url).raise_for_status()
      new_conn.assert_not_called()

      parsed_url = urllib.parse.urlparse(self.default_balancer_url)
      # check that we have an open file for the ip connection
      self.assertTrue([
        c for c in psutil.Process(os.getpid()).connections()
        if c.status == 'ESTABLISHED' and c.raddr.ip == parsed_url.hostname
        and c.raddr.port == parsed_url.port
      ])
class ContentTypeHTTPServer(ManagedHTTPServer): class ContentTypeHTTPServer(ManagedHTTPServer):
...@@ -539,12 +548,12 @@ class ContentTypeHTTPServer(ManagedHTTPServer): ...@@ -539,12 +548,12 @@ class ContentTypeHTTPServer(ManagedHTTPServer):
      # type: () -> None
      self.send_response(200)
      if self.path == '/':
        self.send_header("Content-Length", '0')
        return self.end_headers()
      content_type = self.path[1:]
      body = b"OK"
      self.send_header("Content-Type", content_type)
      self.send_header("Content-Length", str(len(body)))
      self.end_headers()
      self.wfile.write(body)
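The request path doubles as the Content-Type echoed back, which is what the compression tests below rely on. A hedged sketch of a request against this server through the balancer (it mirrors the tests that follow and is only illustrative):

```python
# ask ContentTypeHTTPServer for a body declared as text/css;
# the balancer is expected to serve such content gzip-compressed
resp = requests.get(
  urllib.parse.urljoin(self.default_balancer_url, 'text/css'),
  verify=False,
  headers={"Accept-Encoding": "gzip, deflate"},
)
assert resp.headers['Content-Type'] == 'text/css'
assert resp.headers.get('Content-Encoding') == 'gzip'
```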
...@@ -558,7 +567,7 @@ class TestContentEncoding(BalancerTestCase): ...@@ -558,7 +567,7 @@ class TestContentEncoding(BalancerTestCase):
@classmethod @classmethod
def _getInstanceParameterDict(cls): def _getInstanceParameterDict(cls):
# type: () -> Dict # type: () -> Dict
parameter_dict = super(TestContentEncoding, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
parameter_dict['dummy_http_server'] = [ parameter_dict['dummy_http_server'] = [
[cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False], [cls.getManagedResource("content_type_server", ContentTypeHTTPServer).netloc, 1, False],
] ]
...@@ -588,19 +597,19 @@ class TestContentEncoding(BalancerTestCase): ...@@ -588,19 +597,19 @@ class TestContentEncoding(BalancerTestCase):
'application/x-font-opentype', 'application/x-font-opentype',
'application/wasm',): 'application/wasm',):
resp = requests.get( resp = requests.get(
urlparse.urljoin(self.default_balancer_url, content_type), urllib.parse.urljoin(self.default_balancer_url, content_type),
verify=False, verify=False,
headers={"Accept-Encoding": "gzip, deflate",}) headers={"Accept-Encoding": "gzip, deflate",})
self.assertEqual(resp.headers['Content-Type'], content_type) self.assertEqual(resp.headers['Content-Type'], content_type)
self.assertEqual( self.assertEqual(
resp.headers.get('Content-Encoding'), resp.headers.get('Content-Encoding'),
'gzip', 'gzip',
'%s uses wrong encoding: %s' % (content_type, resp.headers.get('Content-Encoding'))) '{} uses wrong encoding: {}'.format(content_type, resp.headers.get('Content-Encoding')))
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
def test_no_gzip_encoding(self): def test_no_gzip_encoding(self):
# type: () -> None # type: () -> None
resp = requests.get(urlparse.urljoin(self.default_balancer_url, '/image/png'), verify=False) resp = requests.get(urllib.parse.urljoin(self.default_balancer_url, '/image/png'), verify=False)
self.assertNotIn('Content-Encoding', resp.headers) self.assertNotIn('Content-Encoding', resp.headers)
self.assertEqual(resp.text, 'OK') self.assertEqual(resp.text, 'OK')
...@@ -683,7 +692,7 @@ class CaucaseCertificate(ManagedResource): ...@@ -683,7 +692,7 @@ class CaucaseCertificate(ManagedResource):
cas_args + [ cas_args + [
'--send-csr', self.csr_file, '--send-csr', self.csr_file,
], ],
).split()[0] ).split()[0].decode()
assert csr_id assert csr_id
for _ in range(30): for _ in range(30):
...@@ -699,8 +708,8 @@ class CaucaseCertificate(ManagedResource): ...@@ -699,8 +708,8 @@ class CaucaseCertificate(ManagedResource):
time.sleep(1) time.sleep(1)
else: else:
raise RuntimeError('getting service certificate failed.') raise RuntimeError('getting service certificate failed.')
with open(self.cert_file) as f: with open(self.cert_file) as cert_file:
assert 'BEGIN CERTIFICATE' in f.read() assert 'BEGIN CERTIFICATE' in cert_file.read()
def revoke(self, caucase): def revoke(self, caucase):
# type: (str, CaucaseService) -> None # type: (str, CaucaseService) -> None
...@@ -724,8 +733,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase): ...@@ -724,8 +733,8 @@ class TestServerTLSProvidedCertificate(BalancerTestCase):
# type: () -> Dict # type: () -> Dict
server_caucase = cls.getManagedResource('server_caucase', CaucaseService) server_caucase = cls.getManagedResource('server_caucase', CaucaseService)
server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate) server_certificate = cls.getManagedResource('server_certificate', CaucaseCertificate)
server_certificate.request(cls._ipv4_address.decode(), server_caucase) server_certificate.request(cls._ipv4_address, server_caucase)
parameter_dict = super(TestServerTLSProvidedCertificate, cls)._getInstanceParameterDict() parameter_dict = super()._getInstanceParameterDict()
with open(server_certificate.cert_file) as f: with open(server_certificate.cert_file) as f:
parameter_dict['ssl']['cert'] = f.read() parameter_dict['ssl']['cert'] = f.read()
with open(server_certificate.key_file) as f: with open(server_certificate.key_file) as f:
......
...@@ -28,11 +28,11 @@ ...@@ -28,11 +28,11 @@
import os import os
import json import json
import glob import glob
import urlparse import urllib.parse
import socket import socket
import time import time
import re import re
import BaseHTTPServer import http.server
import multiprocessing import multiprocessing
import subprocess import subprocess
...@@ -44,7 +44,7 @@ from . import setUpModule ...@@ -44,7 +44,7 @@ from . import setUpModule
setUpModule # pyflakes setUpModule # pyflakes
class TestPublishedURLIsReachableMixin(object): class TestPublishedURLIsReachableMixin:
"""Mixin that checks that default page of ERP5 is reachable. """Mixin that checks that default page of ERP5 is reachable.
""" """
...@@ -52,7 +52,7 @@ class TestPublishedURLIsReachableMixin(object): ...@@ -52,7 +52,7 @@ class TestPublishedURLIsReachableMixin(object):
    # We access ERP5 through a "virtual host", which should make
    # ERP5 produce URLs using https://virtual-host-name:1234/virtual_host_root
    # as base.
    virtual_host_url = urllib.parse.urljoin(
        base_url,
        '/VirtualHostBase/https/virtual-host-name:1234/{}/VirtualHostRoot/_vh_virtual_host_root/'
        .format(site_id))
...@@ -72,20 +72,20 @@ class TestPublishedURLIsReachableMixin(object):
            total=60,
            backoff_factor=.5,
            status_forcelist=(404, 500, 503))))
    with session:
      r = session.get(virtual_host_url, verify=verify, allow_redirects=False)
      self.assertEqual(r.status_code, requests.codes.found)
      # access on / are redirected to login form, with virtual host preserved
      self.assertEqual(r.headers.get('location'), 'https://virtual-host-name:1234/virtual_host_root/login_form')

      # login page can be rendered and contains the text "ERP5"
      r = session.get(
        urllib.parse.urljoin(base_url, f'{site_id}/login_form'),
        verify=verify,
        allow_redirects=False,
      )
      self.assertEqual(r.status_code, requests.codes.ok)
      self.assertIn("ERP5", r.text)
def test_published_family_default_v6_is_reachable(self): def test_published_family_default_v6_is_reachable(self):
"""Tests the IPv6 URL published by the root partition is reachable. """Tests the IPv6 URL published by the root partition is reachable.
...@@ -134,7 +134,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin): ...@@ -134,7 +134,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual( self.assertEqual(
'https://[%s]:8888/tree' % self._ipv6_address, f'https://[{self._ipv6_address}]:8888/tree',
param_dict['jupyter-url'] param_dict['jupyter-url']
) )
...@@ -172,7 +172,7 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase): ...@@ -172,7 +172,7 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
} }
def checkValidHTTPSURL(self, url): def checkValidHTTPSURL(self, url):
parsed = urlparse.urlparse(url) parsed = urllib.parse.urlparse(url)
self.assertEqual(parsed.scheme, 'https') self.assertEqual(parsed.scheme, 'https')
self.assertTrue(parsed.hostname) self.assertTrue(parsed.hostname)
self.assertTrue(parsed.port) self.assertTrue(parsed.port)
...@@ -182,16 +182,16 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase): ...@@ -182,16 +182,16 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'): for family_name in ('family1', 'family2'):
self.checkValidHTTPSURL( self.checkValidHTTPSURL(
param_dict['family-{family_name}'.format(family_name=family_name)]) param_dict[f'family-{family_name}'])
self.checkValidHTTPSURL( self.checkValidHTTPSURL(
param_dict['family-{family_name}-v6'.format(family_name=family_name)]) param_dict[f'family-{family_name}-v6'])
def test_published_test_runner_url(self): def test_published_test_runner_url(self):
# each family also publishes a list of test runner URLs, by default 3 per family
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
for family_name in ('family1', 'family2'): for family_name in ('family1', 'family2'):
family_test_runner_url_list = param_dict[ family_test_runner_url_list = param_dict[
'{family_name}-test-runner-url-list'.format(family_name=family_name)] f'{family_name}-test-runner-url-list']
self.assertEqual(3, len(family_test_runner_url_list)) self.assertEqual(3, len(family_test_runner_url_list))
for url in family_test_runner_url_list: for url in family_test_runner_url_list:
self.checkValidHTTPSURL(url) self.checkValidHTTPSURL(url)
...@@ -209,23 +209,23 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase): ...@@ -209,23 +209,23 @@ class TestApacheBalancerPorts(ERP5InstanceTestCase):
# normal access on ipv4 and ipv6 and test runner access on ipv4 only # normal access on ipv4 and ipv6 and test runner access on ipv4 only
with self.slap.instance_supervisor_rpc as supervisor: with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo() all_process_info = supervisor.getAllProcessInfo()
process_info, = [p for p in all_process_info if p['name'] == 'apache'] process_info, = (p for p in all_process_info if p['name'] == 'apache')
apache_process = psutil.Process(process_info['pid']) apache_process = psutil.Process(process_info['pid'])
self.assertEqual( self.assertEqual(
sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2), sorted([socket.AF_INET] * 4 + [socket.AF_INET6] * 2),
sorted([ sorted(
c.family c.family
for c in apache_process.connections() for c in apache_process.connections()
if c.status == 'LISTEN' if c.status == 'LISTEN'
])) ))
def test_haproxy_listen(self): def test_haproxy_listen(self):
# There is one haproxy per family # There is one haproxy per family
with self.slap.instance_supervisor_rpc as supervisor: with self.slap.instance_supervisor_rpc as supervisor:
all_process_info = supervisor.getAllProcessInfo() all_process_info = supervisor.getAllProcessInfo()
process_info, = [ process_info, = (
p for p in all_process_info if p['name'].startswith('haproxy-') p for p in all_process_info if p['name'].startswith('haproxy-')
] )
haproxy_process = psutil.Process(process_info['pid']) haproxy_process = psutil.Process(process_info['pid'])
self.assertEqual([socket.AF_INET, socket.AF_INET], [ self.assertEqual([socket.AF_INET, socket.AF_INET], [
c.family for c in haproxy_process.connections() if c.status == 'LISTEN' c.family for c in haproxy_process.connections() if c.status == 'LISTEN'
...@@ -290,8 +290,8 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac ...@@ -290,8 +290,8 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
zodb["pool-timeout"] = "10m" zodb["pool-timeout"] = "10m"
storage["storage"] = "root" storage["storage"] = "root"
storage["server"] = zeo_addr storage["server"] = zeo_addr
with open('%s/etc/zope-%s.conf' % (partition, zope)) as f: with open(f'{partition}/etc/zope-{zope}.conf') as f:
conf = map(str.strip, f.readlines()) conf = list(map(str.strip, f.readlines()))
i = conf.index("<zodb_db root>") + 1 i = conf.index("<zodb_db root>") + 1
conf = iter(conf[i:conf.index("</zodb_db>", i)]) conf = iter(conf[i:conf.index("</zodb_db>", i)])
for line in conf: for line in conf:
...@@ -300,23 +300,23 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac ...@@ -300,23 +300,23 @@ class TestZopeNodeParameterOverride(ERP5InstanceTestCase, TestPublishedURLIsReac
if line == '</zeoclient>': if line == '</zeoclient>':
break break
checkParameter(line, storage) checkParameter(line, storage)
for k, v in storage.iteritems(): for k, v in storage.items():
self.assertIsNone(v, k) self.assertIsNone(v, k)
del storage del storage
else: else:
checkParameter(line, zodb) checkParameter(line, zodb)
for k, v in zodb.iteritems(): for k, v in zodb.items():
self.assertIsNone(v, k) self.assertIsNone(v, k)
partition = self.getComputerPartitionPath('zope-a') partition = self.getComputerPartitionPath('zope-a')
for zope in xrange(3): for zope in range(3):
checkConf({ checkConf({
"cache-size-bytes": "20MB", "cache-size-bytes": "20MB",
}, { }, {
"cache-size": "50MB", "cache-size": "50MB",
}) })
partition = self.getComputerPartitionPath('zope-bb') partition = self.getComputerPartitionPath('zope-bb')
for zope in xrange(5): for zope in range(5):
checkConf({ checkConf({
"cache-size-bytes": "500MB" if zope else 1<<20, "cache-size-bytes": "500MB" if zope else 1<<20,
}, { }, {
...@@ -332,19 +332,20 @@ def popenCommunicate(command_list, input_=None, **kwargs): ...@@ -332,19 +332,20 @@ def popenCommunicate(command_list, input_=None, **kwargs):
popen.kill() popen.kill()
if popen.returncode != 0: if popen.returncode != 0:
raise ValueError( raise ValueError(
'Issue during calling %r, result was:\n%s' % (command_list, result)) f'Issue during calling {command_list!r}, result was:\n{result}')
return result return result
class TestHandler(http.server.BaseHTTPRequestHandler):
  def do_GET(self):
    self.send_response(200)
    response = json.dumps(
      {
        'Path': self.path,
        'Incoming Headers': {k.lower(): v for k, v in self.headers.items()},
      },
      indent=2,
    ).encode('utf-8')
    self.end_headers()
    self.wfile.write(response)
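TestHandler echoes the request back as JSON, so the assertions further down can inspect both the path and the headers forwarded by the frontend. A hedged sketch of what a client observes (address and header values are illustrative):

```python
resp = requests.get('http://10.0.0.1:8080/erp5/portal_slap/')  # placeholder address
data = resp.json()
# data['Path'] == '/erp5/portal_slap/'
# data['Incoming Headers'] contains the forwarded headers, e.g. 'remote-user'
# when the request came through the SSL client certificate frontend
```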
...@@ -352,7 +353,7 @@ class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler): ...@@ -352,7 +353,7 @@ class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
  """This checks deployment-script-like instantiation.

  Low level assertions are done here in order to assure that
  https://lab.nexedi.com/nexedi/slapos.package/blob/master/playbook/
  slapos-master-standalone.yml
  works correctly
...@@ -426,8 +427,8 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase): ...@@ -426,8 +427,8 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
1, 1,
len(backend_apache_configuration_list) len(backend_apache_configuration_list)
) )
backend_apache_configuration = open( with open(backend_apache_configuration_list[0]) as f:
backend_apache_configuration_list[0]).read() backend_apache_configuration = f.read()
self.assertIn( self.assertIn(
'SSLVerifyClient require', 'SSLVerifyClient require',
backend_apache_configuration backend_apache_configuration
...@@ -452,7 +453,7 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase): ...@@ -452,7 +453,7 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
common_name = 'TEST-SSL-AUTH' common_name = 'TEST-SSL-AUTH'
popenCommunicate([ popenCommunicate([
'openssl', 'req', '-utf8', '-nodes', '-config', openssl_config, '-new', 'openssl', 'req', '-utf8', '-nodes', '-config', openssl_config, '-new',
'-keyout', key, '-out', csr, '-days', '3650'], '%s\n' % (common_name,), '-keyout', key, '-out', csr, '-days', '3650'], f'{common_name}\n'.encode(),
stdin=subprocess.PIPE) stdin=subprocess.PIPE)
popenCommunicate([ popenCommunicate([
'openssl', 'ca', '-utf8', '-days', '3650', '-batch', '-config', 'openssl', 'ca', '-utf8', '-days', '3650', '-batch', '-config',
...@@ -464,24 +465,24 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase): ...@@ -464,24 +465,24 @@ class TestDeploymentScriptInstantiation(ERP5InstanceTestCase):
    ip, port = re.search(
      r'.*http:\/\/(.*):(\d*)\/.*', portal_slap_line).groups()
    port = int(port)
    server = http.server.HTTPServer((ip, port), TestHandler)
    server_process = multiprocessing.Process(
      target=server.serve_forever, name='HTTPServer')
    server_process.start()
    self.addCleanup(server_process.terminate)
    self.addCleanup(server_process.join, 10)
    server.socket.close()

    # assert that accessing the service endpoint results with certificate
    # authentication and proper information extraction
    result_json = requests.get(
      self.getRootPartitionConnectionParameterDict()['family-service'],
      verify=False, cert=(cert, key)).json()
    self.assertEqual(
      common_name,
      result_json['Incoming Headers']['remote-user']
    )
    self.assertEqual(
      '/erp5/portal_slap/',
      result_json['Path']
    )
############################################################################## ##############################################################################
# coding: utf-8
# #
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved. # Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
# #
...@@ -29,7 +28,7 @@ ...@@ -29,7 +28,7 @@
import os import os
import json import json
import glob import glob
import urlparse import urllib.parse
import socket import socket
import sys import sys
import time import time
...@@ -38,7 +37,7 @@ import datetime ...@@ -38,7 +37,7 @@ import datetime
import subprocess import subprocess
import gzip import gzip
from backports import lzma import lzma
import MySQLdb import MySQLdb
from slapos.testing.utils import CrontabMixin from slapos.testing.utils import CrontabMixin
...@@ -80,7 +79,7 @@ class MariaDBTestCase(ERP5InstanceTestCase): ...@@ -80,7 +79,7 @@ class MariaDBTestCase(ERP5InstanceTestCase):
def getDatabaseConnection(self): def getDatabaseConnection(self):
connection_parameter_dict = json.loads( connection_parameter_dict = json.loads(
self.computer_partition.getConnectionParameterDict()['_']) self.computer_partition.getConnectionParameterDict()['_'])
db_url = urlparse.urlparse(connection_parameter_dict['database-list'][0]) db_url = urllib.parse.urlparse(connection_parameter_dict['database-list'][0])
self.assertEqual('mysql', db_url.scheme) self.assertEqual('mysql', db_url.scheme)
self.assertTrue(db_url.path.startswith('/')) self.assertTrue(db_url.path.startswith('/'))
...@@ -91,6 +90,8 @@ class MariaDBTestCase(ERP5InstanceTestCase): ...@@ -91,6 +90,8 @@ class MariaDBTestCase(ERP5InstanceTestCase):
host=db_url.hostname, host=db_url.hostname,
port=db_url.port, port=db_url.port,
db=database_name, db=database_name,
use_unicode=True,
charset='utf8mb4'
) )
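With `use_unicode=True` and an explicit `charset`, text columns come back as `str` under Python 3, while binary results (such as those returned by `mroonga_normalize` below) stay `bytes`, hence the `.encode()` in the expectations. A hedged sketch of an equivalent standalone connection (host, credentials and database name are placeholders, not values used by this test):

```python
import MySQLdb

cnx = MySQLdb.connect(
  host='127.0.0.1',   # placeholder
  port=2099,          # placeholder
  user='user',        # placeholder
  passwd='insecure',  # placeholder
  db='erp5_test',     # placeholder
  use_unicode=True,
  charset='utf8mb4',
)
```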
...@@ -106,7 +107,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -106,7 +107,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'mariadb-full', 'mariadb-full',
'20500101000000.sql.gz', '20500101000000.sql.gz',
), ),
'r') as dump: 'rt') as dump:
self.assertIn('CREATE TABLE', dump.read()) self.assertIn('CREATE TABLE', dump.read())
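`gzip.open` defaults to binary mode, so `'rt'` is needed for `assertIn` with a `str` pattern under Python 3. A standalone illustration (the file name is a placeholder):

```python
import gzip

# text mode: read() returns str, so it can be compared with a str pattern
with gzip.open('20500101000000.sql.gz', 'rt') as dump:
  assert 'CREATE TABLE' in dump.read()
```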
def test_logrotate_and_slow_query_digest(self): def test_logrotate_and_slow_query_digest(self):
...@@ -148,7 +149,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -148,7 +149,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'slowquery_digest', 'slowquery_digest',
'slowquery_digest.txt-2050-01-01.xz', 'slowquery_digest.txt-2050-01-01.xz',
) )
with lzma.open(slow_query_report, 'r') as f: with lzma.open(slow_query_report, 'rt') as f:
# this is the hash for our "select sleep(n)" slow query # this is the hash for our "select sleep(n)" slow query
self.assertIn("ID 0xF9A57DD5A41825CA", f.read()) self.assertIn("ID 0xF9A57DD5A41825CA", f.read())
...@@ -170,7 +171,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -170,7 +171,7 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
subprocess.check_output('faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'], shell=True) subprocess.check_output('faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'], shell=True)
self.assertEqual( self.assertEqual(
error_context.exception.output, error_context.exception.output,
"""\ b"""\
Threshold is lower than expected: Threshold is lower than expected:
Expected total queries : 1.0 and current is: 2 Expected total queries : 1.0 and current is: 2
Expected slowest query : 0.1 and current is: 3 Expected slowest query : 0.1 and current is: 3
...@@ -220,7 +221,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -220,7 +221,7 @@ class TestMroonga(MariaDBTestCase):
""" """
SELECT mroonga_normalize("ABCDあぃうぇ㍑") SELECT mroonga_normalize("ABCDあぃうぇ㍑")
""") """)
self.assertEqual((('abcdあぃうぇリットル',),), self.assertEqual((('abcdあぃうぇリットル'.encode(),),),
cnx.store_result().fetch_row(maxrows=2)) cnx.store_result().fetch_row(maxrows=2))
if 0: if 0:
...@@ -233,7 +234,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -233,7 +234,7 @@ class TestMroonga(MariaDBTestCase):
""" """
SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark") SELECT mroonga_normalize("aBcDあぃウェ㍑", "NormalizerMySQLUnicodeCIExceptKanaCIKanaWithVoicedSoundMark")
""") """)
self.assertEqual((('ABCDあぃうぇ㍑',),), self.assertEqual((('ABCDあぃうぇ㍑'.encode(),),),
cnx.store_result().fetch_row(maxrows=2)) cnx.store_result().fetch_row(maxrows=2))
def test_mroonga_full_text_normalizer(self): def test_mroonga_full_text_normalizer(self):
...@@ -321,7 +322,7 @@ class TestMroonga(MariaDBTestCase): ...@@ -321,7 +322,7 @@ class TestMroonga(MariaDBTestCase):
cnx = self.getDatabaseConnection() cnx = self.getDatabaseConnection()
with contextlib.closing(cnx): with contextlib.closing(cnx):
cnx.query("SELECT mroonga_command('register token_filters/stem')") cnx.query("SELECT mroonga_command('register token_filters/stem')")
self.assertEqual((('true',),), cnx.store_result().fetch_row(maxrows=2)) self.assertEqual(((b'true',),), cnx.store_result().fetch_row(maxrows=2))
cnx.query( cnx.query(
""" """
CREATE TABLE memos ( CREATE TABLE memos (
......
...@@ -75,5 +75,5 @@ def lookupMount(zurl): ...@@ -75,5 +75,5 @@ def lookupMount(zurl):
# readfile returns content of file @path. # readfile returns content of file @path.
def readfile(path): def readfile(path):
with open(path, 'r') as f: with open(path) as f:
return f.read() return f.read()
...@@ -17,7 +17,6 @@ changes to the code, run tests and publish changes. ...@@ -17,7 +17,6 @@ changes to the code, run tests and publish changes.
```bash ```bash
# install this software release and request an instance # install this software release and request an instance
# use software-py3.cfg instead of software.cfg if the SR you want to test is written in Python 3
SR=https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-sr-testing/software.cfg SR=https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-sr-testing/software.cfg
COMP=slaprunner COMP=slaprunner
INSTANCE_NAME=$COMP INSTANCE_NAME=$COMP
......
[buildout]
extends =
software.cfg
[python]
part = python3
[python-interpreter]
extra-eggs +=
# plantuml 0.3.0 is only available for Python 3
${slapos.test.plantuml-setup:egg}
[template]
extra =
# The following list is for SR whose buildout runs only with Python 3.
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
caucase ${slapos.test.caucase-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup}
galene ${slapos.test.galene-setup:setup}
grafana ${slapos.test.grafana-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
helloworld ${slapos.test.helloworld-setup:setup}
html5as ${slapos.test.html5as-setup:setup}
html5as-base ${slapos.test.html5as-base-setup:setup}
htmlvalidatorserver ${slapos.test.htmlvalidatorserver-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
jscrawler ${slapos.test.jscrawler-setup:setup}
jstestnode ${slapos.test.jstestnode-setup:setup}
jupyter ${slapos.test.jupyter-setup:setup}
kvm ${slapos.test.kvm-setup:setup}
matomo ${slapos.test.matomo-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup}
repman ${slapos.test.repman-setup:setup}
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
seleniumserver ${slapos.test.seleniumserver-setup:setup}
theia ${slapos.test.theia-setup:setup}
turnserver ${slapos.test.turnserver-setup:setup}
...@@ -27,9 +27,6 @@ parts = ...@@ -27,9 +27,6 @@ parts =
shared-part-list = shared-part-list =
[python]
part = python2.7
[setup-develop-egg] [setup-develop-egg]
recipe = zc.recipe.egg:develop recipe = zc.recipe.egg:develop
...@@ -250,8 +247,7 @@ egg = slapos.core ...@@ -250,8 +247,7 @@ egg = slapos.core
setup = ${slapos.core-repository:location} setup = ${slapos.core-repository:location}
[python-interpreter] [python-interpreter]
eggs += ${:extra-eggs} eggs +=
extra-eggs =
${lxml-python:egg} ${lxml-python:egg}
${python-PyYAML:egg} ${python-PyYAML:egg}
${slapos.core-setup:egg} ${slapos.core-setup:egg}
...@@ -289,10 +285,11 @@ extra-eggs = ...@@ -289,10 +285,11 @@ extra-eggs =
${slapos.test.kvm-setup:egg} ${slapos.test.kvm-setup:egg}
${slapos.test.matomo-setup:egg} ${slapos.test.matomo-setup:egg}
${slapos.test.metabase-setup:egg} ${slapos.test.metabase-setup:egg}
${slapos.test.ors-amarisoft-setup:egg}
${slapos.test.monitor-setup:egg} ${slapos.test.monitor-setup:egg}
${slapos.test.nextcloud-setup:egg} ${slapos.test.nextcloud-setup:egg}
${slapos.test.nginx-push-stream-setup:egg} ${slapos.test.nginx-push-stream-setup:egg}
${slapos.test.ors-amarisoft-setup:egg}
${slapos.test.plantuml-setup:egg}
${slapos.test.powerdns-setup:egg} ${slapos.test.powerdns-setup:egg}
${slapos.test.proftpd-setup:egg} ${slapos.test.proftpd-setup:egg}
${slapos.test.re6stnet-setup:egg} ${slapos.test.re6stnet-setup:egg}
...@@ -352,27 +349,46 @@ context = ...@@ -352,27 +349,46 @@ context =
tests = tests =
json-schemas ${slapos.cookbook-setup:setup} json-schemas ${slapos.cookbook-setup:setup}
# The following list is for SR that work with either Python 2 and 3
# (as main Python). The test egg must supply a URL which depends on
# the version of Python that is used to run the test.
# Due to a bug in the way promises are run, we may also list some Py3-only SR
# here, to check there's no promise issue when slapos node runs with Python 2.
erp5 ${slapos.test.erp5-setup:setup}
fluentd ${slapos.test.fluentd-setup:setup}
###
${:extra}
extra =
# WARNING: This is for SR that only support Python 2.
# You should not add more lines here.
backupserver ${slapos.test.backupserver-setup:setup} backupserver ${slapos.test.backupserver-setup:setup}
beremiz-ide ${slapos.test.beremiz-ide-setup:setup} beremiz-ide ${slapos.test.beremiz-ide-setup:setup}
caddy-frontend ${slapos.test.caddy-frontend-setup:setup}
caucase ${slapos.test.caucase-setup:setup}
cloudooo ${slapos.test.cloudooo-setup:setup} cloudooo ${slapos.test.cloudooo-setup:setup}
dream ${slapos.test.dream-setup:setup} dream ${slapos.test.dream-setup:setup}
erp5 ${slapos.test.erp5-setup:setup}
erp5testnode ${slapos.test.erp5testnode-setup:setup}
fluentd ${slapos.test.fluentd-setup:setup}
galene ${slapos.test.galene-setup:setup}
gitlab ${slapos.test.gitlab-setup:setup} gitlab ${slapos.test.gitlab-setup:setup}
grafana ${slapos.test.grafana-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
helloworld ${slapos.test.helloworld-setup:setup}
html5as ${slapos.test.html5as-setup:setup}
html5as-base ${slapos.test.html5as-base-setup:setup}
htmlvalidatorserver ${slapos.test.htmlvalidatorserver-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
jscrawler ${slapos.test.jscrawler-setup:setup}
jstestnode ${slapos.test.jstestnode-setup:setup}
jupyter ${slapos.test.jupyter-setup:setup}
kvm ${slapos.test.kvm-setup:setup}
matomo ${slapos.test.matomo-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
plantuml ${slapos.test.plantuml-setup:setup}
powerdns ${slapos.test.powerdns-setup:setup}
proftpd ${slapos.test.proftpd-setup:setup}
re6stnet ${slapos.test.re6stnet-setup:setup} re6stnet ${slapos.test.re6stnet-setup:setup}
repman ${slapos.test.repman-setup:setup}
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
seleniumserver ${slapos.test.seleniumserver-setup:setup}
slapos-master ${slapos.test.slapos-master-setup:setup} slapos-master ${slapos.test.slapos-master-setup:setup}
slaprunner ${slapos.test.slaprunner-setup:setup} slaprunner ${slapos.test.slaprunner-setup:setup}
theia ${slapos.test.theia-setup:setup}
turnserver ${slapos.test.turnserver-setup:setup}
upgrade_erp5 ${slapos.test.upgrade_erp5-setup:setup} upgrade_erp5 ${slapos.test.upgrade_erp5-setup:setup}
[versions] [versions]
...@@ -397,13 +413,11 @@ PyPDF2 = 1.26.0+SlapOSPatched001 ...@@ -397,13 +413,11 @@ PyPDF2 = 1.26.0+SlapOSPatched001
# Django 1.11 is python 2 compatible # Django 1.11 is python 2 compatible
Django = 1.11 Django = 1.11
mock = 2.0.0:whl
testfixtures = 6.11.0 testfixtures = 6.11.0
funcsigs = 1.0.2 funcsigs = 1.0.2
mysqlclient = 1.3.12 mysqlclient = 1.3.12
pexpect = 4.8.0 pexpect = 4.8.0
ptyprocess = 0.6.0 ptyprocess = 0.6.0
typing = 3.7.4.3
psycopg2 = 2.8.6 psycopg2 = 2.8.6
......
...@@ -48,7 +48,6 @@ setup(name=name, ...@@ -48,7 +48,6 @@ setup(name=name,
'supervisor', 'supervisor',
'psutil', 'psutil',
'paramiko', 'paramiko',
'six',
'requests', 'requests',
], ],
zip_safe=True, zip_safe=True,
......
...@@ -35,12 +35,11 @@ import subprocess ...@@ -35,12 +35,11 @@ import subprocess
import json import json
import time import time
from six.moves.urllib.parse import urlparse from urllib.parse import urlparse
from six.moves.urllib.parse import quote from urllib.parse import quote
from six.moves.urllib.parse import urljoin from urllib.parse import urljoin
from six.moves.configparser import ConfigParser from configparser import ConfigParser
import requests import requests
import six
from slapos.recipe.librecipe import generateHashFromFiles from slapos.recipe.librecipe import generateHashFromFiles
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
...@@ -61,7 +60,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -61,7 +60,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
data = { data = {
"path": "workspace/slapos/software/%s" % software_release, "path": "workspace/slapos/software/%s" % software_release,
} }
resp = self._postToSlaprunner(url, data) resp = self._postToSlaprunner(url, data)
self.assertEqual(requests.codes.ok, resp.status_code) self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotEqual(json.loads(resp.text)['code'], 0, self.assertNotEqual(json.loads(resp.text)['code'], 0,
'Unexpecting result in call to setCurrentProject: %s' % resp.text) 'Unexpecting result in call to setCurrentProject: %s' % resp.text)
...@@ -69,7 +68,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -69,7 +68,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _buildSoftwareRelease(self): def _buildSoftwareRelease(self):
parameter_dict = self.computer_partition.getConnectionParameterDict() parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/runSoftwareProfile" % parameter_dict['url'] url = "%s/runSoftwareProfile" % parameter_dict['url']
resp = self._postToSlaprunner(url, {}) resp = self._postToSlaprunner(url, {})
self.assertEqual(requests.codes.ok, resp.status_code) self.assertEqual(requests.codes.ok, resp.status_code)
self.assertEqual(json.loads(resp.text)['result'], True, self.assertEqual(json.loads(resp.text)['result'], True,
'Unexpecting result in call to runSoftwareProfile: %s' % resp.text) 'Unexpecting result in call to runSoftwareProfile: %s' % resp.text)
...@@ -77,7 +76,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -77,7 +76,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _deployInstance(self): def _deployInstance(self):
parameter_dict = self.computer_partition.getConnectionParameterDict() parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/runInstanceProfile" % parameter_dict['url'] url = "%s/runInstanceProfile" % parameter_dict['url']
resp = self._postToSlaprunner(url, {}) resp = self._postToSlaprunner(url, {})
self.assertEqual(requests.codes.ok, resp.status_code) self.assertEqual(requests.codes.ok, resp.status_code)
self.assertEqual(json.loads(resp.text)['result'], True, self.assertEqual(json.loads(resp.text)['result'], True,
'Unexpecting result in call to runSoftwareProfile: %s' % resp.text) 'Unexpecting result in call to runSoftwareProfile: %s' % resp.text)
...@@ -100,7 +99,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -100,7 +99,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
def _isSoftwareReleaseReady(self): def _isSoftwareReleaseReady(self):
parameter_dict = self.computer_partition.getConnectionParameterDict() parameter_dict = self.computer_partition.getConnectionParameterDict()
url = "%s/isSRReady" % parameter_dict['url'] url = "%s/isSRReady" % parameter_dict['url']
resp = self._getFromSlaprunner(url) resp = self._getFromSlaprunner(url)
if requests.codes.ok != resp.status_code: if requests.codes.ok != resp.status_code:
return -1 return -1
return resp.text return resp.text
...@@ -125,7 +124,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -125,7 +124,7 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
} }
while True: while True:
time.sleep(25) time.sleep(25)
resp = self._postToSlaprunner(url, data) resp = self._postToSlaprunner(url, data)
if requests.codes.ok != resp.status_code: if requests.codes.ok != resp.status_code:
continue continue
if json.loads(resp.text)["instance"]["state"] is False: if json.loads(resp.text)["instance"]["state"] is False:
...@@ -153,9 +152,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -153,9 +152,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
url = "%s/getFileContent" % parameter_dict['url'] url = "%s/getFileContent" % parameter_dict['url']
data = { data = {
"file": relative_path "file": relative_path
} }
resp = self._postToSlaprunner(url, data) resp = self._postToSlaprunner(url, data)
self.assertEqual(requests.codes.ok, resp.status_code) self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotEqual(json.loads(resp.text)['code'], 0, self.assertNotEqual(json.loads(resp.text)['code'], 0,
'Unexpecting result in call to getFileContent: %s' % resp.text) 'Unexpecting result in call to getFileContent: %s' % resp.text)
...@@ -189,9 +188,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase): ...@@ -189,9 +188,9 @@ class SlaprunnerTestCase(SlapOSInstanceTestCase):
parameter_dict = self.computer_partition.getConnectionParameterDict() parameter_dict = self.computer_partition.getConnectionParameterDict()
takeover_url = parameter_dict["takeover-%s-url" % scope] takeover_url = parameter_dict["takeover-%s-url" % scope]
takeover_password = parameter_dict["takeover-%s-password" % scope] takeover_password = parameter_dict["takeover-%s-password" % scope]
resp = requests.get( resp = requests.get(
"%s?password=%s" % (takeover_url, takeover_password), f"{takeover_url}?password={takeover_password}",
verify=True) verify=True)
self.assertEqual(requests.codes.ok, resp.status_code) self.assertEqual(requests.codes.ok, resp.status_code)
self.assertNotIn("Error", resp.text, self.assertNotIn("Error", resp.text,
...@@ -236,7 +235,7 @@ class TestWebRunnerAutorun(SlaprunnerTestCase): ...@@ -236,7 +235,7 @@ class TestWebRunnerAutorun(SlaprunnerTestCase):
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return { return {
# Auto deploy is required for isSRReady to work.
'auto-deploy': 'true', 'auto-deploy': 'true',
'autorun': 'true', 'autorun': 'true',
'software-root': os.path.join(cls.slap._instance_root, "..", "soft"), 'software-root': os.path.join(cls.slap._instance_root, "..", "soft"),
...@@ -363,7 +362,7 @@ class TestSSH(SlaprunnerTestCase): ...@@ -363,7 +362,7 @@ class TestSSH(SlaprunnerTestCase):
self.assertTrue(fingerprint_from_url.startswith('ssh-rsa-'), fingerprint_from_url) self.assertTrue(fingerprint_from_url.startswith('ssh-rsa-'), fingerprint_from_url)
fingerprint_from_url = fingerprint_from_url[len('ssh-rsa-'):] fingerprint_from_url = fingerprint_from_url[len('ssh-rsa-'):]
class KeyPolicy(object): class KeyPolicy:
"""Accept server key and keep it in self.key for inspection """Accept server key and keep it in self.key for inspection
""" """
def missing_host_key(self, client, hostname, key): def missing_host_key(self, client, hostname, key):
...@@ -507,7 +506,7 @@ class TestResilientInstance(SlaprunnerTestCase): ...@@ -507,7 +506,7 @@ class TestResilientInstance(SlaprunnerTestCase):
# just check that keys returned on requested partition are for resilient # just check that keys returned on requested partition are for resilient
self.assertSetEqual( self.assertSetEqual(
set(self.computer_partition.getConnectionParameterDict().keys()), set(self.computer_partition.getConnectionParameterDict().keys()),
set([ {
'backend-url', 'backend-url',
'feed-url-runner-1-pull', 'feed-url-runner-1-pull',
'feed-url-runner-1-push', 'feed-url-runner-1-push',
...@@ -520,7 +519,7 @@ class TestResilientInstance(SlaprunnerTestCase): ...@@ -520,7 +519,7 @@ class TestResilientInstance(SlaprunnerTestCase):
'takeover-runner-1-password', 'takeover-runner-1-password',
'takeover-runner-1-url', 'takeover-runner-1-url',
'url', 'url',
'webdav-url'])) 'webdav-url'})
class TestResilientCustomFrontend(TestCustomFrontend): class TestResilientCustomFrontend(TestCustomFrontend):
instance_max_retry = 20 instance_max_retry = 20
...@@ -589,7 +588,7 @@ class TestResilientDummyInstance(SlaprunnerTestCase): ...@@ -589,7 +588,7 @@ class TestResilientDummyInstance(SlaprunnerTestCase):
self._waitForCloneToBeReadyForTakeover() self._waitForCloneToBeReadyForTakeover()
self._doTakeover() self._doTakeover()
self.slap.waitForInstance(20) self.slap.waitForInstance(20)
previous_computer_partition = self.computer_partition previous_computer_partition = self.computer_partition
self.computer_partition = self.requestDefaultInstance() self.computer_partition = self.requestDefaultInstance()
...@@ -601,5 +600,5 @@ class TestResilientDummyInstance(SlaprunnerTestCase): ...@@ -601,5 +600,5 @@ class TestResilientDummyInstance(SlaprunnerTestCase):
self.assertTrue(result_after.startswith("Hello"), result_after) self.assertTrue(result_after.startswith("Hello"), result_after)
self.assertIn(result, result_after, self.assertIn(result, result_after,
"%s not in %s" % (result, result_after)) f"{result} not in {result_after}")
...@@ -391,52 +391,49 @@ class TestTheiaEnv(TheiaTestCase): ...@@ -391,52 +391,49 @@ class TestTheiaEnv(TheiaTestCase):
    # Start a theia shell that inherits the environment of the theia process
    # This simulates the environment of a shell launched from the browser application
    theia_shell_process = pexpect.spawnu('{}/bin/theia-shell'.format(self.getPath()), env=theia_env)
    self.addCleanup(theia_shell_process.wait)
    self.addCleanup(theia_shell_process.terminate)

    theia_shell_process.expect_exact('Standalone SlapOS for computer `slaprunner` activated')

    # Launch slapos node software from theia shell
    theia_shell_process.sendline('slapos node software')
    theia_shell_process.expect('Installing software release %s' % self.dummy_software_path)
    theia_shell_process.expect('Finished software releases.')

    # Get the theia shell environment
    with open(env_json_path) as f:
      theia_shell_env = json.load(f)

    # Remove the env.json file to later be sure that a new one has been generated
    os.remove(env_json_path)

    # Launch slapos node software service from the embedded supervisord.
    # Note that we have two services, slapos-node-software and slapos-node-software-all
    # The latter uses --all which is what we want to use here, because the software
    # is already installed and we want to install it again, this time from supervisor
    embedded_run_path = self.getPath('srv', 'runner', 'var', 'run')
    embedded_supervisord_socket_path = _getSupervisordSocketPath(embedded_run_path, self.logger)
    with getSupervisorRPC(embedded_supervisord_socket_path) as embedded_supervisor:
      previous_stop_time = embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop']
      embedded_supervisor.startProcess('slapos-node-software-all')
      for _retries in range(20):
        time.sleep(1)
        if embedded_supervisor.getProcessInfo('slapos-node-software-all')['stop'] != previous_stop_time:
          break
      else:
        self.fail("the supervisord service 'slapos-node-software-all' takes too long to finish")

    # Get the supervisord environment
    with open(env_json_path) as f:
      supervisord_env = json.load(f)

    # Compare relevant variables from both environments
    self.maxDiff = None
    self.assertEqual(theia_shell_env['PATH'].split(':'), supervisord_env['PATH'].split(':'))
    self.assertEqual(theia_shell_env['SLAPOS_CONFIGURATION'], supervisord_env['SLAPOS_CONFIGURATION'])
    self.assertEqual(theia_shell_env['SLAPOS_CLIENT_CONFIGURATION'], supervisord_env['SLAPOS_CLIENT_CONFIGURATION'])
    self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME'])

class ResilientTheiaMixin(object):
......