Commit 79b9c851 authored by Jérome Perrin

Update Release Candidate

parents 5b55ce53 2e5ab151
......@@ -25,6 +25,8 @@ shared = true
version = 1.7.2
md5sum = 9a00835e4da8c215348e263b550fc130
url = https://archive.apache.org/dist/apr/apr-${:version}.tar.bz2
environment =
LDFLAGS=-Wl,-rpath=${libuuid:location}/lib
[apr-util]
recipe = slapos.recipe.cmmi
......@@ -35,6 +37,9 @@ md5sum = b6e8c9b31d938fe5797ceb0d1ff2eb69
configure-options =
--with-apr=${apr:location}
--with-expat=${libexpat:location}
--without-sqlite3
environment =
LDFLAGS=-Wl,-rpath=${libuuid:location}/lib
[apache]
recipe = slapos.recipe.cmmi
......@@ -97,8 +102,8 @@ configure-options = --disable-static
environment =
PATH=${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
CPPFLAGS =-I${libuuid:location}/include -I${openssl:location}/include -I${apr:location}/include -I${apr-util:location}/include
LDFLAGS =-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${apr:location}/lib -Wl,-rpath=${apr:location}/lib -L${apr-util:location}/lib -Wl,-rpath=${apr-util:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
CPPFLAGS=-I${libuuid:location}/include -I${openssl:location}/include -I${apr:location}/include -I${apr-util:location}/include
LDFLAGS=-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${apr:location}/lib -Wl,-rpath=${apr:location}/lib -L${apr-util:location}/lib -Wl,-rpath=${apr-util:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
[apache-antiloris]
# Note: This component tries to write in a [apache] parts, which is now
......
......@@ -30,6 +30,6 @@ configure-options =
--with-systemdsystemunitdir=no
environment =
PATH=${pkgconfig:location}/bin:${glib:location}/bin:%(PATH)s
CFLAGS=-I${gdbm:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig
CFLAGS=-I${gdbm:location}/include -I${libexpat:location}/include
LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${libexpat:location}/lib/pkgconfig
......@@ -9,7 +9,7 @@ recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/babeld.git
branch = master
git-executable = ${git:location}/bin/git
revision = v1.12.1-nxd1
revision = v1.12.1-nxd3
[babeld]
recipe = slapos.recipe.cmmi
......
......@@ -36,7 +36,7 @@ shared = true
pyyaml = ${pyyaml-download:target}
pyaml = ${pyaml-download:target}
init =
# add the python executable in the options dict so that
# add the python executable in the options dict so that
# buildout signature changes if python executable changes
import sys
options['python-executable'] = sys.executable
......
......@@ -13,6 +13,7 @@ url = https://ftp.gnu.org/gnu/coreutils/coreutils-9.0.tar.xz
md5sum = 0d79ae8a6124546e3b94171375e5e5d0
configure-options =
--disable-libcap
--without-selinux
--prefix=@@LOCATION@@
environment =
PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
......
......@@ -11,3 +11,4 @@ md5sum = 0bbd38f12656e4728e2f7c4708aec014
configure-options =
--disable-static
--enable-libgdbm-compat
--without-readline
......@@ -31,17 +31,25 @@ environment =
GOROOT_FINAL=${:location}
${:environment-extra}
# TestChown and TestSCMCredentials currently fail in a user-namespace
# TestChown currently fails in a user-namespace
# https://github.com/golang/go/issues/42525
# the patches apply to go >= 1.12
patch-options = -p1
patches =
${:_profile_base_location_}/skip-chown-tests.patch#d4e3c8ef83788fb2a5d80dd75034786f
[golang-common-pre-1.19]
<= golang-common
# TestSCMCredentials fails in a user-namespace if golang version < 1.19
# https://github.com/golang/go/issues/42525
patches +=
${:_profile_base_location_}/fix-TestSCMCredentials.patch#1d8dbc97cd579e03fafd8627d48f1c59
[golang14]
<= golang-common
<= golang-common-pre-1.19
# https://golang.org/doc/install/source#bootstrapFromSource
url = https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz
md5sum = dbf727a4b0e365bf88d97cbfde590016
......@@ -64,7 +72,7 @@ setarch = setarch arm
[golang1.12]
<= golang-common
<= golang-common-pre-1.19
url = https://golang.org/dl/go1.12.17.src.tar.gz
md5sum = 6b607fc795391dc609ffd79ebf41f080
......@@ -73,7 +81,7 @@ environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang1.16]
<= golang-common
<= golang-common-pre-1.19
url = https://golang.org/dl/go1.16.13.src.tar.gz
md5sum = 1c076f952d9af57590a36fa7d36f695a
......@@ -82,7 +90,7 @@ environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang1.17]
<= golang-common
<= golang-common-pre-1.19
url = https://golang.org/dl/go1.17.13.src.tar.gz
md5sum = 4476707f05cf6915ec1173038dc357a9
......@@ -91,7 +99,7 @@ environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang1.18]
<= golang-common
<= golang-common-pre-1.19
url = https://golang.org/dl/go1.18.9.src.tar.gz
md5sum = e2caa7c4de49aa77a14c694bfc9a5cd1
......@@ -102,6 +110,14 @@ md5sum = e2caa7c4de49aa77a14c694bfc9a5cd1
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang1.20]
<= golang-common
url = https://go.dev/dl/go1.20.6.src.tar.gz
md5sum = 1dc2d18790cfaede7df1e73a1eff8b7b
# go1.20 requires go1.17.13 to bootstrap (see https://go.dev/doc/go1.20#bootstrap)
environment-extra =
GOROOT_BOOTSTRAP=${golang1.17:location}
# ---- infrastructure to build Go workspaces / projects ----
# gowork is the top-level section that defines Go workspace.
......@@ -161,7 +177,7 @@ bin = ${gowork.dir:bin}
depends = ${gowork.goinstall:recipe}
# go version used for the workspace (possible to override in applications)
golang = ${golang1.18:location}
golang = ${golang1.20:location}
# no special build flags by default
buildflags =
......
......@@ -11,6 +11,8 @@ shared = true
url = https://github.com/logrotate/logrotate/releases/download/3.18.1/logrotate-3.18.1.tar.xz
md5sum = 07d5aba26c350f9ab5730c25a7277751
# BBB this is only for backward-compatibility.
configure-options =
--with-selinux=no
post-install =
ln -nsf . @@LOCATION@@/usr
environment =
......
......@@ -11,6 +11,7 @@ extends =
../openssl/buildout.cfg
../readline/buildout.cfg
../sqlite3/buildout.cfg
../util-linux/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
......@@ -45,35 +46,35 @@ pre-install = mkdir profile-opt
# which would otherwise load the system libmagic.so with ctypes
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${zlib:location}/include -I${xz-utils:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
LDFLAGS=-L${zlib:location}/lib -L${xz-utils:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${xz-utils:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib
CPPFLAGS=-I${zlib:location}/include -I${xz-utils:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include -I${libuuid:location}/include
LDFLAGS=-L${zlib:location}/lib -L${xz-utils:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${xz-utils:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${libuuid:location}/lib
[python3.7]
<= python3-common
version = 3.7
package_version = 3.7.15
md5sum = d2ff16776b5d822efc2a7cbf42fc2915
package_version = 3.7.17
md5sum = dd94cab4541b57b88cf3dab32d6336e3
[python3.8]
<= python3-common
version = 3.8
package_version = 3.8.15
md5sum = cca78a827d2327f5c3ff2dee9f526e7e
package_version = 3.8.18
md5sum = 5ea6267ea00513fc31d3746feb35842d
[python3.9]
<= python3-common
version = 3.9
package_version = 3.9.15
md5sum = 8adc5662c9fd10a23ae8ae9f28b65b49
package_version = 3.9.18
md5sum = 765576c3af57deb046819ecd57804bbb
[python3.10]
<= python3-common
version = 3.10
package_version = 3.10.8
md5sum = e92356b012ed4d0e09675131d39b1bde
package_version = 3.10.13
md5sum = 8847dc6458d1431d0ae0f55942deeb89
[python3.11]
<= python3-common
version = 3.11
package_version = 3.11.0
md5sum = fe92acfa0db9b9f5044958edb451d463
package_version = 3.11.5
md5sum = 393856f1b7713aa8bba4b642ab9985d3
......@@ -45,6 +45,7 @@ patches =
configure-options =
--enable-multibyte
--disable-static
--with-curses
environment =
CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
......
......@@ -10,6 +10,7 @@ recipe = slapos.recipe.cmmi
shared = true
url = https://sqlite.org/2022/sqlite-autoconf-3370200.tar.gz
md5sum = 683cc5312ee74e71079c14d24b7a6d27
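# remove the -ltinfo/-ltermcap flags and the termcap/curses library checks hard-coded in the bundled configure script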
pre-configure = sed -i 's/-ltinfo//g;s/-ltermcap//g;s/ termcap//;s/ curses//' configure
configure-options =
--disable-static
--enable-readline
......
......@@ -50,7 +50,7 @@ CGO_LDFLAGS += -Wl,-rpath=${zlib:location}/lib
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/wendelin.core.git
branch = master
revision = wendelin.core-2.0.alpha3-0-g6315384
revision = wendelin.core-2.0.alpha3-7-g885b355
# dir is pretty name as top-level recipe
location = ${buildout:parts-directory}/wendelin.core
git-executable = ${git:location}/bin/git
......@@ -221,49 +221,6 @@ class TestWaitForFiles(WrapperTestCase):
self.fail('process did not start after file was created')
@unittest.skipUnless(sys.platform.startswith("linux"), "Inotify is linux only")
class TestWaitForFilesInotifyError(TestWaitForFiles):
def setUp(self):
super(TestWaitForFilesInotifyError, self).setUp()
# use LD_PRELOAD to inject errors into inotify_add_watch calls
inotify_mock_c = self.getTempPath('inotify_mock.c')
inotify_mock_o = self.getTempPath('inotify_mock.o')
inotify_mock_so = self.getTempPath('inotify_mock.so')
with open(inotify_mock_c, 'w') as f:
f.write('''
#include <sys/inotify.h>
#include <string.h>
#include <errno.h>
int inotify_add_watch(int fd, const char *pathname, uint32_t mask) {
errno = ENOSPC;
return -1;
}
/* This is a bit tricky because inotify_simple calls
inotify_add_watch with ctypes.CDLL("libc.so"), which uses
dlopen("libc.so") and dlsym("inotify_add_watch"), so we first
override dlsym to return our own inotify_add_watch.
https://github.com/chrisjbillington/inotify_simple/blob/55737898/inotify_simple.py#L110
*/
extern void *__libc_dlsym (void *, const char *);
void *dlsym(void *handle, const char *symbol) {
if (strcmp(symbol, "inotify_add_watch") == 0) {
return (void *)inotify_add_watch;
}
return (void *)__libc_dlsym(handle, symbol);
}
''')
subprocess.check_call(['gcc', '-c', '-fPIC', '-o', inotify_mock_o, inotify_mock_c])
subprocess.check_call(['gcc', '-shared', '-o', inotify_mock_so, inotify_mock_o])
self.env = dict(
os.environ,
PYTHONUNBUFFERED='1',
LD_PRELOAD=inotify_mock_so)
expected_output = 'Error using inotify, falling back to polling\ndone\n'
class TestPrivateTmpFS(WrapperTestCase):
def getOptions(self):
......
......@@ -45,7 +45,7 @@ class TestBackupServer(InstanceTestCase):
# Check that there is a RSS feed
self.assertTrue('rss' in parameter_dict)
self.assertTrue(parameter_dict['rss'].startswith(
f'https://[{self._ipv6_address}]:9443/'
f'https://[{self.computer_partition_ipv6_address}]:9443/'
))
result = requests.get(
......
......@@ -47,7 +47,7 @@ class TestCaucase(SlapOSInstanceTestCase):
connection_parameter_dict = self.deserializeConnectionParameter()
self.assertEqual(
connection_parameter_dict,
{'url': 'http://[%s]:8009' % (self._ipv6_address,)}
{'url': 'http://[%s]:8009' % (self.computer_partition_ipv6_address,)}
)
result = requests.get(connection_parameter_dict['url'])
......@@ -57,14 +57,14 @@ class TestCaucase(SlapOSInstanceTestCase):
{
'_links': {
'self': {
'href': 'http://[%s]:8009' % (self._ipv6_address,)
'href': 'http://[%s]:8009' % (self.computer_partition_ipv6_address,)
},
'getCAUHAL': {
'href': 'http://[%s]:8009//cau' % (self._ipv6_address,),
'href': 'http://[%s]:8009//cau' % (self.computer_partition_ipv6_address,),
'title': 'cau'
},
'getCASHAL': {
'href': 'http://[%s]:8009//cas' % (self._ipv6_address,),
'href': 'http://[%s]:8009//cas' % (self.computer_partition_ipv6_address,),
'title': 'cas'
}
}
......
......@@ -16,7 +16,7 @@ shared = true
url = https://github.com/sigoden/dufs/archive/refs/tags/v0.34.1.tar.gz
md5sum = 77cbb2523aca8dad90fd77ee0277704f
configure-command = :
make-binary = cargo install --root=%(location)s --path .
make-binary = cargo install --root=%(location)s --path . --locked
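# --locked builds with the exact dependency versions recorded in Cargo.lock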
make-targets =
environment =
PATH=${rustc:location}/bin:%(PATH)s
......
......@@ -131,14 +131,14 @@ class TestFileServer(SlapOSInstanceTestCase):
return cnx.sock._sslobj.getpeercert()
cert_before = _getpeercert()
# execute certificate updater two month later, when it's time to renew certificate.
# execute certificate updater when it's time to renew certificate.
# use a timeout, because this service runs forever
subprocess.run(
(
'timeout',
'5',
'faketime',
'+2 months',
'+63 days',
os.path.join(
self.computer_partition_root_path,
'etc/service/dufs-certificate-updater'),
......
......@@ -147,7 +147,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual(
'https://[%s]:8888/tree' % self._ipv6_address,
'https://[%s]:8888/tree' % self.computer_partition_ipv6_address,
param_dict['jupyter-url']
)
......
......@@ -138,7 +138,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual(
'https://[%s]:8888/tree' % self._ipv6_address,
'https://[%s]:8888/tree' % self.getPartitionIPv6(self.getPartitionId("jupyter")),
param_dict['jupyter-url']
)
......
......@@ -115,7 +115,7 @@ class WendelinTutorialTestCase(FluentdTestCase):
round(random.uniform(-20, 50), 3))]
def serve(self, port, request_handler_class):
server_address = (self._ipv6_address, port)
server_address = (self.computer_partition_ipv6_address, port)
server = OneRequestServer(server_address, request_handler_class)
data = server.get_first_data(FLUSH_INTERVAL)
......@@ -181,7 +181,7 @@ class SensorConfTestCase(WendelinTutorialTestCase):
@type forward
<server>
name myserver1
host {cls._ipv6_address}
host {cls.computer_partition_ipv6_address}
</server>
<buffer>
flush_mode immediate
......@@ -199,7 +199,7 @@ print("{measurement_text}")'''
def test_configuration(self):
self._test_configuration(
fr'adding forwarding server \'myserver1\' host="{self._ipv6_address}" port={FLUENTD_PORT} weight=60'
fr'adding forwarding server \'myserver1\' host="{self.computer_partition_ipv6_address}" port={FLUENTD_PORT} weight=60'
)
def test_send_data(self):
......@@ -232,11 +232,11 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
<source>
@type forward
port {fluentd_port}
bind {cls._ipv6_address}
bind {cls.computer_partition_ipv6_address}
</source>
<match tag.name>
@type wendelin
streamtool_uri http://[{cls._ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
streamtool_uri http://[{cls.computer_partition_ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
user foo
password bar
<buffer>
......@@ -249,9 +249,9 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
@classmethod
def get_configuration(cls):
fluentd_port = findFreeTCPPort(cls._ipv6_address)
fluentd_port = findFreeTCPPort(cls.computer_partition_ipv6_address)
cls._fluentd_port = fluentd_port
wendelin_port = findFreeTCPPort(cls._ipv6_address)
wendelin_port = findFreeTCPPort(cls.computer_partition_ipv6_address)
cls._wendelin_port = wendelin_port
return cls.gateway_conf(fluentd_port, wendelin_port)
......@@ -260,7 +260,7 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
def test_wendelin_data_forwarding(self):
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.connect((self._ipv6_address, self._fluentd_port))
sock.connect((self.computer_partition_ipv6_address, self._fluentd_port))
data = [
msgpack.ExtType(0, struct.pack('!Q', int(time.time()) << 32)),
......
......@@ -39,4 +39,4 @@ md5sum = ad2baf4599a937d7352034a41fa24814
[promtail-config-file]
filename = promtail-config-file.cfg.in
md5sum = c8c9d815dd7b427788c066f041f04573
md5sum = 5f1b3a1a3d3f98daeab4780106452d71
......@@ -7,7 +7,7 @@ server:
external_url: {{ promtail['url'] }}
positions:
filename: {{ promtail['dir'] }}/positions.yaml
filename: {{ promtail['dir'] }}/positions.yaml
clients:
- url: {{ loki['url'] }}/api/prom/push
......
......@@ -319,7 +319,7 @@ class TestListenInPartition(GrafanaTestCase):
c.laddr for c in self.process_dict['grafana'].connections()
if c.status == 'LISTEN'
],
[(self._ipv6_address, 8180)],
[(self.computer_partition_ipv6_address, 8180)],
)
def test_influxdb_listen(self):
......@@ -330,7 +330,7 @@ class TestListenInPartition(GrafanaTestCase):
]),
[
(self._ipv4_address, 8088),
(self._ipv6_address, 8086),
(self.computer_partition_ipv6_address, 8086),
],
)
......
......@@ -43,7 +43,7 @@ class TestHtmlValidatorServer(InstanceTestCase):
self.assertTrue('vnu-url' in parameter_dict)
self.assertEqual(
'https://[%s]:8899/' % (self._ipv6_address, ),
'https://[%s]:8899/' % (self.computer_partition_ipv6_address, ),
parameter_dict['vnu-url']
)
......
......@@ -52,7 +52,7 @@ class TestJSTestNode(InstanceTestCase):
self.assertEqual(
{
'nginx': 'http://[%s]:9443/' % (self._ipv6_address, )
'nginx': 'http://[%s]:9443/' % (self.computer_partition_ipv6_address, )
},
connection_dict
)
......@@ -83,7 +83,7 @@ class TestJSTestNode(InstanceTestCase):
# Default access
result = requests.get(
'http://[%s]:9443' % (self._ipv6_address, ), allow_redirects=False)
'http://[%s]:9443' % (self.computer_partition_ipv6_address, ), allow_redirects=False)
self.assertEqual(
[requests.codes.forbidden, False],
[result.status_code, result.is_redirect]
......
......@@ -52,10 +52,10 @@ class TestJupyter(InstanceTestCase):
self.assertEqual(
{
'jupyter-classic-url': 'https://[%s]:8888/tree' % (self._ipv6_address, ),
'jupyterlab-url': 'https://[%s]:8888/lab' % (self._ipv6_address, ),
'jupyter-classic-url': 'https://[%s]:8888/tree' % (self.computer_partition_ipv6_address, ),
'jupyterlab-url': 'https://[%s]:8888/lab' % (self.computer_partition_ipv6_address, ),
'password': '%s' % (password, ),
'url': 'https://[%s]:8888/tree' % (self._ipv6_address, )
'url': 'https://[%s]:8888/tree' % (self.computer_partition_ipv6_address, )
},
connection_dict
)
......
......@@ -115,6 +115,46 @@ bootstrap_machine_param_dict = {
}
class KVMTestCase(InstanceTestCase):
@classmethod
def _findTopLevelPartitionPath(cls, path):
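# walk the given path from its root, one component at a time, and return the
# first prefix that contains a .slapos-resource file (the top level partition)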
index = 0
while True:
index = path.find(os.path.sep, index) + len(os.path.sep)
top_path = path[:index]
if os.path.exists(os.path.join(top_path, '.slapos-resource')):
return top_path
if index == -1:
return None
@classmethod
def _updateSlaposResource(cls, partition_path, **kw):
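# rewrite the partition's .slapos-resource file in place, merging the given keys into the existing JSON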
with open(os.path.join(partition_path, '.slapos-resource'), 'r+') as f:
resource = json.load(f)
resource.update(kw)
f.seek(0)
f.truncate()
json.dump(resource, f, indent=2)
@classmethod
def formatPartitions(cls):
super().formatPartitions()
# steal tap from top level partition
instance_directory = cls.slap.instance_directory
top_partition_path = cls._findTopLevelPartitionPath(instance_directory)
with open(os.path.join(top_partition_path, '.slapos-resource')) as f:
top_resource = json.load(f)
for partition in os.listdir(instance_directory):
if not partition.startswith(cls.__partition_reference__):
continue
partition_path = os.path.join(instance_directory, partition)
cls._updateSlaposResource(partition_path, tap=top_resource['tap'])
class KvmMixin:
def getConnectionParameterDictJson(self):
return json.loads(
......@@ -176,7 +216,7 @@ class KvmMixinJson:
@skipUnlessKvm
class TestInstance(InstanceTestCase, KvmMixin):
class TestInstance(KVMTestCase, KvmMixin):
__partition_reference__ = 'i'
def test(self):
......@@ -192,12 +232,12 @@ class TestInstance(InstanceTestCase, KvmMixin):
self.assertEqual(
connection_parameter_dict,
{
'ipv6': self._ipv6_address,
'ipv6': self.computer_partition_ipv6_address,
'maximum-extra-disk-amount': '0',
'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self.computer_partition_ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self.computer_partition_ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self.computer_partition_ipv6_address} : 10080',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......@@ -227,7 +267,7 @@ class TestInstanceJson(
@skipUnlessKvm
class TestMemoryManagement(InstanceTestCase, KvmMixin):
class TestMemoryManagement(KVMTestCase, KvmMixin):
__partition_reference__ = 'i'
def getKvmProcessInfo(self, switch_list):
......@@ -395,7 +435,7 @@ class MonitorAccessMixin(KvmMixin):
@skipUnlessKvm
class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
class TestAccessDefault(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'ad'
expected_partition_with_monitor_base_url_count = 1
......@@ -416,7 +456,7 @@ class TestAccessDefaultJson(KvmMixinJson, TestAccessDefault):
@skipUnlessKvm
class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
class TestAccessDefaultAdditional(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'ada'
expected_partition_with_monitor_base_url_count = 1
......@@ -452,7 +492,7 @@ class TestAccessDefaultAdditionalJson(
@skipUnlessKvm
class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
class TestAccessDefaultBootstrap(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'adb'
expected_partition_with_monitor_base_url_count = 1
......@@ -464,27 +504,22 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
def test(self):
# START: mock .slapos-resource with tap.ipv4_addr
# needed for netconfig.sh
test_partition_slapos_resource_file = os.path.join(
self.computer_partition_root_path, '.slapos-resource')
path = os.path.realpath(os.curdir)
while path != '/':
root_slapos_resource_file = os.path.join(path, '.slapos-resource')
if os.path.exists(root_slapos_resource_file):
break
path = os.path.realpath(os.path.join(path, '..'))
else:
raise ValueError('No .slapos-resource found to base the mock on')
with open(root_slapos_resource_file) as fh:
root_slapos_resource = json.load(fh)
if root_slapos_resource['tap']['ipv4_addr'] == '':
root_slapos_resource['tap'].update({
partition_path = self.computer_partition_root_path
top_partition_path = self._findTopLevelPartitionPath(partition_path)
with open(os.path.join(top_partition_path, '.slapos-resource')) as f:
top_tap = json.load(f)['tap']
if top_tap['ipv4_addr'] == '':
top_tap.update({
"ipv4_addr": "10.0.0.2",
"ipv4_gateway": "10.0.0.1",
"ipv4_netmask": "255.255.0.0",
"ipv4_network": "10.0.0.0"
})
with open(test_partition_slapos_resource_file, 'w') as fh:
json.dump(root_slapos_resource, fh, indent=4)
self._updateSlaposResource(partition_path, tap=top_tap)
self.slap.waitForInstance(max_retry=10)
# END: mock .slapos-resource with tap.ipv4_addr
......@@ -505,7 +540,7 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
@skipUnlessKvm
class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
class TestAccessKvmCluster(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'akc'
expected_partition_with_monitor_base_url_count = 2
......@@ -535,7 +570,7 @@ class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
@skipUnlessKvm
class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):
class TestAccessKvmClusterAdditional(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'akca'
expected_partition_with_monitor_base_url_count = 2
......@@ -575,7 +610,7 @@ class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):
@skipUnlessKvm
class TestAccessKvmClusterBootstrap(MonitorAccessMixin, InstanceTestCase):
class TestAccessKvmClusterBootstrap(MonitorAccessMixin, KVMTestCase):
__partition_reference__ = 'akcb'
expected_partition_with_monitor_base_url_count = 3
......@@ -618,7 +653,7 @@ class TestAccessKvmClusterBootstrap(MonitorAccessMixin, InstanceTestCase):
@skipUnlessKvm
class TestInstanceResilient(InstanceTestCase, KvmMixin):
class TestInstanceResilient(KVMTestCase, KvmMixin):
__partition_reference__ = 'ir'
instance_max_retry = 20
......@@ -626,6 +661,13 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
def getInstanceSoftwareType(cls):
return 'kvm-resilient'
@classmethod
def setUpClass(cls):
super().setUpClass()
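# cache the ipv6 addresses of the individual partitions referenced by the assertions below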
cls.pbs1_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('PBS (kvm / 1)'))
cls.kvm0_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('kvm0'))
cls.kvm1_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('kvm1'))
def test_kvm_exporter(self):
exporter_partition = os.path.join(
self.slap.instance_directory,
......@@ -661,19 +703,19 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
self.assertRegex(
feed_pull,
'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull'.format(
self._ipv6_address))
self.pbs1_ipv6))
feed_push = connection_parameter_dict.pop('feed-url-kvm-1-push')
self.assertRegex(
feed_push,
'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push'.format(
self._ipv6_address))
self.pbs1_ipv6))
self.assertEqual(
connection_parameter_dict,
{
'ipv6': self._ipv6_address,
'monitor-base-url': f'https://[{self._ipv6_address}]:8160',
'ipv6': self.kvm0_ipv6,
'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8160',
'monitor-user': 'admin',
'takeover-kvm-1-url': f'http://[{self._ipv6_address}]:9263/',
'takeover-kvm-1-url': f'http://[{self.kvm1_ipv6}]:9263/',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......@@ -733,7 +775,7 @@ class TestInstanceResilientJson(
@skipUnlessKvm
class TestInstanceResilientDiskTypeIde(InstanceTestCase, KvmMixin):
class TestInstanceResilientDiskTypeIde(KVMTestCase, KvmMixin):
@classmethod
def getInstanceParameterDict(cls):
return {
......@@ -748,7 +790,7 @@ class TestInstanceResilientDiskTypeIdeJson(
@skipUnlessKvm
class TestAccessResilientAdditional(InstanceTestCase):
class TestAccessResilientAdditional(KVMTestCase):
__partition_reference__ = 'ara'
expected_partition_with_monitor_base_url_count = 1
......@@ -788,7 +830,7 @@ class TestAccessResilientAdditionalJson(
pass
class TestInstanceNbdServer(InstanceTestCase):
class TestInstanceNbdServer(KVMTestCase):
__partition_reference__ = 'ins'
instance_max_retry = 5
......@@ -870,7 +912,7 @@ class FakeImageServerMixin(KvmMixin):
os.chdir(cls.image_source_directory)
try:
cls.server_process = multiprocessing.Process(
target=server.serve_forever, name='FakeImageHttpServer')
target=server.serve_forever, name='FakeImageHttpServer', daemon=True)
cls.server_process.start()
server.socket.close()
finally:
......@@ -890,7 +932,7 @@ class FakeImageServerMixin(KvmMixin):
@skipUnlessKvm
class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
class TestBootImageUrlList(KVMTestCase, FakeImageServerMixin):
__partition_reference__ = 'biul'
kvm_instance_partition_reference = 'biul0'
......@@ -931,8 +973,12 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
@classmethod
def setUpClass(cls):
try:
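# make sure the fake image server is stopped again if setUpClass fails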
cls.startImageHttpServer()
super().setUpClass()
except BaseException:
cls.stopImageHttpServer()
raise
@classmethod
def tearDownClass(cls):
......@@ -1246,7 +1292,7 @@ class TestBootImageUrlSelectResilientJson(
@skipUnlessKvm
class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
class TestBootImageUrlListKvmCluster(KVMTestCase, FakeImageServerMixin):
__partition_reference__ = 'biulkc'
@classmethod
......@@ -1324,7 +1370,7 @@ class TestBootImageUrlSelectKvmCluster(TestBootImageUrlListKvmCluster):
@skipUnlessKvm
class TestNatRules(KvmMixin, InstanceTestCase):
class TestNatRules(KvmMixin, KVMTestCase):
__partition_reference__ = 'nr'
@classmethod
......@@ -1340,11 +1386,11 @@ class TestNatRules(KvmMixin, InstanceTestCase):
self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict)
self.assertEqual(
f'{self._ipv6_address} : 10100',
f'{self.computer_partition_ipv6_address} : 10100',
connection_parameter_dict['nat-rule-port-tcp-100']
)
self.assertEqual(
f'{self._ipv6_address} : 10200',
f'{self.computer_partition_ipv6_address} : 10200',
connection_parameter_dict['nat-rule-port-tcp-200']
)
......@@ -1356,7 +1402,7 @@ class TestNatRulesJson(
@skipUnlessKvm
class TestNatRulesKvmCluster(InstanceTestCase):
class TestNatRulesKvmCluster(KVMTestCase):
__partition_reference__ = 'nrkc'
nat_rules = ["100", "200", "300"]
......@@ -1405,7 +1451,7 @@ class TestNatRulesKvmClusterComplex(TestNatRulesKvmCluster):
@skipUnlessKvm
class TestWhitelistFirewall(InstanceTestCase):
class TestWhitelistFirewall(KVMTestCase):
__partition_reference__ = 'wf'
kvm_instance_partition_reference = 'wf0'
......@@ -1533,7 +1579,7 @@ class TestWhitelistFirewallRequestCluster(TestWhitelistFirewallRequest):
@skipUnlessKvm
class TestDiskDevicePathWipeDiskOndestroy(InstanceTestCase, KvmMixin):
class TestDiskDevicePathWipeDiskOndestroy(KVMTestCase, KvmMixin):
__partition_reference__ = 'ddpwdo'
kvm_instance_partition_reference = 'ddpwdo0'
......@@ -1568,7 +1614,7 @@ class TestDiskDevicePathWipeDiskOndestroyJson(
@skipUnlessKvm
class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
class TestImageDownloadController(KVMTestCase, FakeImageServerMixin):
__partition_reference__ = 'idc'
maxDiff = None
......@@ -1770,7 +1816,7 @@ INF: Storing errors in %(error_state_file)s
@skipUnlessKvm
class TestParameterDefault(InstanceTestCase, KvmMixin):
class TestParameterDefault(KVMTestCase, KvmMixin):
__partition_reference__ = 'pd'
@classmethod
......@@ -1879,22 +1925,17 @@ class ExternalDiskMixin(KvmMixin):
def _prepareExternalStorageList(cls):
external_storage_path = os.path.join(cls.working_directory, 'STORAGE')
os.mkdir(external_storage_path)
# reuse .slapos-resource information of the containing partition
# it's similar to slapos/recipe/slapconfiguration.py
_resource_home = cls.slap.instance_directory
parent_slapos_resource = None
while not os.path.exists(os.path.join(_resource_home, '.slapos-resource')):
_resource_home = os.path.normpath(os.path.join(_resource_home, '..'))
if _resource_home == "/":
break
else:
with open(os.path.join(_resource_home, '.slapos-resource')) as fh:
parent_slapos_resource = json.load(fh)
assert parent_slapos_resource is not None
for partition in os.listdir(cls.slap.instance_directory):
# We already reuse tap from top level partition
instance_directory = cls.slap.instance_directory
for partition in os.listdir(instance_directory):
if not partition.startswith(cls.__partition_reference__):
continue
partition_path = os.path.join(instance_directory, partition)
partition_store_list = []
for number in range(10):
storage = os.path.join(external_storage_path, f'data{number}')
......@@ -1903,13 +1944,12 @@ class ExternalDiskMixin(KvmMixin):
partition_store = os.path.join(storage, partition)
os.mkdir(partition_store)
partition_store_list.append(partition_store)
slapos_resource = parent_slapos_resource.copy()
slapos_resource['external_storage_list'] = partition_store_list
with open(
os.path.join(
cls.slap.instance_directory, partition, '.slapos-resource'),
'w') as fh:
json.dump(slapos_resource, fh, indent=2)
cls._updateSlaposResource(
partition_path,
external_storage_list=partition_store_list,
)
# above is not enough: the presence of parameter is required in slapos.cfg
slapos_config = []
with open(cls.slap._slapos_config) as fh:
......@@ -1951,7 +1991,7 @@ class ExternalDiskMixin(KvmMixin):
@skipUnlessKvm
class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
class TestExternalDisk(KVMTestCase, ExternalDiskMixin):
__partition_reference__ = 'ed'
kvm_instance_partition_reference = 'ed0'
......@@ -2101,7 +2141,7 @@ class ExternalDiskModernMixin(object):
@skipUnlessKvm
class TestExternalDiskModern(
ExternalDiskModernMixin, InstanceTestCase, ExternalDiskMixin):
ExternalDiskModernMixin, KVMTestCase, ExternalDiskMixin):
def test(self):
self.prepareEnv()
self.waitForInstance()
......@@ -2122,7 +2162,7 @@ class TestExternalDiskModern(
@skipUnlessKvm
class TestExternalDiskModernConflictAssurance(
ExternalDiskModernMixin, InstanceTestCase, ExternalDiskMixin):
ExternalDiskModernMixin, KVMTestCase, ExternalDiskMixin):
def test(self):
self.prepareEnv()
# Create conflicting configuration
......@@ -2182,7 +2222,7 @@ class TestExternalDiskModernCluster(TestExternalDiskModern):
@skipUnlessKvm
class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):
class TestExternalDiskModernIndexRequired(KVMTestCase, ExternalDiskMixin):
__partition_reference__ = 'edm'
kvm_instance_partition_reference = 'edm0'
......@@ -2246,7 +2286,7 @@ class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):
@skipUnlessKvm
class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
class TestInstanceHttpServer(KVMTestCase, KvmMixin):
__partition_reference__ = 'ihs'
@classmethod
......@@ -2269,7 +2309,7 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
os.chdir(cls.http_directory)
try:
cls.server_process = multiprocessing.Process(
target=server.serve_forever, name='HttpServer')
target=server.serve_forever, name='HttpServer', daemon=True)
cls.server_process.start()
finally:
os.chdir(old_dir)
......@@ -2288,8 +2328,12 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
@classmethod
def setUpClass(cls):
try:
cls.startHttpServer()
super().setUpClass()
except BaseException:
cls.stopHttpServer()
raise
@classmethod
def tearDownClass(cls):
......@@ -2320,12 +2364,12 @@ vm""",
self.assertEqual(
connection_parameter_dict,
{
'ipv6': self._ipv6_address,
'ipv6': self.computer_partition_ipv6_address,
'maximum-extra-disk-amount': '0',
'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8026',
'nat-rule-port-tcp-22': f'{self.computer_partition_ipv6_address} : 10022',
'nat-rule-port-tcp-443': f'{self.computer_partition_ipv6_address} : 10443',
'nat-rule-port-tcp-80': f'{self.computer_partition_ipv6_address} : 10080',
}
)
self.assertEqual(set(present_key_list), set(assert_key_list))
......
[instance-profile]
filename = instance.cfg.in
md5sum = 685e7b371768f6977896d7214fd379f1
md5sum = 0d50ed911a41b76b952b63d37853c3a4
......@@ -44,11 +44,7 @@ command-line = sh -c "cd $${directory:srv-metabase}; ${java:location}/bin/java $
environment =
MB_EMOJI_IN_LOGS=false
MB_JETTY_HOST=$${:ip}
MB_JETTY_PORT=$${:-http-port}
MB_JETTY_SSL_PORT=$${:port}
MB_JETTY_SSL=true
MB_JETTY_SSL_KEYSTORE=$${metabase-keystore:file}
MB_JETTY_SSL_KEYSTORE_PASSWORD=$${metabase-keystore:password}
MB_JETTY_PORT=$${:port}
MB_DB_TYPE=postgres
MB_DB_DBNAME=$${postgresql:dbname}
MB_DB_PORT=$${postgresql:port}
......@@ -62,68 +58,57 @@ environment =
hash-existing-files =
$${buildout:directory}/software_release/buildout.cfg
ip = $${instance-parameter:ipv6-random}
port = 8443
# XXX It does not seem we can prevent metabase to also listen on http, so we
# give it an http port, but don't use it.
-http-port = 18080
hostname = [$${:ip}]
scheme = https
url = $${:scheme}://$${:hostname}:$${:port}
ip = $${instance-parameter:ipv4-random}
port = 18080
promises =
$${metabase-promise:name}
[metabase-promise]
<= monitor-promise-base
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-instance:url}/api/session/properties
[metabase-keystore-password]
recipe = slapos.cookbook:generate.password
[metabase-keystore]
[metabase-frontend-certificate]
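# self-signed certificate served by the haproxy frontend; generated with openssl on first run and reused afterwards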
recipe = plone.recipe.command
stop-on-error = true
command =
if [ -f $${:file} ]
then
# XXX password used to be "insecure", but we changed to proper password.
# We try to list the store with the new password and if it fail we change
# the keystore password.
if ! ${java:location}/bin/keytool \
-list \
-keystore "$${:file}" \
-storepass "$${:password}"
if [ ! -e $${:cert-file} ]
then
echo "Migrating keystore password" && \
${java:location}/bin/keytool \
-storepasswd \
-keystore "$${:file}" \
-storepass insecure \
-new "$${:password}" && \
echo "Migrating certificate key password" && \
${java:location}/bin/keytool \
-keypasswd \
-alias "$${:alias}" \
-keypass insecure \
-new "$${:password}" \
-keystore "$${:file}" \
-storepass "$${:password}"
${openssl-output:openssl} req -x509 -nodes -days 3650 \
-subj "/C=AA/ST=X/L=X/O=Dis/CN=$${:common-name}" \
-newkey rsa -keyout $${:cert-file} \
-out $${:cert-file}
fi
else
${java:location}/bin/keytool \
-genkeypair \
-alias "$${:alias}" \
-keyalg RSA \
-keypass "$${:password}" \
-dname "CN=$${metabase-instance:ip},OU=Unit,O=Organization,L=City,S=State,C=Country" \
-keystore "$${:file}" \
-storepass "$${:password}"
fi
file = $${directory:etc}/.metabase_keystore
password = $${metabase-keystore-password:passwd}
alias = metabase
update-command = $${:command}
cert-file = $${directory:var}/$${:_buildout_section_name_}.pem
common-name = $${metabase-frontend-config:ip}
location =
$${:cert-file}
[metabase-frontend-config]
recipe = slapos.recipe.template:jinja2
url = ${stack-haproxy-default-backend-config:target}
output = $${directory:etc}/$${:_buildout_section_name_}
context =
key pidfile :pidfile
key content :content
content =
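# haproxy terminates TLS on the ipv6 address and proxies requests to metabase's plain http port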
listen app
log global
bind $${:ip}:$${:port} ssl crt $${metabase-frontend-certificate:cert-file} alpn h2,http/1.1
server app $${metabase-instance:ip}:$${metabase-instance:port}
pidfile = $${directory:run}/$${:_buildout_section_name_}.pid
ip = $${instance-parameter:ipv6-random}
port = 8443
[metabase-frontend]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:services}/$${:_buildout_section_name_}
command-line =
${haproxy:location}/sbin/haproxy -f $${metabase-frontend-config:output}
url = https://[$${metabase-frontend-config:ip}]:$${metabase-frontend-config:port}
[metabase-promise]
<= monitor-promise-base
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-frontend:url}/api/session/properties
[postgresql-password]
recipe = slapos.cookbook:generate.password
......@@ -247,6 +232,7 @@ var-cron-entries = $${:var}/cron-entries
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
run = $${buildout:directory}/run
service = $${:etc}/service
srv-metabase = $${:srv}/metabase
srv-backup = $${:srv}/backup
......@@ -254,6 +240,6 @@ fontconfig-cache = $${buildout:directory}/.fontconfig
[publish-connection-parameter]
recipe = slapos.cookbook:publish
url = $${metabase-instance:url}
url = $${metabase-frontend:url}
backup-crontab = $${postgresql-backup-crontab-entry:name}
restore-backup-script = $${postgresql-restore-backup:wrapper-path}
......@@ -6,6 +6,7 @@ extends =
../../component/java/buildout.cfg
../../component/postgresql/buildout.cfg
../../component/dcron/buildout.cfg
../../stack/haproxy/default-backend.cfg
../../stack/slapos.cfg
buildout.hash.cfg
../../stack/monitor/buildout.cfg
......@@ -19,8 +20,8 @@ parts =
[metabase.jar]
recipe = slapos.recipe.build:download
url = https://downloads.metabase.com/v0.45.2/metabase.jar
md5sum = cca1f4d663ebfa60b3a6d93705b340a0
url = https://downloads.metabase.com/v0.47.0/metabase.jar
md5sum = b81c71668a2177d89690730fabd85d9e
[instance-profile]
recipe = slapos.recipe.template
......
......@@ -30,7 +30,7 @@ md5sum = 9f27195d770b2f57461c60a82c851ab9
[instance-neo]
filename = instance-neo.cfg.in
md5sum = 504b021715566e69ad664101f1b12a5c
md5sum = fda911d5ef9efee365f1b0ff9843a50b
[template-neo-my-cnf]
filename = my.cnf.in
......
......@@ -3,9 +3,10 @@
{% set init_list = [] -%}
{% set private_tmpfs = slapparameter_dict.get('private-tmpfs') -%}
{% set storage_count = slapparameter_dict.get('storage-count', 1) -%}
{% set storage_type = slapparameter_dict.get('storage-type') or (
'MySQL' if mariadb_location is defined else 'SQLite') -%}
{% set mysql = storage_type != 'SQLite' -%}
{% set mysql = storage_count and storage_type != 'SQLite' -%}
{% if mysql -%}
[{{ section('mysqld') }}]
......@@ -113,7 +114,7 @@ engine = ${my-cnf-parameters:engine}
dedup = {{ dumps(bool(slapparameter_dict.get('data-deduplication'))) }}
disable-drop-partitions = {{ dumps(bool(slapparameter_dict.get('disable-drop-partitions'))) }}
{% for i in range(slapparameter_dict.get('storage-count', 1)) -%}
{% for i in range(storage_count) -%}
{% set storage_id = 'neo-storage-' ~ i -%}
[{{ section(storage_id) }}]
< = neo-storage
......
......@@ -65,6 +65,10 @@ class NextCloudTestCase(InstanceTestCase):
self.nextcloud_path,
"Nextcloud path not found in %r" % (partition_path_list,))
# lookup nextcloud partition ipv6
partition_id = os.path.basename(self.partition_dir)
self.nextcloud_ipv6 = self.getPartitionIPv6(partition_id)
# parse database info from mariadb url
d = self.computer_partition.getConnectionParameterDict()
db_url = d['mariadb-url-list'][2:-2] # parse <url> out of "['<url>']"
......@@ -86,9 +90,9 @@ class NextCloudTestCase(InstanceTestCase):
mail_smtpport="587",
mail_smtppassword="",
mail_smtpname="",
cli_url="https://[%s]:9988/" % self._ipv6_address,
cli_url="https://[%s]:9988/" % self.nextcloud_ipv6,
partition_dir=self.partition_dir,
trusted_domain_list=json.dumps(["[%s]:9988" % self._ipv6_address]),
trusted_domain_list=json.dumps(["[%s]:9988" % self.nextcloud_ipv6]),
trusted_proxy_list=[],
)
data_dict.update(config_dict)
......@@ -336,7 +340,7 @@ class TestNextCloudParameters(NextCloudTestCase):
cli_url="nextcloud.example.com",
partition_dir=self.partition_dir,
trusted_domain_list=json.dumps([
"[%s]:9988" % self._ipv6_address,
"[%s]:9988" % self.nextcloud_ipv6,
"nextcloud.example.com",
"nextcloud.proxy.com"
]),
......
......@@ -16,7 +16,7 @@
[template]
filename = instance.cfg
md5sum = 38bceddc77a5a44d69f0d9614909d1a2
md5sum = efcb5965bdf766d94b43c6ba2328275d
[amarisoft-stats.jinja2.py]
_update_hash_filename_ = amarisoft-stats.jinja2.py
......@@ -36,11 +36,11 @@ md5sum = b34fe47a73890097fbc6ea6374aeb38d
[template-enb]
_update_hash_filename_ = instance-enb.jinja2.cfg
md5sum = 6382f871c0f4e7e965c95de7a959342a
md5sum = 8b6e778ec21e03ef3f832ee420d7e83a
[template-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg
md5sum = 4df8edfb9a8bcbdcc9740afb27a88928
md5sum = a94322f85f438dccba699e0f5ea09e3a
[template-core-network]
_update_hash_filename_ = instance-core-network.jinja2.cfg
......
......@@ -276,6 +276,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -5,6 +5,9 @@ parts =
enb-config
enb-service
xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
xlog-fluentbit-service
{% endif %}
amarisoft-stats-service
amarisoft-rf-info-service
{% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %}
......@@ -176,6 +179,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_}
command-line = ${xamari-xlog-script:output}
hash-files = ${:command-line}
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
[xlog-fluentbit-config]
recipe = slapos.recipe.template
output = ${directory:etc}/${:_buildout_section_name_}.cfg
logfile = ${xamari-xlog-script:logfile}
forward-host = {{ slapparameter_dict.get('xlog_fluentbit_forward_host', '') }}
forward-port = {{ slapparameter_dict.get('xlog_fluentbit_forward_port', '') }}
forward-shared-key = {{ slapparameter_dict.get('xlog_fluentbit_forward_shared_key', '') }}
forward-self-hostname = {{ ors_id['ors-id'] }}
inline =
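# tail the xlog file and forward it over TLS (shared-key authenticated) to the configured fluentd/fluent-bit server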
[SERVICE]
flush 5
[INPUT]
name tail
path ${:logfile}
Read_from_Head True
[OUTPUT]
name forward
match *
Host ${:forward-host}
{%- if slapparameter_dict.get('xlog_fluentbit_forward_port') %}
Port ${:forward-port}
{%- endif %}
Shared_Key ${:forward-shared-key}
Self_Hostname ${:forward-self-hostname}
tls on
tls.verify off
[xlog-fluentbit-service]
recipe = slapos.cookbook:wrapper
fluentbit = {{ fluent_bit_location }}/bin/fluent-bit
fluentbit-config = ${xlog-fluentbit-config:output}
command-line = ${:fluentbit} -c ${:fluentbit-config}
wrapper-path = ${directory:service}/${:_buildout_section_name_}
hash-files = ${:fluentbit-config}
{% endif %}
[amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
......
......@@ -274,6 +274,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -316,6 +316,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -328,6 +328,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -5,6 +5,9 @@ parts =
gnb-config
enb-service
xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
xlog-fluentbit-service
{% endif %}
amarisoft-stats-service
amarisoft-rf-info-service
{% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %}
......@@ -153,6 +156,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_}
command-line = ${xamari-xlog-script:output}
hash-files = ${:command-line}
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
[xlog-fluentbit-config]
recipe = slapos.recipe.template
output = ${directory:etc}/${:_buildout_section_name_}.cfg
logfile = ${xamari-xlog-script:logfile}
forward-host = {{ slapparameter_dict.get('xlog_fluentbit_forward_host', '') }}
forward-port = {{ slapparameter_dict.get('xlog_fluentbit_forward_port', '') }}
forward-shared-key = {{ slapparameter_dict.get('xlog_fluentbit_forward_shared_key', '') }}
forward-self-hostname = {{ ors_id['ors-id'] }}
inline =
[SERVICE]
flush 5
[INPUT]
name tail
path ${:logfile}
Read_from_Head True
[OUTPUT]
name forward
match *
Host ${:forward-host}
{%- if slapparameter_dict.get('xlog_fluentbit_forward_port') %}
Port ${:forward-port}
{%- endif %}
Shared_Key ${:forward-shared-key}
Self_Hostname ${:forward-self-hostname}
tls on
tls.verify off
[xlog-fluentbit-service]
recipe = slapos.cookbook:wrapper
fluentbit = {{ fluent_bit_location }}/bin/fluent-bit
fluentbit-config = ${xlog-fluentbit-config:output}
command-line = ${:fluentbit} -c ${:fluentbit-config}
wrapper-path = ${directory:service}/${:_buildout_section_name_}
hash-files = ${:fluentbit-config}
{% endif %}
[amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
......
......@@ -172,6 +172,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -326,6 +326,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number",
"default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
}
}
}
......@@ -171,6 +171,16 @@ init =
except:
options['ipv4'] = "0.0.0.0"
[ors-id]
recipe = slapos.recipe.build
computer = $${slap-connection:computer-id}
title = $${slap-configuration:root-instance-title}
init =
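# combine hostname, computer id and root instance title into an identifier used as the fluent-bit Self_Hostname when forwarding xlog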
import socket
options['hostname'] = socket.gethostname()
ors_id = '__'.join(options[x] for x in ('hostname', 'computer', 'title'))
options['ors-id'] = ors_id
[switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype
enb = dynamic-template-enb:output
......@@ -201,6 +211,7 @@ extensions = jinja2.ext.do
extra-context =
raw monitor_template ${monitor2-template:output}
section ors_version ors-version
section ors_id ors-id
key enb amarisoft:enb
key sdr amarisoft:sdr
raw enb_template ${enb.jinja2.cfg:target}
......@@ -221,6 +232,7 @@ extra-context =
raw dnsmasq_template ${dnsmasq.jinja2.cfg:target}
raw dnsmasq_location ${dnsmasq:location}
key dnsmasq_config_path dnsmasq-config:output
raw fluent_bit_location ${fluent-bit:location}
[dynamic-template-gnb]
< = jinja2-template-base
......@@ -230,6 +242,7 @@ extensions = jinja2.ext.do
extra-context =
raw monitor_template ${monitor2-template:output}
section ors_version ors-version
section ors_id ors-id
key enb amarisoft:enb
key sdr amarisoft:sdr
raw gnb_template ${gnb.jinja2.cfg:target}
......@@ -244,6 +257,7 @@ extra-context =
raw default_n_antenna_ul ${default-params:default-n-antenna-ul}
raw rf_mode ${rf-mode:rf-mode}
raw python_path ${python3:location}
raw fluent_bit_location ${fluent-bit:location}
[dynamic-template-core-network]
< = jinja2-template-base
......
......@@ -13,6 +13,7 @@ extends =
../../component/pygolang/buildout.cfg
../../component/git/buildout.cfg
../../component/dnsmasq/buildout.cfg
../../component/fluent-bit/buildout.cfg
parts +=
template
......
......@@ -37,7 +37,6 @@ import urllib
from slapos.recipe.librecipe import generateHashFromFiles
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
skip = unittest.skip('port conflict between powerdns instances')
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
......@@ -93,9 +92,10 @@ class PowerDNSTestCase(SlapOSInstanceTestCase):
ns_record = []
for replicate_nb in range(1, dns_quantity + 1):
ns_id = 'ns%s' % replicate_nb
partition_id = self.getPartitionId(ns_id)
ns_record.append(ns_id + '.' + self.default_supported_zone)
expected_dict[ns_id + '-port'] = str(DNS_PORT)
expected_dict[ns_id + '-ipv6'] = self._ipv6_address
expected_dict[ns_id + '-ipv6'] = self.getPartitionIPv6(partition_id)
expected_dict['ns-record'] = ','.join(ns_record)
expected_dict['slave-amount'] = str(slave_amount)
......@@ -199,11 +199,11 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
).getConnectionParameterDict())
return parameter_dict_list
def dns_query(self, domain_name, subnet):
def dns_query(self, domain_name, subnet, ipv6, port):
message = dns.message.make_query(domain_name, 'A')
client_subnet_option = dns.edns.ECSOption(subnet)
message.use_edns(options=[client_subnet_option])
answer = dns.query.udp(message, self._ipv6_address, port=DNS_PORT)
answer = dns.query.udp(message, ipv6, port=port)
return answer.get_rrset(
dns.message.ANSWER,
dns.name.from_text(domain_name),
......@@ -211,8 +211,9 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
dns.rdatatype.CNAME
).to_text().split()[-1]
def _test_dns_resolver(self):
def _test_dns_resolver(self, dns_quantity):
slave_parameter_dict_dict = self.getSlaveParameterDictDict()
connection_dict = self.computer_partition.getConnectionParameterDict()
subnet_dict = {
'africa': AFRICAN_SUBNET,
'china-telecom': CHINA_TELECOM_SUBNET,
......@@ -248,13 +249,19 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
slave_parameter_dict['record'], slave_parameter_dict['applicable-zone']
)
for region in subnet_dict:
for replicate_nb in range(1, dns_quantity + 1):
ns_id = 'ns%s' % replicate_nb
self.assertEqual(
slave_parameter_dict.get(
region,
'%s.%s.' % (
default_rr_dict[region], slave_parameter_dict['origin'])
),
self.dns_query(domain_name, subnet_dict[region])
self.dns_query(
domain_name,
subnet_dict[region],
connection_dict[ns_id + '-ipv6'],
int(connection_dict[ns_id + '-port']))
)
def _test_slaves(self, dns_quantity=1):
......@@ -262,7 +269,7 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
dns_quantity=dns_quantity,
slave_amount=len(self.getSlaveParameterDictDict())
)
self._test_dns_resolver()
self._test_dns_resolver(dns_quantity)
class TestSlaveRequest(PowerDNSSlaveTestCase):
......@@ -421,7 +428,7 @@ class TestSlaveRequestDomains(TestSlaveRequest):
}
# Because all powerdns instances run under the same ip address during tests,
# Because all powerdns instances run under the same ipv4 address during tests,
# there is a port conflict between these instances
@skip
class TestMultipleInstances(TestSlaveRequestDomains):
......
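For context, the reworked dns_query helper sends an EDNS Client Subnet query to the per-partition address and port published in the connection parameters, instead of a single hard-coded IPv6 address. A minimal standalone sketch of that pattern with dnspython (server address, port and subnet below are made-up values):

import dns.edns
import dns.message
import dns.query

def ecs_query(domain_name, subnet, server_ip, port):
    # Build an A query carrying the client subnet as an EDNS option, so a
    # geo-aware PowerDNS answers as it would for a resolver in that region.
    message = dns.message.make_query(domain_name, 'A')
    message.use_edns(options=[dns.edns.ECSOption(subnet)])
    # Query the name server on its own (partition) address and port.
    return dns.query.udp(message, server_ip, port=port, timeout=10)

# Hypothetical usage:
# answer = ecs_query('www.example.com', '41.0.0.1', 'fd00::1', 5353)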
......@@ -15,7 +15,7 @@
[instance-profile]
filename = instance.cfg.in
md5sum = 39cdfbd3bdfcd48eddb4132ff9dcda62
md5sum = 7f9749ab75475bd5d98be27a570c7731
[instance-default]
filename = instance-default.cfg.in
......
......@@ -18,12 +18,12 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename}
extensions = jinja2.ext.do
context =
key slapparameter_dict slap-configuration:configuration
raw software_parts_directory {{ buildout['parts-directory'] }}
raw proftpd_bin {{ proftpd_bin }}
raw software_parts_directory {{ buildout['parts-directory'] }}
raw proftpd_bin {{ proftpd_bin }}
raw ftpasswd_bin {{ ftpasswd_bin }}
raw ftpdctl_bin {{ ftpdctl_bin }}
raw ssh_keygen_bin {{ ssh_keygen_bin }}
raw template_monitor {{ template_monitor }}
raw ftpdctl_bin {{ ftpdctl_bin }}
raw ssh_keygen_bin {{ ssh_keygen_bin }}
raw template_monitor {{ template_monitor }}
[instance-default]
<= instance-template
......
......@@ -85,6 +85,7 @@ KEDIFA_PORT = '15080'
# IP to originate requests from
# must not be the partition's own IP
SOURCE_IP = '127.0.0.1'
SOURCE_IPV6 = '::1'
# IP on which test run, in order to mimic HTTP[s] access
TEST_IP = os.environ['SLAPOS_TEST_IPV4']
......@@ -321,6 +322,10 @@ class TestDataMixin(object):
for replacement in sorted(data_replacement_dict.keys()):
value = data_replacement_dict[replacement]
if isinstance(value, list):
for v in value:
runtime_data = runtime_data.replace(v, replacement)
else:
runtime_data = runtime_data.replace(value, replacement)
longMessage = self.longMessage
......@@ -439,12 +444,15 @@ class TestDataMixin(object):
# sent like this to the real master
parameter_dict['_'] = json.loads(parameter_dict['_'])
parameter_dict['timestamp'] = '@@TIMESTAMP@@'
# remove ip_list since it's unused and the order may be unstable
parameter_dict.pop('ip_list', None)
cluster_request_parameter_list.append(parameter_dict)
# XXX: Dirty decode/encode/decode...?
data_replacement_dict = {
'@@_ipv4_address@@': self._ipv4_address,
'@@_ipv6_address@@': self._ipv6_address,
'@@_ipv6_address@@': [
self.master_ipv6, self.kedifa_ipv6, self.caddy_frontend1_ipv6],
'@@_server_http_port@@': str(self._server_http_port),
'@@_server_https_auth_port@@': str(self._server_https_auth_port),
'@@_server_https_port@@': str(self._server_https_port),
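Since the kedifa and caddy-frontend partitions no longer share the master's IPv6 address, one placeholder may now stand for several runtime values. A small sketch of the replacement logic used above (dictionary contents are illustrative):

def apply_replacements(runtime_data, data_replacement_dict):
    # A placeholder maps either to one literal value or to a list of
    # values; every occurrence collapses onto the same placeholder so the
    # stored test data stays stable across partitions.
    for replacement in sorted(data_replacement_dict):
        value = data_replacement_dict[replacement]
        for v in (value if isinstance(value, list) else [value]):
            runtime_data = runtime_data.replace(v, replacement)
    return runtime_data

# Illustrative call:
# apply_replacements(dump, {
#     '@@_ipv6_address@@': ['fd00::10', 'fd00::11', 'fd00::12']})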
......@@ -736,7 +744,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.backend_url = 'http://%s:%s/' % server.server_address
server_process = multiprocessing.Process(
target=server.serve_forever, name='HTTPServer')
target=server.serve_forever, name='HTTPServer', daemon=True)
server_process.start()
# from now on, socket is used by server subprocess, we can close it
server.socket.close()
......@@ -744,7 +752,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.backend_https_url = 'https://%s:%s/' % server_https.server_address
server_https_process = multiprocessing.Process(
target=server_https.serve_forever, name='HTTPSServer')
target=server_https.serve_forever, name='HTTPSServer', daemon=True)
server_https_process.start()
server_https.socket.close()
cls.logger.debug('Started process %s' % (server_https_process,))
......@@ -756,7 +764,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
(cls._ipv4_address, cls._server_netloc_a_http_port),
NetlocHandler)
netloc_a_http_process = multiprocessing.Process(
target=netloc_a_http.serve_forever, name='netloc-a-http')
target=netloc_a_http.serve_forever, name='netloc-a-http', daemon=True)
netloc_a_http_process.start()
netloc_a_http.socket.close()
......@@ -764,7 +772,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
(cls._ipv4_address, cls._server_netloc_b_http_port),
NetlocHandler)
netloc_b_http_process = multiprocessing.Process(
target=netloc_b_http.serve_forever, name='netloc-b-http')
target=netloc_b_http.serve_forever, name='netloc-b-http', daemon=True)
netloc_b_http_process.start()
netloc_b_http.socket.close()
......@@ -821,7 +829,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
% server_https_auth.server_address
self.server_https_auth_process = multiprocessing.Process(
target=server_https_auth.serve_forever, name='HTTPSServerAuth')
target=server_https_auth.serve_forever, name='HTTPSServerAuth', daemon=True)
self.server_https_auth_process.start()
server_https_auth.socket.close()
self.logger.debug('Started process %s' % (self.server_https_auth_process,))
......@@ -1063,7 +1071,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
generate_auth_url = parameter_dict.pop('%skey-generate-auth-url' % (
prefix,))
upload_url = parameter_dict.pop('%skey-upload-url' % (prefix,))
kedifa_ipv6_base = 'https://[%s]:%s' % (self._ipv6_address, KEDIFA_PORT)
kedifa_ipv6_base = 'https://[%s]:%s' % (self.kedifa_ipv6, KEDIFA_PORT)
base = '^' + kedifa_ipv6_base.replace(
'[', r'\[').replace(']', r'\]') + '/.{32}'
self.assertRegex(
......@@ -1078,7 +1086,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
kedifa_caucase_url = parameter_dict.pop('kedifa-caucase-url')
self.assertEqual(
kedifa_caucase_url,
'http://[%s]:%s' % (self._ipv6_address, CAUCASE_PORT),
'http://[%s]:%s' % (self.kedifa_ipv6, CAUCASE_PORT),
)
return generate_auth_url, upload_url
......@@ -1244,6 +1252,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.setUp = lambda self: self.fail('Setup Class failed.')
raise
# Look up the partitions' IPv6 addresses
cls.master_ipv6 = cls.computer_partition_ipv6_address
kedifa_partition = cls.getPartitionId('kedifa')
cls.kedifa_ipv6 = cls.getPartitionIPv6(kedifa_partition)
caddy_frontend1_partition = cls.getPartitionId('caddy-frontend-1')
cls.caddy_frontend1_ipv6 = cls.getPartitionIPv6(caddy_frontend1_partition)
class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
def _get_backend_haproxy_configuration(self):
......@@ -1380,7 +1395,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
})
self.assertEqual(
expected_parameter_dict,
......@@ -1429,8 +1444,8 @@ class TestMasterRequestDomain(HttpFrontendTestCase, TestDataMixin):
self.assertEqual(
{
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '0',
'rejected-slave-amount': '0',
......@@ -1461,8 +1476,8 @@ class TestMasterRequest(HttpFrontendTestCase, TestDataMixin):
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'None',
'accepted-slave-amount': '0',
'rejected-slave-amount': '0',
......@@ -1503,8 +1518,11 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
_, kedifa_key_pem, _, kedifa_csr_pem = createCSR('Kedifa User')
_, backend_client_key_pem, _, backend_client_csr_pem = createCSR(
'Backend Client User')
parameter_dict = cls.requestDefaultInstance(
).getConnectionParameterDict()
cls.computer_partition = cls.requestDefaultInstance()
# Compute IPv6 here since super()._setUpClass failed
cls.computer_partition_ipv6_address = cls.getPartitionIPv6(
cls.computer_partition.getId())
parameter_dict = cls.computer_partition.getConnectionParameterDict()
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
cls._fetchBackendClientCaCertificateFile(parameter_dict)
with open(cls.kedifa_caucase_ca_certificate_file) as fh:
......@@ -1577,8 +1595,8 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'None',
'accepted-slave-amount': '0',
'rejected-slave-amount': '0',
......@@ -1937,34 +1955,36 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
monitor_setup_url_key = 'monitor-setup-url'
def test_monitor_setup(self):
IP = self._ipv6_address
MASTER_IP = self.master_ipv6
KEDIFA_IP = self.kedifa_ipv6
CADDY_IP = self.caddy_frontend1_ipv6
self.monitor_configuration_list = [
{
'htmlUrl': 'https://[%s]:8401/public/feed' % (IP,),
'htmlUrl': 'https://[%s]:8401/public/feed' % (MASTER_IP,),
'text': 'testing partition 0',
'title': 'testing partition 0',
'type': 'rss',
'url': 'https://[%s]:8401/share/private/' % (IP,),
'url': 'https://[%s]:8401/share/private/' % (MASTER_IP,),
'version': 'RSS',
'xmlUrl': 'https://[%s]:8401/public/feed' % (IP,),
'xmlUrl': 'https://[%s]:8401/public/feed' % (MASTER_IP,),
},
{
'htmlUrl': 'https://[%s]:8402/public/feed' % (IP,),
'htmlUrl': 'https://[%s]:8402/public/feed' % (KEDIFA_IP,),
'text': 'kedifa',
'title': 'kedifa',
'type': 'rss',
'url': 'https://[%s]:8402/share/private/' % (IP,),
'url': 'https://[%s]:8402/share/private/' % (KEDIFA_IP,),
'version': 'RSS',
'xmlUrl': 'https://[%s]:8402/public/feed' % (IP,),
'xmlUrl': 'https://[%s]:8402/public/feed' % (KEDIFA_IP,),
},
{
'htmlUrl': 'https://[%s]:8411/public/feed' % (IP,),
'htmlUrl': 'https://[%s]:8411/public/feed' % (CADDY_IP,),
'text': 'caddy-frontend-1',
'title': 'caddy-frontend-1',
'type': 'rss',
'url': 'https://[%s]:8411/share/private/' % (IP,),
'url': 'https://[%s]:8411/share/private/' % (CADDY_IP,),
'version': 'RSS',
'xmlUrl': 'https://[%s]:8411/public/feed' % (IP,),
'xmlUrl': 'https://[%s]:8411/public/feed' % (CADDY_IP,),
},
]
connection_parameter_dict = self\
......@@ -2098,8 +2118,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
self.assertNodeInformationWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '62',
'rejected-slave-amount': '0',
......@@ -2429,14 +2449,14 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
# check out access via IPv6
out_ipv6, err_ipv6 = self._curl(
parameter_dict['domain'], self._ipv6_address, HTTPS_PORT)
parameter_dict['domain'], self.caddy_frontend1_ipv6, HTTPS_PORT,
source_ip=SOURCE_IPV6)
try:
j = json.loads(out_ipv6.decode())
except Exception:
raise ValueError('JSON decode problem in:\n%s' % (out_ipv6.decode(),))
self.assertEqual(
self._ipv6_address,
SOURCE_IPV6,
j['Incoming Headers']['x-forwarded-for']
)
......@@ -4618,7 +4638,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
result.headers['Location']
)
def _curl(self, domain, ip, port, cookie=None):
def _curl(self, domain, ip, port, cookie=None, source_ip=None):
replacement_dict = dict(
domain=domain, ip=ip, port=port)
curl_command = [
......@@ -4628,6 +4648,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
]
if cookie is not None:
curl_command.extend(['--cookie', cookie])
if source_ip is not None:
curl_command.extend(['--interface', source_ip])
curl_command.extend([
'https://%(domain)s:%(port)s/' % replacement_dict])
prc = subprocess.Popen(
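The new source_ip parameter relies on curl's --interface option, which binds the outgoing connection to a given local address so the backend sees SOURCE_IPV6 in X-Forwarded-For. A reduced sketch of the pattern (the real test builds a longer curl command; the flags below are only a plausible minimal subset):

import subprocess

def curl(url, source_ip=None):
    # --interface makes curl bind the outgoing socket to the given local
    # address, so requests can originate from ::1 rather than the partition IP.
    curl_command = ['curl', '--silent', '--insecure']
    if source_ip is not None:
        curl_command.extend(['--interface', source_ip])
    curl_command.append(url)
    prc = subprocess.Popen(
        curl_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = prc.communicate()
    return out, err

# out, err = curl('https://[fd00::2]:11443/', source_ip='::1')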
......@@ -4797,7 +4819,47 @@ class TestEnableHttp2ByDefaultFalseSlave(TestSlave):
test_enable_http3_false_http_version = '1'
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
class ReplicateSlaveMixin(object):
def frontends1And2HaveDifferentIPv6(self):
_, *prefixlen = self._ipv6_address.split('/')
return bool(prefixlen and int(prefixlen[0]) < 127)
def requestSecondFrontend(self, final_state='stopped'):
ipv6_collision = not self.frontends1And2HaveDifferentIPv6()
# now instantiate 2nd partition in started state
# and due to port collision, stop the first one...
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.requestSlaves()
try:
self.slap.waitForInstance(self.instance_max_retry)
except Exception:
if ipv6_collision:
raise
# for now, accept failing promise due to stopped frontend
finally:
# ...and be nice, put back the first one online
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': final_state,
})
self.requestDefaultInstance()
for _ in range(3):
try:
self.slap.waitForInstance(self.instance_max_retry)
except Exception:
if ipv6_collision:
raise
# for now, accept failing promise due to stopped frontend
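The mixin's IPv6 check above determines whether the test node's range is wide enough for the two frontends to receive distinct addresses; only then can both run at once without a port collision. A minimal sketch of the same check, with made-up sample values:

def have_distinct_ipv6(ipv6_address):
    # A range such as 'fd00::/64' can hand out a different address per
    # frontend; a bare address or a /127-/128 range cannot, so the two
    # frontends would collide on the HTTPS port.
    _, *prefixlen = ipv6_address.split('/')
    return bool(prefixlen and int(prefixlen[0]) < 127)

# have_distinct_ipv6('fd00::/64')  -> True
# have_distinct_ipv6('fd00::1')    -> False  (no range at all)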
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin, ReplicateSlaveMixin):
instance_parameter_dict = {
'domain': 'example.com',
'port': HTTPS_PORT,
......@@ -4819,27 +4881,12 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
},
}
def frontends1And2HaveDifferentIPv6(self):
_, *prefixlen = self._ipv6_address.split('/')
return bool(prefixlen and int(prefixlen[0]) < 127)
def test(self):
# now instantiate 2nd partition in started state
# and due to port collision, stop the first one...
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.requestSlaves()
self.slap.waitForInstance(self.instance_max_retry)
# ...and be nice, put back the first one online
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': 'stopped',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.requestSecondFrontend()
self.updateSlaveConnectionParameterDictDict()
# the real assertions follow...
......@@ -4871,7 +4918,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
'url': 'http://replicate.example.com',
'site_url': 'http://replicate.example.com',
'secure_access': 'https://replicate.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
},
parameter_dict
)
......@@ -4906,7 +4953,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
)
class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase):
class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase, ReplicateSlaveMixin):
instance_parameter_dict = {
'domain': 'example.com',
'port': HTTPS_PORT,
......@@ -4929,27 +4976,7 @@ class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase):
}
def test_extra_slave_instance_list_not_present_destroyed_request(self):
# now instantiate 2nd partition in started state
# and due to port collision, stop the first one
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
# now start back first instance, and destroy 2nd one
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': 'destroyed',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.requestSecondFrontend(final_state='destroyed')
buildout_file = os.path.join(
self.getMasterPartitionPath(), 'instance-master.cfg')
......@@ -5331,8 +5358,8 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '12',
'rejected-slave-amount': '0',
......@@ -5838,8 +5865,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '1',
'rejected-slave-amount': '0',
......@@ -5930,8 +5957,8 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '2',
'rejected-slave-amount': '0',
......@@ -6192,8 +6219,8 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
self.assertRejectedSlavePromiseWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com',
'accepted-slave-amount': '3',
'rejected-slave-amount': '28',
......@@ -6428,7 +6455,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
'url': 'http://defaultpathunsafe.example.com',
'site_url': 'http://defaultpathunsafe.example.com',
'secure_access': 'https://defaultpathunsafe.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
},
parameter_dict
)
......@@ -6722,8 +6749,8 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
'kedifa'].pop('monitor-password')
)
backend_client_caucase_url = 'http://[%s]:8990' % (self._ipv6_address,)
kedifa_caucase_url = 'http://[%s]:15090' % (self._ipv6_address,)
backend_client_caucase_url = 'http://[%s]:8990' % (self.master_ipv6,)
kedifa_caucase_url = 'http://[%s]:15090' % (self.kedifa_ipv6,)
expected_partition_parameter_dict_dict = {
'caddy-frontend-1': {
'X-software_release_url': base_software_url,
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -80,16 +70,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -119,16 +99,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -5,16 +5,6 @@
"enable-http2-by-default": "false",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -844,16 +834,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -885,16 +865,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -5,16 +5,6 @@
"caucase_port": "15090",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -39,16 +29,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -77,16 +57,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -3,16 +3,6 @@
"caucase_port": "15090",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -37,16 +27,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -75,16 +55,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -38,16 +28,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -77,16 +57,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -52,16 +42,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -91,16 +71,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -52,16 +42,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -91,16 +71,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -8,16 +8,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -56,16 +46,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -95,16 +75,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......@@ -134,16 +104,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-2",
"ip_list": [
[
"T-3",
"@@_ipv4_address@@"
],
[
"T-3",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-3",
......
......@@ -38,23 +38,3 @@ T-2:slave-instrospection-nginx-{hash-generic}-on-watch RUNNING
T-2:slave-introspection-safe-graceful EXITED
T-2:trafficserver-{hash-generic}-on-watch RUNNING
T-2:trafficserver-reload EXITED
T-3:backend-client-login-certificate-caucase-updater-on-watch STOPPED
T-3:backend-haproxy-{hash-generic}-on-watch STOPPED
T-3:backend-haproxy-rsyslogd-{hash-generic}-on-watch STOPPED
T-3:backend-haproxy-safe-graceful EXITED
T-3:bootstrap-monitor EXITED
T-3:certificate_authority-{hash-generic}-on-watch STOPPED
T-3:crond-{hash-generic}-on-watch STOPPED
T-3:expose-csr-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-rsyslogd-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-safe-graceful EXITED
T-3:kedifa-login-certificate-caucase-updater-on-watch STOPPED
T-3:kedifa-updater-{hash-generic}-on-watch STOPPED
T-3:logrotate-setup-validate EXITED
T-3:monitor-httpd-{hash-generic}-on-watch STOPPED
T-3:monitor-httpd-graceful EXITED
T-3:slave-instrospection-nginx-{hash-generic}-on-watch STOPPED
T-3:slave-introspection-safe-graceful EXITED
T-3:trafficserver-{hash-generic}-on-watch STOPPED
T-3:trafficserver-reload EXITED
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -843,16 +833,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -883,16 +863,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -5,16 +5,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -67,16 +57,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -107,16 +87,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -257,16 +247,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -297,16 +277,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -4,16 +4,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -65,16 +55,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -105,16 +85,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -6,16 +6,6 @@
"full_address_list": [],
"http3-port": "11443",
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -845,16 +835,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -885,16 +865,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -6,16 +6,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -214,16 +204,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -255,16 +235,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -6,16 +6,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -54,16 +44,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -95,16 +75,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -6,16 +6,6 @@
"domain": "example.com",
"full_address_list": [],
"instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080",
"plain_http_port": "11080",
"port": "11443",
......@@ -54,16 +44,6 @@
},
"full_address_list": [],
"instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-1",
......@@ -95,16 +75,6 @@
},
"full_address_list": [],
"instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0",
"slap_computer_id": "local",
"slap_computer_partition_id": "T-2",
......
......@@ -15,4 +15,4 @@
[instance.cfg.in]
filename = instance.cfg.in
md5sum = 69237df07b8819e2eb683702b8cd199a
md5sum = 361991f333119f22c8266dc8bde7bc57
......@@ -110,7 +110,7 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:_buildout_se
recipe = plone.recipe.command
command =
if [ ! -f '${:csr}' ] ; then
{{ openssl_bin }} req \
{{ openssl_bin }} req \
-newkey rsa:2048 \
-batch \
-new \
......
......@@ -158,14 +158,14 @@ class TestResticRestServer(SlapOSInstanceTestCase):
return cnx.sock._sslobj.getpeercert()
cert_before = _getpeercert()
# execute certificate updater two month later, when it's time to renew certificate.
# execute certificate updater when it's time to renew certificate.
# use a timeout, because this service runs forever
subprocess.run(
(
'timeout',
'5',
'faketime',
'+2 months',
'+63 days',
os.path.join(
self.computer_partition_root_path,
'etc/service/rest-server-certificate-updater'),
......
......@@ -3,7 +3,7 @@
# Seleniumrunner's responsibility is to install Xvfb as
# ${buildout:parts-directory}/xserver/bin/Xvfb, a default firefox as
# ${buildout:bin-directory}/firefox and a geckodriver as
# ${buildout:bin-directory}/geckodriver for erp5testnode.
# ${buildout:bin-directory}/geckodriver for erp5testnode.
[buildout]
extends =
......
......@@ -125,7 +125,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual(
f'https://[{self._ipv6_address}]:8888/tree',
f'https://[{self.getPartitionIPv6(self.getPartitionId("jupyter"))}]:8888/tree',
param_dict['jupyter-url']
)
......
......@@ -15,4 +15,4 @@
[template]
filename = instance.cfg
md5sum = ed0f91f39d5eda903938aa527625f40d
md5sum = f10fbca22d1d30dd7a4f36e1cd521b97
......@@ -95,6 +95,11 @@ inline =
echo "To work on a test, execute:"
echo " SLAPOS_TEST_DEBUG=1 {{ interpreter }} -m unittest discover -v"
echo "from a folder containing software release test."
echo "Tip: you may want to also add"
echo " SLAPOS_TEST_SKIP_SOFTWARE_REBUILD=1"
echo " SLAPOS_TEST_SKIP_SOFTWARE_CHECK=1"
echo " SLAPOS_TEST_LOG_DIRECTORY=<some-empty-directory>"
echo "See https://lab.nexedi.com/nexedi/slapos/tree/master/software/slapos-sr-testing"
echo
[publish]
......
......@@ -15,11 +15,11 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = b406265f591f54a0d5a6aa3ff8522764
md5sum = c484bba770c6404ba0a5b2a958b07a68
[instance]
_update_hash_filename_ = instance.cfg.in
md5sum = f322033a7670b9be29b1bf1bf9024b87
md5sum = 9658a11340c018de816d0de40683706a
[instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in
......
......@@ -593,7 +593,11 @@ output = $${directory:bin}/$${:_buildout_section_name_}
embedded-request-exitcode-file = $${directory:statefiles}/embedded-request.exitcode
standalone-ran-before-flag = $${directory:statefiles}/standalone-ran-before.flag
shared-part-list =
{%- if 'shared-part-list' in slap_connection %}
{{ slap_connection['shared-part-list'] | indent(2) }}
{%- else %}
{{ """${buildout:shared-part-list}""" | indent(2) }}
{%- endif %}
context =
raw python_for_standalone ${python-for-standalone:executable}
raw request_script_path $${directory:project}/request-embedded-instance.sh
......
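The new shared-part-list fallback only takes the value forwarded by the requester when it exists; otherwise the embedded standalone slapos keeps using the software release's own option. A standalone sketch of the same jinja2 conditional, rendered outside buildout with illustrative values:

import jinja2

template = jinja2.Template(
    "shared-part-list =\n"
    "{%- if 'shared-part-list' in slap_connection %}\n"
    "  {{ slap_connection['shared-part-list'] | indent(2) }}\n"
    "{%- else %}\n"
    "  ${buildout:shared-part-list}\n"
    "{%- endif %}\n")

# Requester forwarded shared parts: they are reused as-is.
print(template.render(slap_connection={'shared-part-list': '/srv/shared'}))
# Nothing forwarded: fall back to the buildout option.
print(template.render(slap_connection={}))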
......@@ -33,6 +33,7 @@ url = ${instance-theia:output}
output = $${buildout:directory}/instance-theia.cfg
extensions = jinja2.ext.do
context =
section slap_connection slap-connection
jsonkey default_parameter_dict :default-parameters
key parameter_dict slap-configuration:configuration
key root_title slap-configuration:root-instance-title
......
......@@ -26,6 +26,7 @@
##############################################################################
from __future__ import unicode_literals
import configparser
import json
import logging
import os
......@@ -33,6 +34,7 @@ import re
import subprocess
import sqlite3
import time
import unittest
import netaddr
import pexpect
......@@ -45,6 +47,7 @@ from six.moves.urllib.parse import urlparse, urljoin
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError
from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath
from slapos.proxy.db_version import DB_VERSION
from slapos.slap.standalone import SlapOSConfigWriter
theia_software_release_url = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
......@@ -246,6 +249,12 @@ class TestTheia(TheiaTestCase):
proxy_path = self.getPath('srv', 'runner', 'var', 'proxy.db')
query = "SELECT partition_reference, address FROM partition_network%s" % DB_VERSION
ipv6, *prefixlen = self._ipv6_address.split('/')
if not prefixlen:
raise unittest.SkipTest('No IPv6 range')
elif int(prefixlen[0]) >= 123:
raise unittest.SkipTest('IPv6 range too small: %s' % self._ipv6_address)
with sqlite3.connect(proxy_path) as db:
rows = db.execute(query).fetchall()
partitions = set(p for p, _ in rows)
......@@ -510,6 +519,32 @@ class TestTheiaEnv(TheiaTestCase):
self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME'])
class TestTheiaSharedPath(TheiaTestCase):
bogus_path = 'bogus'
@classmethod
def setUpClass(cls):
super(TestTheiaSharedPath, cls).setUpClass()
# Change shared part list to include bogus paths
cls.slap._shared_part_list.append(cls.bogus_path)
SlapOSConfigWriter(cls.slap).writeConfig(cls.slap._slapos_config)
# Re-instantiate
cls.slap._force_slapos_node_instance_all = True
try:
cls.waitForInstance()
finally:
cls.slap._force_slapos_node_instance_all = False
def test(self):
theia_cfg_path = self.getPath('srv', 'runner', 'etc', 'slapos.cfg')
cfg = configparser.ConfigParser()
cfg.read(theia_cfg_path)
self.assertTrue(cfg.has_option('slapos', 'shared_part_list'))
shared_parts_string = cfg.get('slapos', 'shared_part_list')
shared_parts_list = [s.strip() for s in shared_parts_string.splitlines()]
self.assertIn(self.bogus_path, shared_parts_list)
class ResilientTheiaMixin(object):
@classmethod
def setUpClass(cls):
......
......@@ -184,6 +184,7 @@ class ExportAndImportMixin(object):
def assertPromiseSucess(self):
# Force promises to recompute regardless of periodicity
old_value = self.slap._force_slapos_node_instance_all
self.slap._force_slapos_node_instance_all = True
try:
self.slap.waitForInstance(error_lines=0)
......@@ -193,8 +194,8 @@ class ExportAndImportMixin(object):
self.assertNotIn('ERROR export script', s)
self.assertNotIn("Promise 'resiliency-import-promise.py' failed", s)
self.assertNotIn('ERROR import script', s)
else:
pass
finally:
self.slap._force_slapos_node_instance_all = old_value
def _doExport(self):
# Compute last modification of the export exitcode file
......@@ -247,6 +248,7 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC
def assertPromiseFailure(self, *msg):
# Force promises to recompute regardless of periodicity
old_value = self.slap._force_slapos_node_instance_all
self.slap._force_slapos_node_instance_all = True
try:
self.slap.waitForInstance(error_lines=0)
......@@ -256,6 +258,8 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC
self.assertIn(m, s)
else:
self.fail('No promise failed')
finally:
self.slap._force_slapos_node_instance_all = old_value
def assertScriptFailure(self, func, errorfile, exitfile, *msg):
self.assertRaises(
......
......@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 30a1e738a8211887e75a5e75820e5872
md5sum = b056ec4fec956c2a07befd2fec116f04
[template-zeo]
filename = instance-zeo.cfg.in
......
......@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%}
......
......@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e
[template-erp5]
filename = instance-erp5.cfg.in
md5sum = 30a1e738a8211887e75a5e75820e5872
md5sum = 098e1d02159aeca9b36f2a0726b7b230
[template-zeo]
filename = instance-zeo.cfg.in
......
......@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%}
......@@ -280,7 +280,10 @@ software-type = zope
{% for custom_name, zope_parameter_dict in six.iteritems(zope_partition_dict) -%}
{% set partition_name = 'zope-' ~ custom_name -%}
{% set section_name = 'request-' ~ partition_name -%}
{% set check_software_url_section_name = 'check-software-url' ~ partition_name -%}
{% set promise_software_url_section_name = 'promise-software-url' ~ partition_name -%}
{% set check_test_runner_url_section_name = 'check-test-runner-url' ~ partition_name -%}
{% set promise_test_runner_url_section_name = 'promise-test-runner-url' ~ partition_name -%}
{% set zope_family = zope_parameter_dict.get('family', 'default') -%}
{% do zope_family_name_list.append(zope_family) %}
{% set backend_path = zope_parameter_dict.get('backend-path', '') % {'site-id': site_id} %}
......@@ -315,7 +318,26 @@ config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activi
{% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
[{{ promise_software_url_section_name }}]
[{{ check_test_runner_url_section_name }}]
# Promise to wait for zope partition to receive the expected test-runner URL
recipe = slapos.cookbook:check_parameter
value = {{ '${' ~ section_name ~ ':config-test-runner-apache-url-list}' }}
expected-not-value = not-ready
expected-value =
path = ${directory:bin}/${:_buildout_section_name_}
[{{ promise_test_runner_url_section_name }}]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ '${' ~ check_test_runner_url_section_name ~ ':path}' }}
{% do root_common.section(promise_test_runner_url_section_name) -%}
{% endif -%}
[{{ check_software_url_section_name }}]
# Promise to wait for zope partition to use the expected software URL,
# used on upgrades.
recipe = slapos.cookbook:check_parameter
......@@ -324,9 +346,14 @@ expected-not-value =
expected-value = ${slap-connection:software-release-url}
path = ${directory:bin}/${:_buildout_section_name_}
[{{ promise_software_url_section_name }}]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ '${' ~ check_software_url_section_name ~ ':path}' }}
{% do root_common.section(promise_software_url_section_name) -%}
{% endif -%}
{% endfor -%}
{# if not explicitly configured, connect jupyter to first zope family, which -#}
......
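For readability, the two promises added above follow the same pattern: slapos.cookbook:check_parameter generates a small check script, and monitor-promise-base wraps it as a promise that runs the script. The recipe internals are not shown in this diff; as an assumption, its semantics can be approximated as:

def check_parameter(value, expected_value=None, expected_not_value=None):
    # Assumed semantics: the check succeeds (returns 0) only when the
    # published value equals the expected one, or at least differs from a
    # known placeholder such as 'not-ready'.
    if expected_value is not None and value != expected_value:
        return 1
    if expected_not_value is not None and value == expected_not_value:
        return 1
    return 0

# Assumed examples mirroring the sections above:
# check_parameter(url_list, expected_not_value='not-ready')      -> 0 once published
# check_parameter(current_url, expected_value=new_software_url)  -> 0 after upgrade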
......@@ -297,7 +297,7 @@ simplegeneric = 0.8.1
singledispatch = 3.4.0.3
six = 1.16.0
slapos.cookbook = 1.0.329
slapos.core = 1.10.1
slapos.core = 1.10.2
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5
......