Commit 79b9c851 authored by Jérome Perrin

Update Release Candidate

parents 5b55ce53 2e5ab151
@@ -25,6 +25,8 @@ shared = true
 version = 1.7.2
 md5sum = 9a00835e4da8c215348e263b550fc130
 url = https://archive.apache.org/dist/apr/apr-${:version}.tar.bz2
+environment =
+  LDFLAGS=-Wl,-rpath=${libuuid:location}/lib

 [apr-util]
 recipe = slapos.recipe.cmmi
@@ -35,6 +37,9 @@ md5sum = b6e8c9b31d938fe5797ceb0d1ff2eb69
 configure-options =
   --with-apr=${apr:location}
   --with-expat=${libexpat:location}
+  --without-sqlite3
+environment =
+  LDFLAGS=-Wl,-rpath=${libuuid:location}/lib

 [apache]
 recipe = slapos.recipe.cmmi
@@ -97,8 +102,8 @@ configure-options = --disable-static
 environment =
   PATH=${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
   PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
-  CPPFLAGS =-I${libuuid:location}/include -I${openssl:location}/include -I${apr:location}/include -I${apr-util:location}/include
-  LDFLAGS =-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${apr:location}/lib -Wl,-rpath=${apr:location}/lib -L${apr-util:location}/lib -Wl,-rpath=${apr-util:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
+  CPPFLAGS=-I${libuuid:location}/include -I${openssl:location}/include -I${apr:location}/include -I${apr-util:location}/include
+  LDFLAGS=-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib -Wl,-rpath=${pcre:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${apr:location}/lib -Wl,-rpath=${apr:location}/lib -L${apr-util:location}/lib -Wl,-rpath=${apr-util:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib

 [apache-antiloris]
 # Note: This component tries to write in a [apache] parts, which is now
...
@@ -30,6 +30,6 @@ configure-options =
   --with-systemdsystemunitdir=no
 environment =
   PATH=${pkgconfig:location}/bin:${glib:location}/bin:%(PATH)s
-  CFLAGS=-I${gdbm:location}/include
-  LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib
-  PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig
+  CFLAGS=-I${gdbm:location}/include -I${libexpat:location}/include
+  LDFLAGS=-L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${glib:location}/lib -Wl,-rpath=${glib:location}/lib -L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
+  PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig:${dbus:location}/lib/pkgconfig:${libexpat:location}/lib/pkgconfig
@@ -9,7 +9,7 @@ recipe = slapos.recipe.build:gitclone
 repository = https://lab.nexedi.com/nexedi/babeld.git
 branch = master
 git-executable = ${git:location}/bin/git
-revision = v1.12.1-nxd1
+revision = v1.12.1-nxd3

 [babeld]
 recipe = slapos.recipe.cmmi
...
@@ -36,7 +36,7 @@ shared = true
 pyyaml = ${pyyaml-download:target}
 pyaml = ${pyaml-download:target}
 init =
   # add the python executable in the options dict so that
   # buildout signature changes if python executable changes
   import sys
   options['python-executable'] = sys.executable
...
@@ -11,8 +11,9 @@ recipe = slapos.recipe.cmmi
 shared = true
 url = https://ftp.gnu.org/gnu/coreutils/coreutils-9.0.tar.xz
 md5sum = 0d79ae8a6124546e3b94171375e5e5d0
 configure-options =
   --disable-libcap
+  --without-selinux
   --prefix=@@LOCATION@@
 environment =
   PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
...
@@ -11,3 +11,4 @@ md5sum = 0bbd38f12656e4728e2f7c4708aec014
 configure-options =
   --disable-static
   --enable-libgdbm-compat
+  --without-readline
@@ -31,17 +31,25 @@ environment =
   GOROOT_FINAL=${:location}
   ${:environment-extra}

-# TestChown and TestSCMCredentials currently fail in a user-namespace
+# TestChown currently fails in a user-namespace
 # https://github.com/golang/go/issues/42525
 # the patches apply to go >= 1.12
 patch-options = -p1
 patches =
   ${:_profile_base_location_}/skip-chown-tests.patch#d4e3c8ef83788fb2a5d80dd75034786f
+
+[golang-common-pre-1.19]
+<= golang-common
+# TestSCMCredentials fails in a user-namespace if golang version < 1.19
+# https://github.com/golang/go/issues/42525
+patches +=
   ${:_profile_base_location_}/fix-TestSCMCredentials.patch#1d8dbc97cd579e03fafd8627d48f1c59

 [golang14]
-<= golang-common
+<= golang-common-pre-1.19
 # https://golang.org/doc/install/source#bootstrapFromSource
 url = https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz
 md5sum = dbf727a4b0e365bf88d97cbfde590016
@@ -64,7 +72,7 @@ setarch = setarch arm

 [golang1.12]
-<= golang-common
+<= golang-common-pre-1.19
 url = https://golang.org/dl/go1.12.17.src.tar.gz
 md5sum = 6b607fc795391dc609ffd79ebf41f080
@@ -73,7 +81,7 @@ environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

 [golang1.16]
-<= golang-common
+<= golang-common-pre-1.19
 url = https://golang.org/dl/go1.16.13.src.tar.gz
 md5sum = 1c076f952d9af57590a36fa7d36f695a
@@ -82,7 +90,7 @@ environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

 [golang1.17]
-<= golang-common
+<= golang-common-pre-1.19
 url = https://golang.org/dl/go1.17.13.src.tar.gz
 md5sum = 4476707f05cf6915ec1173038dc357a9
@@ -91,7 +99,7 @@ environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

 [golang1.18]
-<= golang-common
+<= golang-common-pre-1.19
 url = https://golang.org/dl/go1.18.9.src.tar.gz
 md5sum = e2caa7c4de49aa77a14c694bfc9a5cd1
@@ -102,6 +110,14 @@ md5sum = e2caa7c4de49aa77a14c694bfc9a5cd1
 environment-extra =
   GOROOT_BOOTSTRAP=${golang14:location}

+[golang1.20]
+<= golang-common
+url = https://go.dev/dl/go1.20.6.src.tar.gz
+md5sum = 1dc2d18790cfaede7df1e73a1eff8b7b
+# go1.20 requires go1.17.13 to bootstrap (see https://go.dev/doc/go1.20#bootstrap)
+environment-extra =
+  GOROOT_BOOTSTRAP=${golang1.17:location}
+
 # ---- infrastructure to build Go workspaces / projects ----
 # gowork is the top-level section that defines Go workspace.
@@ -161,7 +177,7 @@ bin = ${gowork.dir:bin}
 depends = ${gowork.goinstall:recipe}
 # go version used for the workspace (possible to override in applications)
-golang = ${golang1.18:location}
+golang = ${golang1.20:location}
 # no special build flags by default
 buildflags =
...
@@ -11,6 +11,8 @@ shared = true
 url = https://github.com/logrotate/logrotate/releases/download/3.18.1/logrotate-3.18.1.tar.xz
 md5sum = 07d5aba26c350f9ab5730c25a7277751
 # BBB this is only for backward-compatibility.
+configure-options =
+  --with-selinux=no
 post-install =
   ln -nsf . @@LOCATION@@/usr
 environment =
...
@@ -11,6 +11,7 @@ extends =
   ../openssl/buildout.cfg
   ../readline/buildout.cfg
   ../sqlite3/buildout.cfg
+  ../util-linux/buildout.cfg
   ../xz-utils/buildout.cfg
   ../zlib/buildout.cfg
@@ -45,35 +46,35 @@ pre-install = mkdir profile-opt
 # which would otherwise load the system libmagic.so with ctypes
 environment =
   PATH=${xz-utils:location}/bin:%(PATH)s
-  CPPFLAGS=-I${zlib:location}/include -I${xz-utils:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include
-  LDFLAGS=-L${zlib:location}/lib -L${xz-utils:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${xz-utils:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib
+  CPPFLAGS=-I${zlib:location}/include -I${xz-utils:location}/include -I${readline:location}/include -I${libexpat:location}/include -I${libffi:location}/include -I${ncurses:location}/include -I${ncurses:location}/include -I${bzip2:location}/include -I${gdbm:location}/include -I${openssl:location}/include -I${sqlite3:location}/include -I${gettext:location}/include -I${libuuid:location}/include
+  LDFLAGS=-L${zlib:location}/lib -L${xz-utils:location}/lib -L${readline:location}/lib -L${libexpat:location}/lib -L${libffi:location}/lib -L${ncurses:location}/lib -L${bzip2:location}/lib -L${gdbm:location}/lib -L${openssl:location}/lib -L${sqlite3:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${xz-utils:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${libexpat:location}/lib -Wl,-rpath=${libffi:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${gdbm:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${sqlite3:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${libuuid:location}/lib

 [python3.7]
 <= python3-common
 version = 3.7
-package_version = 3.7.15
-md5sum = d2ff16776b5d822efc2a7cbf42fc2915
+package_version = 3.7.17
+md5sum = dd94cab4541b57b88cf3dab32d6336e3

 [python3.8]
 <= python3-common
 version = 3.8
-package_version = 3.8.15
-md5sum = cca78a827d2327f5c3ff2dee9f526e7e
+package_version = 3.8.18
+md5sum = 5ea6267ea00513fc31d3746feb35842d

 [python3.9]
 <= python3-common
 version = 3.9
-package_version = 3.9.15
-md5sum = 8adc5662c9fd10a23ae8ae9f28b65b49
+package_version = 3.9.18
+md5sum = 765576c3af57deb046819ecd57804bbb

 [python3.10]
 <= python3-common
 version = 3.10
-package_version = 3.10.8
-md5sum = e92356b012ed4d0e09675131d39b1bde
+package_version = 3.10.13
+md5sum = 8847dc6458d1431d0ae0f55942deeb89

 [python3.11]
 <= python3-common
 version = 3.11
-package_version = 3.11.0
-md5sum = fe92acfa0db9b9f5044958edb451d463
+package_version = 3.11.5
+md5sum = 393856f1b7713aa8bba4b642ab9985d3
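The extra -I/-L/-rpath entries pointing at ${libuuid:location} let CPython's configure find libuuid from the newly extended util-linux component, so the _uuid extension is built against it instead of whatever the host system provides. A small, illustrative sanity check that could be run with an interpreter built from this part (not part of the commit):

  import _uuid
  import uuid

  # The compiled extension ships inside the buildout Python prefix; its
  # presence confirms configure found uuid.h at build time.
  print(_uuid.__file__)
  # Truthy when libuuid's uuid_generate_time_safe() was available at build time.
  print(_uuid.has_uuid_generate_time_safe)
  # Time-based UUIDs use the libuuid-backed code path when available.
  print(uuid.uuid1())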
@@ -45,6 +45,7 @@ patches =
 configure-options =
   --enable-multibyte
   --disable-static
+  --with-curses
 environment =
   CPPFLAGS=-I${ncurses:location}/include
   LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
...
@@ -10,6 +10,7 @@ recipe = slapos.recipe.cmmi
 shared = true
 url = https://sqlite.org/2022/sqlite-autoconf-3370200.tar.gz
 md5sum = 683cc5312ee74e71079c14d24b7a6d27
+pre-configure = sed -i 's/-ltinfo//g;s/-ltermcap//g;s/ termcap//;s/ curses//' configure
 configure-options =
   --disable-static
   --enable-readline
...
@@ -50,7 +50,7 @@ CGO_LDFLAGS += -Wl,-rpath=${zlib:location}/lib
 recipe = slapos.recipe.build:gitclone
 repository = https://lab.nexedi.com/nexedi/wendelin.core.git
 branch = master
-revision = wendelin.core-2.0.alpha3-0-g6315384
+revision = wendelin.core-2.0.alpha3-7-g885b355
 # dir is pretty name as top-level recipe
 location = ${buildout:parts-directory}/wendelin.core
 git-executable = ${git:location}/bin/git
@@ -221,49 +221,6 @@ class TestWaitForFiles(WrapperTestCase):
       self.fail('process did not start after file was created')


-@unittest.skipUnless(sys.platform.startswith("linux"), "Inotify is linux only")
-class TestWaitForFilesInotifyError(TestWaitForFiles):
-  def setUp(self):
-    super(TestWaitForFilesInotifyError, self).setUp()
-    # use LD_PRELOAD to inject errors into inotify_add_watch calls
-    inotify_mock_c = self.getTempPath('inotify_mock.c')
-    inotify_mock_o = self.getTempPath('inotify_mock.o')
-    inotify_mock_so = self.getTempPath('inotify_mock.so')
-    with open(inotify_mock_c, 'w') as f:
-      f.write('''
-      #include <sys/inotify.h>
-      #include <string.h>
-      #include <errno.h>
-
-      int inotify_add_watch(int fd, const char *pathname, uint32_t mask) {
-        errno = ENOSPC;
-        return -1;
-      }
-
-      /* This is a bit tricky because inotify_simple calls
-      inotify_add_watch with ctypes.CDLL("libc.so"), which uses
-      dlopen("libc.so") and dlsym("inotify_add_watch"), so we first
-      override dlsym to return our own inotify_add_watch.
-      https://github.com/chrisjbillington/inotify_simple/blob/55737898/inotify_simple.py#L110
-      */
-      extern void *__libc_dlsym (void *, const char *);
-      void *dlsym(void *handle, const char *symbol) {
-        if (strcmp(symbol, "inotify_add_watch") == 0) {
-          return (void *)inotify_add_watch;
-        }
-        return (void *)__libc_dlsym(handle, symbol);
-      }
-      ''')
-    subprocess.check_call(['gcc', '-c', '-fPIC', '-o', inotify_mock_o, inotify_mock_c])
-    subprocess.check_call(['gcc', '-shared', '-o', inotify_mock_so, inotify_mock_o])
-    self.env = dict(
-      os.environ,
-      PYTHONUNBUFFERED='1',
-      LD_PRELOAD=inotify_mock_so)
-
-  expected_output = 'Error using inotify, falling back to polling\ndone\n'
-
-
 class TestPrivateTmpFS(WrapperTestCase):
   def getOptions(self):
...
@@ -45,7 +45,7 @@ class TestBackupServer(InstanceTestCase):
     # Check that there is a RSS feed
     self.assertTrue('rss' in parameter_dict)
     self.assertTrue(parameter_dict['rss'].startswith(
-      f'https://[{self._ipv6_address}]:9443/'
+      f'https://[{self.computer_partition_ipv6_address}]:9443/'
     ))

     result = requests.get(
...
@@ -47,7 +47,7 @@ class TestCaucase(SlapOSInstanceTestCase):
     connection_parameter_dict = self.deserializeConnectionParameter()
     self.assertEqual(
       connection_parameter_dict,
-      {'url': 'http://[%s]:8009' % (self._ipv6_address,)}
+      {'url': 'http://[%s]:8009' % (self.computer_partition_ipv6_address,)}
     )
     result = requests.get(connection_parameter_dict['url'])
@@ -57,14 +57,14 @@ class TestCaucase(SlapOSInstanceTestCase):
       {
         '_links': {
           'self': {
-            'href': 'http://[%s]:8009' % (self._ipv6_address,)
+            'href': 'http://[%s]:8009' % (self.computer_partition_ipv6_address,)
           },
           'getCAUHAL': {
-            'href': 'http://[%s]:8009//cau' % (self._ipv6_address,),
+            'href': 'http://[%s]:8009//cau' % (self.computer_partition_ipv6_address,),
             'title': 'cau'
           },
           'getCASHAL': {
-            'href': 'http://[%s]:8009//cas' % (self._ipv6_address,),
+            'href': 'http://[%s]:8009//cas' % (self.computer_partition_ipv6_address,),
             'title': 'cas'
           }
         }
...
@@ -16,7 +16,7 @@ shared = true
 url = https://github.com/sigoden/dufs/archive/refs/tags/v0.34.1.tar.gz
 md5sum = 77cbb2523aca8dad90fd77ee0277704f
 configure-command = :
-make-binary = cargo install --root=%(location)s --path .
+make-binary = cargo install --root=%(location)s --path . --locked
 make-targets =
 environment =
   PATH=${rustc:location}/bin:%(PATH)s
...
@@ -131,14 +131,14 @@ class TestFileServer(SlapOSInstanceTestCase):
       return cnx.sock._sslobj.getpeercert()

     cert_before = _getpeercert()
-    # execute certificate updater two month later, when it's time to renew certificate.
+    # execute certificate updater when it's time to renew certificate.
     # use a timeout, because this service runs forever
     subprocess.run(
       (
         'timeout',
         '5',
         'faketime',
-        '+2 months',
+        '+63 days',
         os.path.join(
           self.computer_partition_root_path,
           'etc/service/dufs-certificate-updater'),
...
@@ -147,7 +147,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
     param_dict = self.getRootPartitionConnectionParameterDict()
     self.assertEqual(
-      'https://[%s]:8888/tree' % self._ipv6_address,
+      'https://[%s]:8888/tree' % self.computer_partition_ipv6_address,
       param_dict['jupyter-url']
     )
...
@@ -138,7 +138,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
     param_dict = self.getRootPartitionConnectionParameterDict()
     self.assertEqual(
-      'https://[%s]:8888/tree' % self._ipv6_address,
+      'https://[%s]:8888/tree' % self.getPartitionIPv6(self.getPartitionId("jupyter")),
       param_dict['jupyter-url']
     )
...
@@ -115,7 +115,7 @@ class WendelinTutorialTestCase(FluentdTestCase):
       round(random.uniform(-20, 50), 3))]

   def serve(self, port, request_handler_class):
-    server_address = (self._ipv6_address, port)
+    server_address = (self.computer_partition_ipv6_address, port)
     server = OneRequestServer(server_address, request_handler_class)
     data = server.get_first_data(FLUSH_INTERVAL)
@@ -181,7 +181,7 @@ class SensorConfTestCase(WendelinTutorialTestCase):
 @type forward
 <server>
   name myserver1
-  host {cls._ipv6_address}
+  host {cls.computer_partition_ipv6_address}
 </server>
 <buffer>
   flush_mode immediate
@@ -199,7 +199,7 @@ print("{measurement_text}")'''
   def test_configuration(self):
     self._test_configuration(
-      fr'adding forwarding server \'myserver1\' host="{self._ipv6_address}" port={FLUENTD_PORT} weight=60'
+      fr'adding forwarding server \'myserver1\' host="{self.computer_partition_ipv6_address}" port={FLUENTD_PORT} weight=60'
     )

   def test_send_data(self):
@@ -232,11 +232,11 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
 <source>
   @type forward
   port {fluentd_port}
-  bind {cls._ipv6_address}
+  bind {cls.computer_partition_ipv6_address}
 </source>
 <match tag.name>
   @type wendelin
-  streamtool_uri http://[{cls._ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
+  streamtool_uri http://[{cls.computer_partition_ipv6_address}]:{wendelin_port}/erp5/portal_ingestion_policies/default
   user foo
   password bar
   <buffer>
@@ -249,9 +249,9 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
   @classmethod
   def get_configuration(cls):
-    fluentd_port = findFreeTCPPort(cls._ipv6_address)
+    fluentd_port = findFreeTCPPort(cls.computer_partition_ipv6_address)
     cls._fluentd_port = fluentd_port
-    wendelin_port = findFreeTCPPort(cls._ipv6_address)
+    wendelin_port = findFreeTCPPort(cls.computer_partition_ipv6_address)
     cls._wendelin_port = wendelin_port
     return cls.gateway_conf(fluentd_port, wendelin_port)
@@ -260,7 +260,7 @@ class GatewayConfTestCase(WendelinTutorialTestCase):
   def test_wendelin_data_forwarding(self):
     sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
-    sock.connect((self._ipv6_address, self._fluentd_port))
+    sock.connect((self.computer_partition_ipv6_address, self._fluentd_port))
     data = [
       msgpack.ExtType(0, struct.pack('!Q', int(time.time()) << 32)),
...
@@ -39,4 +39,4 @@ md5sum = ad2baf4599a937d7352034a41fa24814
 [promtail-config-file]
 filename = promtail-config-file.cfg.in
-md5sum = c8c9d815dd7b427788c066f041f04573
+md5sum = 5f1b3a1a3d3f98daeab4780106452d71
@@ -7,7 +7,7 @@ server:
   external_url: {{ promtail['url'] }}

 positions:
   filename: {{ promtail['dir'] }}/positions.yaml

 clients:
   - url: {{ loki['url'] }}/api/prom/push
...
@@ -319,7 +319,7 @@ class TestListenInPartition(GrafanaTestCase):
         c.laddr for c in self.process_dict['grafana'].connections()
         if c.status == 'LISTEN'
       ],
-      [(self._ipv6_address, 8180)],
+      [(self.computer_partition_ipv6_address, 8180)],
     )

   def test_influxdb_listen(self):
@@ -330,7 +330,7 @@ class TestListenInPartition(GrafanaTestCase):
       ]),
       [
         (self._ipv4_address, 8088),
-        (self._ipv6_address, 8086),
+        (self.computer_partition_ipv6_address, 8086),
       ],
     )
...
@@ -43,7 +43,7 @@ class TestHtmlValidatorServer(InstanceTestCase):
     self.assertTrue('vnu-url' in parameter_dict)
     self.assertEqual(
-      'https://[%s]:8899/' % (self._ipv6_address, ),
+      'https://[%s]:8899/' % (self.computer_partition_ipv6_address, ),
       parameter_dict['vnu-url']
     )
...
@@ -52,7 +52,7 @@ class TestJSTestNode(InstanceTestCase):
     self.assertEqual(
       {
-        'nginx': 'http://[%s]:9443/' % (self._ipv6_address, )
+        'nginx': 'http://[%s]:9443/' % (self.computer_partition_ipv6_address, )
       },
       connection_dict
     )
@@ -83,7 +83,7 @@ class TestJSTestNode(InstanceTestCase):
     # Default access
     result = requests.get(
-      'http://[%s]:9443' % (self._ipv6_address, ), allow_redirects=False)
+      'http://[%s]:9443' % (self.computer_partition_ipv6_address, ), allow_redirects=False)
     self.assertEqual(
       [requests.codes.forbidden, False],
       [result.status_code, result.is_redirect]
...
@@ -52,10 +52,10 @@ class TestJupyter(InstanceTestCase):
     self.assertEqual(
       {
-        'jupyter-classic-url': 'https://[%s]:8888/tree' % (self._ipv6_address, ),
-        'jupyterlab-url': 'https://[%s]:8888/lab' % (self._ipv6_address, ),
+        'jupyter-classic-url': 'https://[%s]:8888/tree' % (self.computer_partition_ipv6_address, ),
+        'jupyterlab-url': 'https://[%s]:8888/lab' % (self.computer_partition_ipv6_address, ),
         'password': '%s' % (password, ),
-        'url': 'https://[%s]:8888/tree' % (self._ipv6_address, )
+        'url': 'https://[%s]:8888/tree' % (self.computer_partition_ipv6_address, )
       },
       connection_dict
     )
...
@@ -115,6 +115,46 @@ bootstrap_machine_param_dict = {
 }


+class KVMTestCase(InstanceTestCase):
+  @classmethod
+  def _findTopLevelPartitionPath(cls, path):
+    index = 0
+    while True:
+      index = path.find(os.path.sep, index) + len(os.path.sep)
+      top_path = path[:index]
+      if os.path.exists(os.path.join(top_path, '.slapos-resource')):
+        return top_path
+      if index == -1:
+        return None
+
+  @classmethod
+  def _updateSlaposResource(cls, partition_path, **kw):
+    with open(os.path.join(partition_path, '.slapos-resource'), 'r+') as f:
+      resource = json.load(f)
+      resource.update(kw)
+      f.seek(0)
+      f.truncate()
+      json.dump(resource, f, indent=2)
+
+  @classmethod
+  def formatPartitions(cls):
+    super().formatPartitions()
+
+    # steal tap from top level partition
+    instance_directory = cls.slap.instance_directory
+    top_partition_path = cls._findTopLevelPartitionPath(instance_directory)
+    with open(os.path.join(top_partition_path, '.slapos-resource')) as f:
+      top_resource = json.load(f)
+
+    for partition in os.listdir(instance_directory):
+      if not partition.startswith(cls.__partition_reference__):
+        continue
+      partition_path = os.path.join(instance_directory, partition)
+      cls._updateSlaposResource(partition_path, tap=top_resource['tap'])
+
+
 class KvmMixin:
   def getConnectionParameterDictJson(self):
     return json.loads(
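_findTopLevelPartitionPath scans path prefixes from the filesystem root downwards and returns the first one that contains a .slapos-resource file, which _updateSlaposResource then rewrites in place. A minimal standalone sketch of the same prefix scan, with an illustrative function name that is not part of the commit:

  import os
  from pathlib import Path

  def find_top_level_partition(path, marker='.slapos-resource'):
    # Walk the prefixes of `path` from the root downwards and return the
    # first (top-most) directory containing `marker`, or None if absent.
    parts = Path(path).resolve().parts
    for i in range(1, len(parts) + 1):
      candidate = Path(*parts[:i])
      if (candidate / marker).exists():
        return str(candidate)
    return None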
@@ -176,7 +216,7 @@ class KvmMixinJson:

 @skipUnlessKvm
-class TestInstance(InstanceTestCase, KvmMixin):
+class TestInstance(KVMTestCase, KvmMixin):
   __partition_reference__ = 'i'

   def test(self):
@@ -192,12 +232,12 @@ class TestInstance(InstanceTestCase, KvmMixin):
     self.assertEqual(
       connection_parameter_dict,
       {
-        'ipv6': self._ipv6_address,
+        'ipv6': self.computer_partition_ipv6_address,
         'maximum-extra-disk-amount': '0',
-        'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
-        'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
-        'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
-        'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
+        'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8026',
+        'nat-rule-port-tcp-22': f'{self.computer_partition_ipv6_address} : 10022',
+        'nat-rule-port-tcp-443': f'{self.computer_partition_ipv6_address} : 10443',
+        'nat-rule-port-tcp-80': f'{self.computer_partition_ipv6_address} : 10080',
       }
     )
     self.assertEqual(set(present_key_list), set(assert_key_list))
@@ -227,7 +267,7 @@ class TestInstanceJson(

 @skipUnlessKvm
-class TestMemoryManagement(InstanceTestCase, KvmMixin):
+class TestMemoryManagement(KVMTestCase, KvmMixin):
   __partition_reference__ = 'i'

   def getKvmProcessInfo(self, switch_list):
@@ -395,7 +435,7 @@ class MonitorAccessMixin(KvmMixin):

 @skipUnlessKvm
-class TestAccessDefault(MonitorAccessMixin, InstanceTestCase):
+class TestAccessDefault(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'ad'
   expected_partition_with_monitor_base_url_count = 1
@@ -416,7 +456,7 @@ class TestAccessDefaultJson(KvmMixinJson, TestAccessDefault):

 @skipUnlessKvm
-class TestAccessDefaultAdditional(MonitorAccessMixin, InstanceTestCase):
+class TestAccessDefaultAdditional(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'ada'
   expected_partition_with_monitor_base_url_count = 1
@@ -452,7 +492,7 @@ class TestAccessDefaultAdditionalJson(

 @skipUnlessKvm
-class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
+class TestAccessDefaultBootstrap(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'adb'
   expected_partition_with_monitor_base_url_count = 1
@@ -464,27 +504,22 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
   def test(self):
     # START: mock .slapos-resource with tap.ipv4_addr
     # needed for netconfig.sh
-    test_partition_slapos_resource_file = os.path.join(
-      self.computer_partition_root_path, '.slapos-resource')
-    path = os.path.realpath(os.curdir)
-    while path != '/':
-      root_slapos_resource_file = os.path.join(path, '.slapos-resource')
-      if os.path.exists(root_slapos_resource_file):
-        break
-      path = os.path.realpath(os.path.join(path, '..'))
-    else:
-      raise ValueError('No .slapos-resource found to base the mock on')
-    with open(root_slapos_resource_file) as fh:
-      root_slapos_resource = json.load(fh)
-    if root_slapos_resource['tap']['ipv4_addr'] == '':
-      root_slapos_resource['tap'].update({
+    partition_path = self.computer_partition_root_path
+    top_partition_path = self._findTopLevelPartitionPath(partition_path)
+
+    with open(os.path.join(top_partition_path, '.slapos-resource')) as f:
+      top_tap = json.load(f)['tap']
+
+    if top_tap['ipv4_addr'] == '':
+      top_tap.update({
         "ipv4_addr": "10.0.0.2",
         "ipv4_gateway": "10.0.0.1",
         "ipv4_netmask": "255.255.0.0",
         "ipv4_network": "10.0.0.0"
       })
-    with open(test_partition_slapos_resource_file, 'w') as fh:
-      json.dump(root_slapos_resource, fh, indent=4)
+
+    self._updateSlaposResource(partition_path, tap=top_tap)

     self.slap.waitForInstance(max_retry=10)
     # END: mock .slapos-resource with tap.ipv4_addr
@@ -505,7 +540,7 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):

 @skipUnlessKvm
-class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):
+class TestAccessKvmCluster(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'akc'
   expected_partition_with_monitor_base_url_count = 2
@@ -535,7 +570,7 @@ class TestAccessKvmCluster(MonitorAccessMixin, InstanceTestCase):

 @skipUnlessKvm
-class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):
+class TestAccessKvmClusterAdditional(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'akca'
   expected_partition_with_monitor_base_url_count = 2
@@ -575,7 +610,7 @@ class TestAccessKvmClusterAdditional(MonitorAccessMixin, InstanceTestCase):

 @skipUnlessKvm
-class TestAccessKvmClusterBootstrap(MonitorAccessMixin, InstanceTestCase):
+class TestAccessKvmClusterBootstrap(MonitorAccessMixin, KVMTestCase):
   __partition_reference__ = 'akcb'
   expected_partition_with_monitor_base_url_count = 3
@@ -618,7 +653,7 @@ class TestAccessKvmClusterBootstrap(MonitorAccessMixin, InstanceTestCase):

 @skipUnlessKvm
-class TestInstanceResilient(InstanceTestCase, KvmMixin):
+class TestInstanceResilient(KVMTestCase, KvmMixin):
   __partition_reference__ = 'ir'
   instance_max_retry = 20
@@ -626,6 +661,13 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
   def getInstanceSoftwareType(cls):
     return 'kvm-resilient'

+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    cls.pbs1_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('PBS (kvm / 1)'))
+    cls.kvm0_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('kvm0'))
+    cls.kvm1_ipv6 = cls.getPartitionIPv6(cls.getPartitionId('kvm1'))
+
   def test_kvm_exporter(self):
     exporter_partition = os.path.join(
       self.slap.instance_directory,
@@ -661,19 +703,19 @@ class TestInstanceResilient(InstanceTestCase, KvmMixin):
     self.assertRegex(
       feed_pull,
       'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-pull'.format(
-        self._ipv6_address))
+        self.pbs1_ipv6))
     feed_push = connection_parameter_dict.pop('feed-url-kvm-1-push')
     self.assertRegex(
       feed_push,
       'http://\\[{}\\]:[0-9][0-9][0-9][0-9]/get/local-ir0-kvm-1-push'.format(
-        self._ipv6_address))
+        self.pbs1_ipv6))
     self.assertEqual(
       connection_parameter_dict,
       {
-        'ipv6': self._ipv6_address,
-        'monitor-base-url': f'https://[{self._ipv6_address}]:8160',
+        'ipv6': self.kvm0_ipv6,
+        'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8160',
         'monitor-user': 'admin',
-        'takeover-kvm-1-url': f'http://[{self._ipv6_address}]:9263/',
+        'takeover-kvm-1-url': f'http://[{self.kvm1_ipv6}]:9263/',
       }
     )
     self.assertEqual(set(present_key_list), set(assert_key_list))
@@ -733,7 +775,7 @@ class TestInstanceResilientJson(

 @skipUnlessKvm
-class TestInstanceResilientDiskTypeIde(InstanceTestCase, KvmMixin):
+class TestInstanceResilientDiskTypeIde(KVMTestCase, KvmMixin):
   @classmethod
   def getInstanceParameterDict(cls):
     return {
@@ -748,7 +790,7 @@ class TestInstanceResilientDiskTypeIdeJson(

 @skipUnlessKvm
-class TestAccessResilientAdditional(InstanceTestCase):
+class TestAccessResilientAdditional(KVMTestCase):
   __partition_reference__ = 'ara'
   expected_partition_with_monitor_base_url_count = 1
@@ -788,7 +830,7 @@ class TestAccessResilientAdditionalJson(
   pass

-class TestInstanceNbdServer(InstanceTestCase):
+class TestInstanceNbdServer(KVMTestCase):
   __partition_reference__ = 'ins'
   instance_max_retry = 5
@@ -870,7 +912,7 @@ class FakeImageServerMixin(KvmMixin):
     os.chdir(cls.image_source_directory)
     try:
       cls.server_process = multiprocessing.Process(
-        target=server.serve_forever, name='FakeImageHttpServer')
+        target=server.serve_forever, name='FakeImageHttpServer', daemon=True)
       cls.server_process.start()
       server.socket.close()
     finally:
@@ -890,7 +932,7 @@ class FakeImageServerMixin(KvmMixin):

 @skipUnlessKvm
-class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
+class TestBootImageUrlList(KVMTestCase, FakeImageServerMixin):
   __partition_reference__ = 'biul'
   kvm_instance_partition_reference = 'biul0'
@@ -931,8 +973,12 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
   @classmethod
   def setUpClass(cls):
-    cls.startImageHttpServer()
-    super().setUpClass()
+    try:
+      cls.startImageHttpServer()
+      super().setUpClass()
+    except BaseException:
+      cls.stopImageHttpServer()
+      raise

   @classmethod
   def tearDownClass(cls):
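The try/except matters because unittest does not call tearDownClass when setUpClass raises, so the fake image HTTP server would otherwise keep running after a failed setup. A minimal sketch of the same pattern, with placeholder start/stop helpers that stand in for the actual test methods:

  import unittest

  class ServerBackedTestCase(unittest.TestCase):
    @classmethod
    def startServer(cls):
      # stands in for startImageHttpServer / startHttpServer
      cls.server_started = True

    @classmethod
    def stopServer(cls):
      cls.server_started = False

    @classmethod
    def setUpClass(cls):
      try:
        cls.startServer()
        super().setUpClass()
      except BaseException:
        # tearDownClass is skipped when setUpClass raises, so clean up here.
        cls.stopServer()
        raise

    @classmethod
    def tearDownClass(cls):
      cls.stopServer()
      super().tearDownClass()

    def test_server_is_up(self):
      self.assertTrue(self.server_started)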
@@ -1246,7 +1292,7 @@ class TestBootImageUrlSelectResilientJson(

 @skipUnlessKvm
-class TestBootImageUrlListKvmCluster(InstanceTestCase, FakeImageServerMixin):
+class TestBootImageUrlListKvmCluster(KVMTestCase, FakeImageServerMixin):
   __partition_reference__ = 'biulkc'

   @classmethod
@@ -1324,7 +1370,7 @@ class TestBootImageUrlSelectKvmCluster(TestBootImageUrlListKvmCluster):

 @skipUnlessKvm
-class TestNatRules(KvmMixin, InstanceTestCase):
+class TestNatRules(KvmMixin, KVMTestCase):
   __partition_reference__ = 'nr'

   @classmethod
@@ -1340,11 +1386,11 @@ class TestNatRules(KvmMixin, InstanceTestCase):
     self.assertIn('nat-rule-port-tcp-200', connection_parameter_dict)

     self.assertEqual(
-      f'{self._ipv6_address} : 10100',
+      f'{self.computer_partition_ipv6_address} : 10100',
       connection_parameter_dict['nat-rule-port-tcp-100']
     )
     self.assertEqual(
-      f'{self._ipv6_address} : 10200',
+      f'{self.computer_partition_ipv6_address} : 10200',
       connection_parameter_dict['nat-rule-port-tcp-200']
     )
@@ -1356,7 +1402,7 @@ class TestNatRulesJson(

 @skipUnlessKvm
-class TestNatRulesKvmCluster(InstanceTestCase):
+class TestNatRulesKvmCluster(KVMTestCase):
   __partition_reference__ = 'nrkc'

   nat_rules = ["100", "200", "300"]
@@ -1405,7 +1451,7 @@ class TestNatRulesKvmClusterComplex(TestNatRulesKvmCluster):

 @skipUnlessKvm
-class TestWhitelistFirewall(InstanceTestCase):
+class TestWhitelistFirewall(KVMTestCase):
   __partition_reference__ = 'wf'
   kvm_instance_partition_reference = 'wf0'
@@ -1533,7 +1579,7 @@ class TestWhitelistFirewallRequestCluster(TestWhitelistFirewallRequest):

 @skipUnlessKvm
-class TestDiskDevicePathWipeDiskOndestroy(InstanceTestCase, KvmMixin):
+class TestDiskDevicePathWipeDiskOndestroy(KVMTestCase, KvmMixin):
   __partition_reference__ = 'ddpwdo'
   kvm_instance_partition_reference = 'ddpwdo0'
@@ -1568,7 +1614,7 @@ class TestDiskDevicePathWipeDiskOndestroyJson(

 @skipUnlessKvm
-class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
+class TestImageDownloadController(KVMTestCase, FakeImageServerMixin):
   __partition_reference__ = 'idc'
   maxDiff = None
@@ -1770,7 +1816,7 @@ INF: Storing errors in %(error_state_file)s

 @skipUnlessKvm
-class TestParameterDefault(InstanceTestCase, KvmMixin):
+class TestParameterDefault(KVMTestCase, KvmMixin):
   __partition_reference__ = 'pd'

   @classmethod
@@ -1879,22 +1925,17 @@ class ExternalDiskMixin(KvmMixin):
   def _prepareExternalStorageList(cls):
     external_storage_path = os.path.join(cls.working_directory, 'STORAGE')
     os.mkdir(external_storage_path)
-    # reuse .slapos-resource infomration of the containing partition
-    # it's similar to slapos/recipe/slapconfiguration.py
-    _resource_home = cls.slap.instance_directory
-    parent_slapos_resource = None
-    while not os.path.exists(os.path.join(_resource_home, '.slapos-resource')):
-      _resource_home = os.path.normpath(os.path.join(_resource_home, '..'))
-      if _resource_home == "/":
-        break
-    else:
-      with open(os.path.join(_resource_home, '.slapos-resource')) as fh:
-        parent_slapos_resource = json.load(fh)
-    assert parent_slapos_resource is not None
-    for partition in os.listdir(cls.slap.instance_directory):
+    # We already reuse tap from top level partition
+    instance_directory = cls.slap.instance_directory
+    for partition in os.listdir(instance_directory):
       if not partition.startswith(cls.__partition_reference__):
         continue
+      partition_path = os.path.join(instance_directory, partition)
       partition_store_list = []
       for number in range(10):
         storage = os.path.join(external_storage_path, f'data{number}')
@@ -1903,13 +1944,12 @@ class ExternalDiskMixin(KvmMixin):
         partition_store = os.path.join(storage, partition)
         os.mkdir(partition_store)
         partition_store_list.append(partition_store)
-      slapos_resource = parent_slapos_resource.copy()
-      slapos_resource['external_storage_list'] = partition_store_list
-      with open(
-          os.path.join(
-            cls.slap.instance_directory, partition, '.slapos-resource'),
-          'w') as fh:
-        json.dump(slapos_resource, fh, indent=2)
+      cls._updateSlaposResource(
+        partition_path,
+        external_storage_list=partition_store_list,
+      )
     # above is not enough: the presence of parameter is required in slapos.cfg
     slapos_config = []
     with open(cls.slap._slapos_config) as fh:
@@ -1951,7 +1991,7 @@ class ExternalDiskMixin(KvmMixin):

 @skipUnlessKvm
-class TestExternalDisk(InstanceTestCase, ExternalDiskMixin):
+class TestExternalDisk(KVMTestCase, ExternalDiskMixin):
   __partition_reference__ = 'ed'
   kvm_instance_partition_reference = 'ed0'
@@ -2101,7 +2141,7 @@ class ExternalDiskModernMixin(object):

 @skipUnlessKvm
 class TestExternalDiskModern(
-    ExternalDiskModernMixin, InstanceTestCase, ExternalDiskMixin):
+    ExternalDiskModernMixin, KVMTestCase, ExternalDiskMixin):
   def test(self):
     self.prepareEnv()
     self.waitForInstance()
@@ -2122,7 +2162,7 @@ class TestExternalDiskModern(

 @skipUnlessKvm
 class TestExternalDiskModernConflictAssurance(
-    ExternalDiskModernMixin, InstanceTestCase, ExternalDiskMixin):
+    ExternalDiskModernMixin, KVMTestCase, ExternalDiskMixin):
   def test(self):
     self.prepareEnv()
     # Create conflicting configuration
@@ -2182,7 +2222,7 @@ class TestExternalDiskModernCluster(TestExternalDiskModern):

 @skipUnlessKvm
-class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):
+class TestExternalDiskModernIndexRequired(KVMTestCase, ExternalDiskMixin):
   __partition_reference__ = 'edm'
   kvm_instance_partition_reference = 'edm0'
@@ -2246,7 +2286,7 @@ class TestExternalDiskModernIndexRequired(InstanceTestCase, ExternalDiskMixin):

 @skipUnlessKvm
-class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
+class TestInstanceHttpServer(KVMTestCase, KvmMixin):
   __partition_reference__ = 'ihs'

   @classmethod
@@ -2269,7 +2309,7 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
     os.chdir(cls.http_directory)
     try:
       cls.server_process = multiprocessing.Process(
-        target=server.serve_forever, name='HttpServer')
+        target=server.serve_forever, name='HttpServer', daemon=True)
       cls.server_process.start()
     finally:
       os.chdir(old_dir)
@@ -2288,8 +2328,12 @@ class TestInstanceHttpServer(InstanceTestCase, KvmMixin):
   @classmethod
   def setUpClass(cls):
-    cls.startHttpServer()
-    super().setUpClass()
+    try:
+      cls.startHttpServer()
+      super().setUpClass()
+    except BaseException:
+      cls.stopHttpServer()
+      raise

   @classmethod
   def tearDownClass(cls):
@@ -2320,12 +2364,12 @@ vm""",
     self.assertEqual(
       connection_parameter_dict,
       {
-        'ipv6': self._ipv6_address,
+        'ipv6': self.computer_partition_ipv6_address,
         'maximum-extra-disk-amount': '0',
-        'monitor-base-url': f'https://[{self._ipv6_address}]:8026',
-        'nat-rule-port-tcp-22': f'{self._ipv6_address} : 10022',
-        'nat-rule-port-tcp-443': f'{self._ipv6_address} : 10443',
-        'nat-rule-port-tcp-80': f'{self._ipv6_address} : 10080',
+        'monitor-base-url': f'https://[{self.computer_partition_ipv6_address}]:8026',
+        'nat-rule-port-tcp-22': f'{self.computer_partition_ipv6_address} : 10022',
+        'nat-rule-port-tcp-443': f'{self.computer_partition_ipv6_address} : 10443',
+        'nat-rule-port-tcp-80': f'{self.computer_partition_ipv6_address} : 10080',
       }
     )
     self.assertEqual(set(present_key_list), set(assert_key_list))
...
 [instance-profile]
 filename = instance.cfg.in
-md5sum = 685e7b371768f6977896d7214fd379f1
+md5sum = 0d50ed911a41b76b952b63d37853c3a4
@@ -44,11 +44,7 @@ command-line = sh -c "cd $${directory:srv-metabase}; ${java:location}/bin/java $
 environment =
   MB_EMOJI_IN_LOGS=false
   MB_JETTY_HOST=$${:ip}
-  MB_JETTY_PORT=$${:-http-port}
-  MB_JETTY_SSL_PORT=$${:port}
-  MB_JETTY_SSL=true
-  MB_JETTY_SSL_KEYSTORE=$${metabase-keystore:file}
-  MB_JETTY_SSL_KEYSTORE_PASSWORD=$${metabase-keystore:password}
+  MB_JETTY_PORT=$${:port}
   MB_DB_TYPE=postgres
   MB_DB_DBNAME=$${postgresql:dbname}
   MB_DB_PORT=$${postgresql:port}
...@@ -62,68 +58,57 @@ environment = ...@@ -62,68 +58,57 @@ environment =
hash-existing-files = hash-existing-files =
$${buildout:directory}/software_release/buildout.cfg $${buildout:directory}/software_release/buildout.cfg
ip = $${instance-parameter:ipv6-random} ip = $${instance-parameter:ipv4-random}
port = 8443 port = 18080
# XXX It does not seem we can prevent metabase to also listen on http, so we
# give it an http port, but don't use it.
-http-port = 18080
hostname = [$${:ip}]
scheme = https
url = $${:scheme}://$${:hostname}:$${:port}
promises = promises =
$${metabase-promise:name} $${metabase-promise:name}
[metabase-promise] [metabase-frontend-certificate]
<= monitor-promise-base
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-instance:url}/api/session/properties
[metabase-keystore-password]
recipe = slapos.cookbook:generate.password
[metabase-keystore]
recipe = plone.recipe.command recipe = plone.recipe.command
stop-on-error = true
command = command =
if [ -f $${:file} ] if [ ! -e $${:cert-file} ]
then then
# XXX password used to be "insecure", but we changed to proper password. ${openssl-output:openssl} req -x509 -nodes -days 3650 \
# We try to list the store with the new password and if it fail we change -subj "/C=AA/ST=X/L=X/O=Dis/CN=$${:common-name}" \
# the keystore password. -newkey rsa -keyout $${:cert-file} \
if ! ${java:location}/bin/keytool \ -out $${:cert-file}
-list \
-keystore "$${:file}" \
-storepass "$${:password}"
then
echo "Migrating keystore password" && \
${java:location}/bin/keytool \
-storepasswd \
-keystore "$${:file}" \
-storepass insecure \
-new "$${:password}" && \
echo "Migrating certificate key password" && \
${java:location}/bin/keytool \
-keypasswd \
-alias "$${:alias}" \
-keypass insecure \
-new "$${:password}" \
-keystore "$${:file}" \
-storepass "$${:password}"
fi
else
${java:location}/bin/keytool \
-genkeypair \
-alias "$${:alias}" \
-keyalg RSA \
-keypass "$${:password}" \
-dname "CN=$${metabase-instance:ip},OU=Unit,O=Organization,L=City,S=State,C=Country" \
-keystore "$${:file}" \
-storepass "$${:password}"
fi fi
file = $${directory:etc}/.metabase_keystore update-command = $${:command}
password = $${metabase-keystore-password:passwd} cert-file = $${directory:var}/$${:_buildout_section_name_}.pem
alias = metabase common-name = $${metabase-frontend-config:ip}
location =
$${:cert-file}
[metabase-frontend-config]
recipe = slapos.recipe.template:jinja2
url = ${stack-haproxy-default-backend-config:target}
output = $${directory:etc}/$${:_buildout_section_name_}
context =
key pidfile :pidfile
key content :content
content =
listen app
log global
bind $${:ip}:$${:port} ssl crt $${metabase-frontend-certificate:cert-file} alpn h2,http/1.1
server app $${metabase-instance:ip}:$${metabase-instance:port}
pidfile = $${directory:run}/$${:_buildout_section_name_}.pid
ip = $${instance-parameter:ipv6-random}
port = 8443
[metabase-frontend]
recipe = slapos.cookbook:wrapper
wrapper-path = $${directory:services}/$${:_buildout_section_name_}
command-line =
${haproxy:location}/sbin/haproxy -f $${metabase-frontend-config:output}
url = https://[$${metabase-frontend-config:ip}]:$${metabase-frontend-config:port}
[metabase-promise]
<= monitor-promise-base
promise = check_url_available
name = $${:_buildout_section_name_}.py
config-url= $${metabase-frontend:url}/api/session/properties
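For reference, the check_url_available promise above boils down to an HTTPS GET against Metabase's session-properties endpoint, now reached through the HAProxy frontend instead of Metabase's own Jetty SSL listener. A minimal manual equivalent in Python (the IPv6 address is an example standing in for $${instance-parameter:ipv6-random}; verification is disabled because the frontend certificate above is self-signed):
import requests
# Hypothetical frontend address; substitute the published url parameter.
url = 'https://[2001:db8::1]:8443/api/session/properties'
response = requests.get(url, verify=False, timeout=10)  # self-signed frontend certificate
response.raise_for_status()  # the promise fails if this endpoint is unreachable
print(response.status_code)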
[postgresql-password] [postgresql-password]
recipe = slapos.cookbook:generate.password recipe = slapos.cookbook:generate.password
...@@ -247,6 +232,7 @@ var-cron-entries = $${:var}/cron-entries ...@@ -247,6 +232,7 @@ var-cron-entries = $${:var}/cron-entries
srv = $${buildout:directory}/srv srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp tmp = $${buildout:directory}/tmp
run = $${buildout:directory}/run
service = $${:etc}/service service = $${:etc}/service
srv-metabase = $${:srv}/metabase srv-metabase = $${:srv}/metabase
srv-backup = $${:srv}/backup srv-backup = $${:srv}/backup
...@@ -254,6 +240,6 @@ fontconfig-cache = $${buildout:directory}/.fontconfig ...@@ -254,6 +240,6 @@ fontconfig-cache = $${buildout:directory}/.fontconfig
[publish-connection-parameter] [publish-connection-parameter]
recipe = slapos.cookbook:publish recipe = slapos.cookbook:publish
url = $${metabase-instance:url} url = $${metabase-frontend:url}
backup-crontab = $${postgresql-backup-crontab-entry:name} backup-crontab = $${postgresql-backup-crontab-entry:name}
restore-backup-script = $${postgresql-restore-backup:wrapper-path} restore-backup-script = $${postgresql-restore-backup:wrapper-path}
...@@ -6,6 +6,7 @@ extends = ...@@ -6,6 +6,7 @@ extends =
../../component/java/buildout.cfg ../../component/java/buildout.cfg
../../component/postgresql/buildout.cfg ../../component/postgresql/buildout.cfg
../../component/dcron/buildout.cfg ../../component/dcron/buildout.cfg
../../stack/haproxy/default-backend.cfg
../../stack/slapos.cfg ../../stack/slapos.cfg
buildout.hash.cfg buildout.hash.cfg
../../stack/monitor/buildout.cfg ../../stack/monitor/buildout.cfg
...@@ -19,8 +20,8 @@ parts = ...@@ -19,8 +20,8 @@ parts =
[metabase.jar] [metabase.jar]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = https://downloads.metabase.com/v0.45.2/metabase.jar url = https://downloads.metabase.com/v0.47.0/metabase.jar
md5sum = cca1f4d663ebfa60b3a6d93705b340a0 md5sum = b81c71668a2177d89690730fabd85d9e
[instance-profile] [instance-profile]
recipe = slapos.recipe.template recipe = slapos.recipe.template
......
...@@ -30,7 +30,7 @@ md5sum = 9f27195d770b2f57461c60a82c851ab9 ...@@ -30,7 +30,7 @@ md5sum = 9f27195d770b2f57461c60a82c851ab9
[instance-neo] [instance-neo]
filename = instance-neo.cfg.in filename = instance-neo.cfg.in
md5sum = 504b021715566e69ad664101f1b12a5c md5sum = fda911d5ef9efee365f1b0ff9843a50b
[template-neo-my-cnf] [template-neo-my-cnf]
filename = my.cnf.in filename = my.cnf.in
......
...@@ -3,9 +3,10 @@ ...@@ -3,9 +3,10 @@
{% set init_list = [] -%} {% set init_list = [] -%}
{% set private_tmpfs = slapparameter_dict.get('private-tmpfs') -%} {% set private_tmpfs = slapparameter_dict.get('private-tmpfs') -%}
{% set storage_count = slapparameter_dict.get('storage-count', 1) -%}
{% set storage_type = slapparameter_dict.get('storage-type') or ( {% set storage_type = slapparameter_dict.get('storage-type') or (
'MySQL' if mariadb_location is defined else 'SQLite') -%} 'MySQL' if mariadb_location is defined else 'SQLite') -%}
{% set mysql = storage_type != 'SQLite' -%} {% set mysql = storage_count and storage_type != 'SQLite' -%}
{% if mysql -%} {% if mysql -%}
[{{ section('mysqld') }}] [{{ section('mysqld') }}]
...@@ -113,7 +114,7 @@ engine = ${my-cnf-parameters:engine} ...@@ -113,7 +114,7 @@ engine = ${my-cnf-parameters:engine}
dedup = {{ dumps(bool(slapparameter_dict.get('data-deduplication'))) }} dedup = {{ dumps(bool(slapparameter_dict.get('data-deduplication'))) }}
disable-drop-partitions = {{ dumps(bool(slapparameter_dict.get('disable-drop-partitions'))) }} disable-drop-partitions = {{ dumps(bool(slapparameter_dict.get('disable-drop-partitions'))) }}
{% for i in range(slapparameter_dict.get('storage-count', 1)) -%} {% for i in range(storage_count) -%}
{% set storage_id = 'neo-storage-' ~ i -%} {% set storage_id = 'neo-storage-' ~ i -%}
[{{ section(storage_id) }}] [{{ section(storage_id) }}]
< = neo-storage < = neo-storage
......
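A note on the storage_count handling above: the mysql flag now also depends on storage-count, relying on 0 being falsy, so requesting zero storage nodes skips the mysqld section and the per-storage loop entirely. A minimal sketch of that truth table in plain Python (illustrative only, outside the jinja2 template):
def needs_mysql(storage_count, storage_type):
    # Mirrors {% set mysql = storage_count and storage_type != 'SQLite' %}
    return bool(storage_count and storage_type != 'SQLite')
print(needs_mysql(1, 'MySQL'))   # True
print(needs_mysql(0, 'MySQL'))   # False: no storage nodes, no MySQL parts
print(needs_mysql(1, 'SQLite'))  # False: SQLite backend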
...@@ -65,6 +65,10 @@ class NextCloudTestCase(InstanceTestCase): ...@@ -65,6 +65,10 @@ class NextCloudTestCase(InstanceTestCase):
self.nextcloud_path, self.nextcloud_path,
"Nextcloud path not found in %r" % (partition_path_list,)) "Nextcloud path not found in %r" % (partition_path_list,))
# lookup nextcloud partition ipv6
partition_id = os.path.basename(self.partition_dir)
self.nextcloud_ipv6 = self.getPartitionIPv6(partition_id)
# parse database info from mariadb url # parse database info from mariadb url
d = self.computer_partition.getConnectionParameterDict() d = self.computer_partition.getConnectionParameterDict()
db_url = d['mariadb-url-list'][2:-2] # parse <url> out of "['<url>']" db_url = d['mariadb-url-list'][2:-2] # parse <url> out of "['<url>']"
...@@ -86,9 +90,9 @@ class NextCloudTestCase(InstanceTestCase): ...@@ -86,9 +90,9 @@ class NextCloudTestCase(InstanceTestCase):
mail_smtpport="587", mail_smtpport="587",
mail_smtppassword="", mail_smtppassword="",
mail_smtpname="", mail_smtpname="",
cli_url="https://[%s]:9988/" % self._ipv6_address, cli_url="https://[%s]:9988/" % self.nextcloud_ipv6,
partition_dir=self.partition_dir, partition_dir=self.partition_dir,
trusted_domain_list=json.dumps(["[%s]:9988" % self._ipv6_address]), trusted_domain_list=json.dumps(["[%s]:9988" % self.nextcloud_ipv6]),
trusted_proxy_list=[], trusted_proxy_list=[],
) )
data_dict.update(config_dict) data_dict.update(config_dict)
...@@ -336,7 +340,7 @@ class TestNextCloudParameters(NextCloudTestCase): ...@@ -336,7 +340,7 @@ class TestNextCloudParameters(NextCloudTestCase):
cli_url="nextcloud.example.com", cli_url="nextcloud.example.com",
partition_dir=self.partition_dir, partition_dir=self.partition_dir,
trusted_domain_list=json.dumps([ trusted_domain_list=json.dumps([
"[%s]:9988" % self._ipv6_address, "[%s]:9988" % self.nextcloud_ipv6,
"nextcloud.example.com", "nextcloud.example.com",
"nextcloud.proxy.com" "nextcloud.proxy.com"
]), ]),
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
[template] [template]
filename = instance.cfg filename = instance.cfg
md5sum = 38bceddc77a5a44d69f0d9614909d1a2 md5sum = efcb5965bdf766d94b43c6ba2328275d
[amarisoft-stats.jinja2.py] [amarisoft-stats.jinja2.py]
_update_hash_filename_ = amarisoft-stats.jinja2.py _update_hash_filename_ = amarisoft-stats.jinja2.py
...@@ -36,11 +36,11 @@ md5sum = b34fe47a73890097fbc6ea6374aeb38d ...@@ -36,11 +36,11 @@ md5sum = b34fe47a73890097fbc6ea6374aeb38d
[template-enb] [template-enb]
_update_hash_filename_ = instance-enb.jinja2.cfg _update_hash_filename_ = instance-enb.jinja2.cfg
md5sum = 6382f871c0f4e7e965c95de7a959342a md5sum = 8b6e778ec21e03ef3f832ee420d7e83a
[template-gnb] [template-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg _update_hash_filename_ = instance-gnb.jinja2.cfg
md5sum = 4df8edfb9a8bcbdcc9740afb27a88928 md5sum = a94322f85f438dccba699e0f5ea09e3a
[template-core-network] [template-core-network]
_update_hash_filename_ = instance-core-network.jinja2.cfg _update_hash_filename_ = instance-core-network.jinja2.cfg
......
...@@ -276,6 +276,21 @@ ...@@ -276,6 +276,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)", "description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -5,6 +5,9 @@ parts = ...@@ -5,6 +5,9 @@ parts =
enb-config enb-config
enb-service enb-service
xamari-xlog-service xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
xlog-fluentbit-service
{% endif %}
amarisoft-stats-service amarisoft-stats-service
amarisoft-rf-info-service amarisoft-rf-info-service
{% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %} {% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %}
...@@ -176,6 +179,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_} ...@@ -176,6 +179,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_}
command-line = ${xamari-xlog-script:output} command-line = ${xamari-xlog-script:output}
hash-files = ${:command-line} hash-files = ${:command-line}
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
[xlog-fluentbit-config]
recipe = slapos.recipe.template
output = ${directory:etc}/${:_buildout_section_name_}.cfg
logfile = ${xamari-xlog-script:logfile}
forward-host = {{ slapparameter_dict.get('xlog_fluentbit_forward_host', '') }}
forward-port = {{ slapparameter_dict.get('xlog_fluentbit_forward_port', '') }}
forward-shared-key = {{ slapparameter_dict.get('xlog_fluentbit_forward_shared_key', '') }}
forward-self-hostname = {{ ors_id['ors-id'] }}
inline =
[SERVICE]
flush 5
[INPUT]
name tail
path ${:logfile}
Read_from_Head True
[OUTPUT]
name forward
match *
Host ${:forward-host}
{%- if slapparameter_dict.get('xlog_fluentbit_forward_port') %}
Port ${:forward-port}
{%- endif %}
Shared_Key ${:forward-shared-key}
Self_Hostname ${:forward-self-hostname}
tls on
tls.verify off
[xlog-fluentbit-service]
recipe = slapos.cookbook:wrapper
fluentbit = {{ fluent_bit_location }}/bin/fluent-bit
fluentbit-config = ${xlog-fluentbit-config:output}
command-line = ${:fluentbit} -c ${:fluentbit-config}
wrapper-path = ${directory:service}/${:_buildout_section_name_}
hash-files = ${:fluentbit-config}
{% endif %}
[amarisoft-stats-template] [amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do extensions = jinja2.ext.do
......
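A minimal sketch of how the xlog_fluentbit_forward_host guard above behaves, rendered with jinja2 directly rather than through the real SlapOS instantiation pipeline (the parameter value is an example):
from jinja2 import Template
template = Template('''
parts =
  xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
  xlog-fluentbit-service
{% endif %}
''')
# Without the parameter, no fluent-bit part is emitted.
print(template.render(slapparameter_dict={}))
# With it, the xlog-fluentbit-service part (and its configuration) is included.
print(template.render(slapparameter_dict={
    'xlog_fluentbit_forward_host': 'fluentd.example.com'}))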
...@@ -274,6 +274,21 @@ ...@@ -274,6 +274,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)", "description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -316,6 +316,21 @@ ...@@ -316,6 +316,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail", "description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -328,6 +328,21 @@ ...@@ -328,6 +328,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail", "description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -5,6 +5,9 @@ parts = ...@@ -5,6 +5,9 @@ parts =
gnb-config gnb-config
enb-service enb-service
xamari-xlog-service xamari-xlog-service
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
xlog-fluentbit-service
{% endif %}
amarisoft-stats-service amarisoft-stats-service
amarisoft-rf-info-service amarisoft-rf-info-service
{% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %} {% if slapparameter_dict.get('rrh', '') == "Lopcomm ORAN" %}
...@@ -153,6 +156,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_} ...@@ -153,6 +156,43 @@ wrapper-path = ${directory:service}/${:_buildout_section_name_}
command-line = ${xamari-xlog-script:output} command-line = ${xamari-xlog-script:output}
hash-files = ${:command-line} hash-files = ${:command-line}
{% if slapparameter_dict.get('xlog_fluentbit_forward_host') %}
[xlog-fluentbit-config]
recipe = slapos.recipe.template
output = ${directory:etc}/${:_buildout_section_name_}.cfg
logfile = ${xamari-xlog-script:logfile}
forward-host = {{ slapparameter_dict.get('xlog_fluentbit_forward_host', '') }}
forward-port = {{ slapparameter_dict.get('xlog_fluentbit_forward_port', '') }}
forward-shared-key = {{ slapparameter_dict.get('xlog_fluentbit_forward_shared_key', '') }}
forward-self-hostname = {{ ors_id['ors-id'] }}
inline =
[SERVICE]
flush 5
[INPUT]
name tail
path ${:logfile}
Read_from_Head True
[OUTPUT]
name forward
match *
Host ${:forward-host}
{%- if slapparameter_dict.get('xlog_fluentbit_forward_port') %}
Port ${:forward-port}
{%- endif %}
Shared_Key ${:forward-shared-key}
Self_Hostname ${:forward-self-hostname}
tls on
tls.verify off
[xlog-fluentbit-service]
recipe = slapos.cookbook:wrapper
fluentbit = {{ fluent_bit_location }}/bin/fluent-bit
fluentbit-config = ${xlog-fluentbit-config:output}
command-line = ${:fluentbit} -c ${:fluentbit-config}
wrapper-path = ${directory:service}/${:_buildout_section_name_}
hash-files = ${:fluentbit-config}
{% endif %}
[amarisoft-stats-template] [amarisoft-stats-template]
recipe = slapos.recipe.template:jinja2 recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do extensions = jinja2.ext.do
......
...@@ -172,6 +172,21 @@ ...@@ -172,6 +172,21 @@
"description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)", "description": "Baseband latency promise will fail if average TX/RX diff reaches threshold (higher than this value)",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -326,6 +326,21 @@ ...@@ -326,6 +326,21 @@
"description": "Average TX/RX diff threshold above which baseband latency promise will fail", "description": "Average TX/RX diff threshold above which baseband latency promise will fail",
"type": "number", "type": "number",
"default": 7 "default": 7
},
"xlog_fluentbit_forward_host": {
"title": "Address to Forward Xlog by Fluenbit",
"description": "Address of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_port": {
"title": "Port to Forward Xlog by Fluentbit",
"description": "Optional Port of Remote Fluentd or Fluentbit Server to Forward Xlog",
"type": "string"
},
"xlog_fluentbit_forward_shared_key": {
"title": "Shared Key to Forward Xlog by Fluentbit",
"description": "Secret Key Shared with Remote Fluentd or Fluentbit Server for Authentication when Forwarding Xlog",
"type": "string"
} }
} }
} }
...@@ -171,6 +171,16 @@ init = ...@@ -171,6 +171,16 @@ init =
except: except:
options['ipv4'] = "0.0.0.0" options['ipv4'] = "0.0.0.0"
[ors-id]
recipe = slapos.recipe.build
computer = $${slap-connection:computer-id}
title = $${slap-configuration:root-instance-title}
init =
import socket
options['hostname'] = socket.gethostname()
ors_id = '__'.join(options[x] for x in ('hostname', 'computer', 'title'))
options['ors-id'] = ors_id
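The [ors-id] section above builds the identifier later used as fluent-bit's Self_Hostname: the machine hostname, the computer id and the root instance title joined with '__'. A minimal sketch of the same composition in plain Python (the computer id and title are example values; the real ones come from slap-connection and slap-configuration):
import socket
options = {
    'hostname': socket.gethostname(),
    'computer': 'COMP-1234',           # $${slap-connection:computer-id}
    'title': 'ORS Example Instance',   # $${slap-configuration:root-instance-title}
}
ors_id = '__'.join(options[x] for x in ('hostname', 'computer', 'title'))
print(ors_id)  # e.g. ors1__COMP-1234__ORS Example Instance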
[switch-softwaretype] [switch-softwaretype]
recipe = slapos.cookbook:switch-softwaretype recipe = slapos.cookbook:switch-softwaretype
enb = dynamic-template-enb:output enb = dynamic-template-enb:output
...@@ -201,6 +211,7 @@ extensions = jinja2.ext.do ...@@ -201,6 +211,7 @@ extensions = jinja2.ext.do
extra-context = extra-context =
raw monitor_template ${monitor2-template:output} raw monitor_template ${monitor2-template:output}
section ors_version ors-version section ors_version ors-version
section ors_id ors-id
key enb amarisoft:enb key enb amarisoft:enb
key sdr amarisoft:sdr key sdr amarisoft:sdr
raw enb_template ${enb.jinja2.cfg:target} raw enb_template ${enb.jinja2.cfg:target}
...@@ -221,6 +232,7 @@ extra-context = ...@@ -221,6 +232,7 @@ extra-context =
raw dnsmasq_template ${dnsmasq.jinja2.cfg:target} raw dnsmasq_template ${dnsmasq.jinja2.cfg:target}
raw dnsmasq_location ${dnsmasq:location} raw dnsmasq_location ${dnsmasq:location}
key dnsmasq_config_path dnsmasq-config:output key dnsmasq_config_path dnsmasq-config:output
raw fluent_bit_location ${fluent-bit:location}
[dynamic-template-gnb] [dynamic-template-gnb]
< = jinja2-template-base < = jinja2-template-base
...@@ -230,6 +242,7 @@ extensions = jinja2.ext.do ...@@ -230,6 +242,7 @@ extensions = jinja2.ext.do
extra-context = extra-context =
raw monitor_template ${monitor2-template:output} raw monitor_template ${monitor2-template:output}
section ors_version ors-version section ors_version ors-version
section ors_id ors-id
key enb amarisoft:enb key enb amarisoft:enb
key sdr amarisoft:sdr key sdr amarisoft:sdr
raw gnb_template ${gnb.jinja2.cfg:target} raw gnb_template ${gnb.jinja2.cfg:target}
...@@ -244,6 +257,7 @@ extra-context = ...@@ -244,6 +257,7 @@ extra-context =
raw default_n_antenna_ul ${default-params:default-n-antenna-ul} raw default_n_antenna_ul ${default-params:default-n-antenna-ul}
raw rf_mode ${rf-mode:rf-mode} raw rf_mode ${rf-mode:rf-mode}
raw python_path ${python3:location} raw python_path ${python3:location}
raw fluent_bit_location ${fluent-bit:location}
[dynamic-template-core-network] [dynamic-template-core-network]
< = jinja2-template-base < = jinja2-template-base
......
...@@ -13,6 +13,7 @@ extends = ...@@ -13,6 +13,7 @@ extends =
../../component/pygolang/buildout.cfg ../../component/pygolang/buildout.cfg
../../component/git/buildout.cfg ../../component/git/buildout.cfg
../../component/dnsmasq/buildout.cfg ../../component/dnsmasq/buildout.cfg
../../component/fluent-bit/buildout.cfg
parts += parts +=
template template
......
...@@ -37,7 +37,6 @@ import urllib ...@@ -37,7 +37,6 @@ import urllib
from slapos.recipe.librecipe import generateHashFromFiles from slapos.recipe.librecipe import generateHashFromFiles
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
skip = unittest.skip('port conflit between powerdns instances') skip = unittest.skip('port conflit between powerdns instances')
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass( setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
...@@ -93,9 +92,10 @@ class PowerDNSTestCase(SlapOSInstanceTestCase): ...@@ -93,9 +92,10 @@ class PowerDNSTestCase(SlapOSInstanceTestCase):
ns_record = [] ns_record = []
for replicate_nb in range(1, dns_quantity + 1): for replicate_nb in range(1, dns_quantity + 1):
ns_id = 'ns%s' % replicate_nb ns_id = 'ns%s' % replicate_nb
partition_id = self.getPartitionId(ns_id)
ns_record.append(ns_id + '.' + self.default_supported_zone) ns_record.append(ns_id + '.' + self.default_supported_zone)
expected_dict[ns_id + '-port'] = str(DNS_PORT) expected_dict[ns_id + '-port'] = str(DNS_PORT)
expected_dict[ns_id + '-ipv6'] = self._ipv6_address expected_dict[ns_id + '-ipv6'] = self.getPartitionIPv6(partition_id)
expected_dict['ns-record'] = ','.join(ns_record) expected_dict['ns-record'] = ','.join(ns_record)
expected_dict['slave-amount'] = str(slave_amount) expected_dict['slave-amount'] = str(slave_amount)
...@@ -199,11 +199,11 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase): ...@@ -199,11 +199,11 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
).getConnectionParameterDict()) ).getConnectionParameterDict())
return parameter_dict_list return parameter_dict_list
def dns_query(self, domain_name, subnet): def dns_query(self, domain_name, subnet, ipv6, port):
message = dns.message.make_query(domain_name, 'A') message = dns.message.make_query(domain_name, 'A')
client_subnet_option = dns.edns.ECSOption(subnet) client_subnet_option = dns.edns.ECSOption(subnet)
message.use_edns(options=[client_subnet_option]) message.use_edns(options=[client_subnet_option])
answer = dns.query.udp(message, self._ipv6_address, port=DNS_PORT) answer = dns.query.udp(message, ipv6, port=port)
return answer.get_rrset( return answer.get_rrset(
dns.message.ANSWER, dns.message.ANSWER,
dns.name.from_text(domain_name), dns.name.from_text(domain_name),
...@@ -211,8 +211,9 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase): ...@@ -211,8 +211,9 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
dns.rdatatype.CNAME dns.rdatatype.CNAME
).to_text().split()[-1] ).to_text().split()[-1]
def _test_dns_resolver(self): def _test_dns_resolver(self, dns_quantity):
slave_parameter_dict_dict = self.getSlaveParameterDictDict() slave_parameter_dict_dict = self.getSlaveParameterDictDict()
connection_dict = self.computer_partition.getConnectionParameterDict()
subnet_dict = { subnet_dict = {
'africa': AFRICAN_SUBNET, 'africa': AFRICAN_SUBNET,
'china-telecom': CHINA_TELECOM_SUBNET, 'china-telecom': CHINA_TELECOM_SUBNET,
...@@ -248,21 +249,27 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase): ...@@ -248,21 +249,27 @@ class PowerDNSSlaveTestCase(PowerDNSTestCase):
slave_parameter_dict['record'], slave_parameter_dict['applicable-zone'] slave_parameter_dict['record'], slave_parameter_dict['applicable-zone']
) )
for region in subnet_dict: for region in subnet_dict:
self.assertEqual( for replicate_nb in range(1, dns_quantity + 1):
slave_parameter_dict.get( ns_id = 'ns%s' % replicate_nb
region, self.assertEqual(
'%s.%s.' % ( slave_parameter_dict.get(
default_rr_dict[region], slave_parameter_dict['origin']) region,
), '%s.%s.' % (
self.dns_query(domain_name, subnet_dict[region]) default_rr_dict[region], slave_parameter_dict['origin'])
) ),
self.dns_query(
domain_name,
subnet_dict[region],
connection_dict[ns_id + '-ipv6'],
int(connection_dict[ns_id + '-port']))
)
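The dns_query helper used above now targets each replicated PowerDNS partition on its own IPv6 address and published port, via dnspython's EDNS Client Subnet option. A standalone sketch with example values (address, port and subnet are illustrative, not the test's real constants):
import dns.edns
import dns.message
import dns.query
message = dns.message.make_query('www.example.com', 'A')
message.use_edns(options=[dns.edns.ECSOption('203.0.113.1', 24)])  # pretend client subnet
answer = dns.query.udp(message, '2001:db8::53', port=5353)         # per-partition IPv6 + port
print(answer.answer)  # answer section RRsets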
def _test_slaves(self, dns_quantity=1): def _test_slaves(self, dns_quantity=1):
self._test_parameter_dict( self._test_parameter_dict(
dns_quantity=dns_quantity, dns_quantity=dns_quantity,
slave_amount=len(self.getSlaveParameterDictDict()) slave_amount=len(self.getSlaveParameterDictDict())
) )
self._test_dns_resolver() self._test_dns_resolver(dns_quantity)
class TestSlaveRequest(PowerDNSSlaveTestCase): class TestSlaveRequest(PowerDNSSlaveTestCase):
...@@ -421,7 +428,7 @@ class TestSlaveRequestDomains(TestSlaveRequest): ...@@ -421,7 +428,7 @@ class TestSlaveRequestDomains(TestSlaveRequest):
} }
# Because all powerdns instances run under the same ip address during tests, # Because all powerdns instances run under the same ipv4 address during tests,
# there is a port conflict between these instances # there is a port conflict between these instances
@skip @skip
class TestMultipleInstances(TestSlaveRequestDomains): class TestMultipleInstances(TestSlaveRequestDomains):
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
[instance-profile] [instance-profile]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 39cdfbd3bdfcd48eddb4132ff9dcda62 md5sum = 7f9749ab75475bd5d98be27a570c7731
[instance-default] [instance-default]
filename = instance-default.cfg.in filename = instance-default.cfg.in
......
...@@ -18,12 +18,12 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename} ...@@ -18,12 +18,12 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:filename}
extensions = jinja2.ext.do extensions = jinja2.ext.do
context = context =
key slapparameter_dict slap-configuration:configuration key slapparameter_dict slap-configuration:configuration
raw software_parts_directory {{ buildout['parts-directory'] }} raw software_parts_directory {{ buildout['parts-directory'] }}
raw proftpd_bin {{ proftpd_bin }} raw proftpd_bin {{ proftpd_bin }}
raw ftpasswd_bin {{ ftpasswd_bin }} raw ftpasswd_bin {{ ftpasswd_bin }}
raw ftpdctl_bin {{ ftpdctl_bin }} raw ftpdctl_bin {{ ftpdctl_bin }}
raw ssh_keygen_bin {{ ssh_keygen_bin }} raw ssh_keygen_bin {{ ssh_keygen_bin }}
raw template_monitor {{ template_monitor }} raw template_monitor {{ template_monitor }}
[instance-default] [instance-default]
<= instance-template <= instance-template
......
...@@ -85,6 +85,7 @@ KEDIFA_PORT = '15080' ...@@ -85,6 +85,7 @@ KEDIFA_PORT = '15080'
# IP to originate requests from # IP to originate requests from
# has to be not partition one # has to be not partition one
SOURCE_IP = '127.0.0.1' SOURCE_IP = '127.0.0.1'
SOURCE_IPV6 = '::1'
# IP on which test run, in order to mimic HTTP[s] access # IP on which test run, in order to mimic HTTP[s] access
TEST_IP = os.environ['SLAPOS_TEST_IPV4'] TEST_IP = os.environ['SLAPOS_TEST_IPV4']
...@@ -321,7 +322,11 @@ class TestDataMixin(object): ...@@ -321,7 +322,11 @@ class TestDataMixin(object):
for replacement in sorted(data_replacement_dict.keys()): for replacement in sorted(data_replacement_dict.keys()):
value = data_replacement_dict[replacement] value = data_replacement_dict[replacement]
runtime_data = runtime_data.replace(value, replacement) if isinstance(value, list):
for v in value:
runtime_data = runtime_data.replace(v, replacement)
else:
runtime_data = runtime_data.replace(value, replacement)
longMessage = self.longMessage longMessage = self.longMessage
self.longMessage = True self.longMessage = True
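The replacement loop above now accepts a list of concrete values for a single placeholder, which is what lets the distinct per-partition IPv6 addresses all normalise to @@_ipv6_address@@ in the test data. A minimal sketch of that behaviour with made-up values:
data_replacement_dict = {
    '@@_ipv6_address@@': ['2001:db8::10', '2001:db8::11'],  # example addresses
    '@@_server_http_port@@': '8080',
}
runtime_data = 'master 2001:db8::10 kedifa 2001:db8::11 port 8080'
for replacement in sorted(data_replacement_dict):
    value = data_replacement_dict[replacement]
    for v in (value if isinstance(value, list) else [value]):
        runtime_data = runtime_data.replace(v, replacement)
print(runtime_data)
# master @@_ipv6_address@@ kedifa @@_ipv6_address@@ port @@_server_http_port@@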
...@@ -439,12 +444,15 @@ class TestDataMixin(object): ...@@ -439,12 +444,15 @@ class TestDataMixin(object):
# sent like this to the real master # sent like this to the real master
parameter_dict['_'] = json.loads(parameter_dict['_']) parameter_dict['_'] = json.loads(parameter_dict['_'])
parameter_dict['timestamp'] = '@@TIMESTAMP@@' parameter_dict['timestamp'] = '@@TIMESTAMP@@'
# remove ip_list since it's unused and the order may be unstable
parameter_dict.pop('ip_list', None)
cluster_request_parameter_list.append(parameter_dict) cluster_request_parameter_list.append(parameter_dict)
# XXX: Dirty decode/encode/decode...? # XXX: Dirty decode/encode/decode...?
data_replacement_dict = { data_replacement_dict = {
'@@_ipv4_address@@': self._ipv4_address, '@@_ipv4_address@@': self._ipv4_address,
'@@_ipv6_address@@': self._ipv6_address, '@@_ipv6_address@@': [
self.master_ipv6, self.kedifa_ipv6, self.caddy_frontend1_ipv6],
'@@_server_http_port@@': str(self._server_http_port), '@@_server_http_port@@': str(self._server_http_port),
'@@_server_https_auth_port@@': str(self._server_https_auth_port), '@@_server_https_auth_port@@': str(self._server_https_auth_port),
'@@_server_https_port@@': str(self._server_https_port), '@@_server_https_port@@': str(self._server_https_port),
...@@ -736,7 +744,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -736,7 +744,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.backend_url = 'http://%s:%s/' % server.server_address cls.backend_url = 'http://%s:%s/' % server.server_address
server_process = multiprocessing.Process( server_process = multiprocessing.Process(
target=server.serve_forever, name='HTTPServer') target=server.serve_forever, name='HTTPServer', daemon=True)
server_process.start() server_process.start()
# from now on, socket is used by server subprocess, we can close it # from now on, socket is used by server subprocess, we can close it
server.socket.close() server.socket.close()
...@@ -744,7 +752,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -744,7 +752,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.backend_https_url = 'https://%s:%s/' % server_https.server_address cls.backend_https_url = 'https://%s:%s/' % server_https.server_address
server_https_process = multiprocessing.Process( server_https_process = multiprocessing.Process(
target=server_https.serve_forever, name='HTTPSServer') target=server_https.serve_forever, name='HTTPSServer', daemon=True)
server_https_process.start() server_https_process.start()
server_https.socket.close() server_https.socket.close()
cls.logger.debug('Started process %s' % (server_https_process,)) cls.logger.debug('Started process %s' % (server_https_process,))
...@@ -756,7 +764,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -756,7 +764,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
(cls._ipv4_address, cls._server_netloc_a_http_port), (cls._ipv4_address, cls._server_netloc_a_http_port),
NetlocHandler) NetlocHandler)
netloc_a_http_process = multiprocessing.Process( netloc_a_http_process = multiprocessing.Process(
target=netloc_a_http.serve_forever, name='netloc-a-http') target=netloc_a_http.serve_forever, name='netloc-a-http', daemon=True)
netloc_a_http_process.start() netloc_a_http_process.start()
netloc_a_http.socket.close() netloc_a_http.socket.close()
...@@ -764,7 +772,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -764,7 +772,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
(cls._ipv4_address, cls._server_netloc_b_http_port), (cls._ipv4_address, cls._server_netloc_b_http_port),
NetlocHandler) NetlocHandler)
netloc_b_http_process = multiprocessing.Process( netloc_b_http_process = multiprocessing.Process(
target=netloc_b_http.serve_forever, name='netloc-b-http') target=netloc_b_http.serve_forever, name='netloc-b-http', daemon=True)
netloc_b_http_process.start() netloc_b_http_process.start()
netloc_b_http.socket.close() netloc_b_http.socket.close()
...@@ -821,7 +829,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -821,7 +829,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
% server_https_auth.server_address % server_https_auth.server_address
self.server_https_auth_process = multiprocessing.Process( self.server_https_auth_process = multiprocessing.Process(
target=server_https_auth.serve_forever, name='HTTPSServerAuth') target=server_https_auth.serve_forever, name='HTTPSServerAuth', daemon=True)
self.server_https_auth_process.start() self.server_https_auth_process.start()
server_https_auth.socket.close() server_https_auth.socket.close()
self.logger.debug('Started process %s' % (self.server_https_auth_process,)) self.logger.debug('Started process %s' % (self.server_https_auth_process,))
...@@ -1063,7 +1071,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1063,7 +1071,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
generate_auth_url = parameter_dict.pop('%skey-generate-auth-url' % ( generate_auth_url = parameter_dict.pop('%skey-generate-auth-url' % (
prefix,)) prefix,))
upload_url = parameter_dict.pop('%skey-upload-url' % (prefix,)) upload_url = parameter_dict.pop('%skey-upload-url' % (prefix,))
kedifa_ipv6_base = 'https://[%s]:%s' % (self._ipv6_address, KEDIFA_PORT) kedifa_ipv6_base = 'https://[%s]:%s' % (self.kedifa_ipv6, KEDIFA_PORT)
base = '^' + kedifa_ipv6_base.replace( base = '^' + kedifa_ipv6_base.replace(
'[', r'\[').replace(']', r'\]') + '/.{32}' '[', r'\[').replace(']', r'\]') + '/.{32}'
self.assertRegex( self.assertRegex(
...@@ -1078,7 +1086,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1078,7 +1086,7 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
kedifa_caucase_url = parameter_dict.pop('kedifa-caucase-url') kedifa_caucase_url = parameter_dict.pop('kedifa-caucase-url')
self.assertEqual( self.assertEqual(
kedifa_caucase_url, kedifa_caucase_url,
'http://[%s]:%s' % (self._ipv6_address, CAUCASE_PORT), 'http://[%s]:%s' % (self.kedifa_ipv6, CAUCASE_PORT),
) )
return generate_auth_url, upload_url return generate_auth_url, upload_url
...@@ -1244,6 +1252,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase): ...@@ -1244,6 +1252,13 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.setUp = lambda self: self.fail('Setup Class failed.') cls.setUp = lambda self: self.fail('Setup Class failed.')
raise raise
# Lookup partitions IPv6
cls.master_ipv6 = cls.computer_partition_ipv6_address
kedifa_partition = cls.getPartitionId('kedifa')
cls.kedifa_ipv6 = cls.getPartitionIPv6(kedifa_partition)
caddy_frontend1_partition = cls.getPartitionId('caddy-frontend-1')
cls.caddy_frontend1_ipv6 = cls.getPartitionIPv6(caddy_frontend1_partition)
class SlaveHttpFrontendTestCase(HttpFrontendTestCase): class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
def _get_backend_haproxy_configuration(self): def _get_backend_haproxy_configuration(self):
...@@ -1380,7 +1395,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase): ...@@ -1380,7 +1395,7 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
'url': 'http://%s.example.com' % (hostname, ), 'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ), 'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ), 'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
}) })
self.assertEqual( self.assertEqual(
expected_parameter_dict, expected_parameter_dict,
...@@ -1429,8 +1444,8 @@ class TestMasterRequestDomain(HttpFrontendTestCase, TestDataMixin): ...@@ -1429,8 +1444,8 @@ class TestMasterRequestDomain(HttpFrontendTestCase, TestDataMixin):
self.assertEqual( self.assertEqual(
{ {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '0', 'accepted-slave-amount': '0',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -1461,8 +1476,8 @@ class TestMasterRequest(HttpFrontendTestCase, TestDataMixin): ...@@ -1461,8 +1476,8 @@ class TestMasterRequest(HttpFrontendTestCase, TestDataMixin):
self.assertNodeInformationWithPop(parameter_dict) self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual( self.assertEqual(
{ {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'None', 'domain': 'None',
'accepted-slave-amount': '0', 'accepted-slave-amount': '0',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -1503,8 +1518,11 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest( ...@@ -1503,8 +1518,11 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
_, kedifa_key_pem, _, kedifa_csr_pem = createCSR('Kedifa User') _, kedifa_key_pem, _, kedifa_csr_pem = createCSR('Kedifa User')
_, backend_client_key_pem, _, backend_client_csr_pem = createCSR( _, backend_client_key_pem, _, backend_client_csr_pem = createCSR(
'Backend Client User') 'Backend Client User')
parameter_dict = cls.requestDefaultInstance( cls.computer_partition = cls.requestDefaultInstance()
).getConnectionParameterDict() # Compute IPv6 here since super()._setUpClass failed
cls.computer_partition_ipv6_address = cls.getPartitionIPv6(
cls.computer_partition.getId())
parameter_dict = cls.computer_partition.getConnectionParameterDict()
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict) cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
cls._fetchBackendClientCaCertificateFile(parameter_dict) cls._fetchBackendClientCaCertificateFile(parameter_dict)
with open(cls.kedifa_caucase_ca_certificate_file) as fh: with open(cls.kedifa_caucase_ca_certificate_file) as fh:
...@@ -1577,8 +1595,8 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest( ...@@ -1577,8 +1595,8 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
self.assertNodeInformationWithPop(parameter_dict) self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual( self.assertEqual(
{ {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'None', 'domain': 'None',
'accepted-slave-amount': '0', 'accepted-slave-amount': '0',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -1937,34 +1955,36 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -1937,34 +1955,36 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
monitor_setup_url_key = 'monitor-setup-url' monitor_setup_url_key = 'monitor-setup-url'
def test_monitor_setup(self): def test_monitor_setup(self):
IP = self._ipv6_address MASTER_IP = self.master_ipv6
KEDIFA_IP = self.kedifa_ipv6
CADDY_IP = self.caddy_frontend1_ipv6
self.monitor_configuration_list = [ self.monitor_configuration_list = [
{ {
'htmlUrl': 'https://[%s]:8401/public/feed' % (IP,), 'htmlUrl': 'https://[%s]:8401/public/feed' % (MASTER_IP,),
'text': 'testing partition 0', 'text': 'testing partition 0',
'title': 'testing partition 0', 'title': 'testing partition 0',
'type': 'rss', 'type': 'rss',
'url': 'https://[%s]:8401/share/private/' % (IP,), 'url': 'https://[%s]:8401/share/private/' % (MASTER_IP,),
'version': 'RSS', 'version': 'RSS',
'xmlUrl': 'https://[%s]:8401/public/feed' % (IP,), 'xmlUrl': 'https://[%s]:8401/public/feed' % (MASTER_IP,),
}, },
{ {
'htmlUrl': 'https://[%s]:8402/public/feed' % (IP,), 'htmlUrl': 'https://[%s]:8402/public/feed' % (KEDIFA_IP,),
'text': 'kedifa', 'text': 'kedifa',
'title': 'kedifa', 'title': 'kedifa',
'type': 'rss', 'type': 'rss',
'url': 'https://[%s]:8402/share/private/' % (IP,), 'url': 'https://[%s]:8402/share/private/' % (KEDIFA_IP,),
'version': 'RSS', 'version': 'RSS',
'xmlUrl': 'https://[%s]:8402/public/feed' % (IP,), 'xmlUrl': 'https://[%s]:8402/public/feed' % (KEDIFA_IP,),
}, },
{ {
'htmlUrl': 'https://[%s]:8411/public/feed' % (IP,), 'htmlUrl': 'https://[%s]:8411/public/feed' % (CADDY_IP,),
'text': 'caddy-frontend-1', 'text': 'caddy-frontend-1',
'title': 'caddy-frontend-1', 'title': 'caddy-frontend-1',
'type': 'rss', 'type': 'rss',
'url': 'https://[%s]:8411/share/private/' % (IP,), 'url': 'https://[%s]:8411/share/private/' % (CADDY_IP,),
'version': 'RSS', 'version': 'RSS',
'xmlUrl': 'https://[%s]:8411/public/feed' % (IP,), 'xmlUrl': 'https://[%s]:8411/public/feed' % (CADDY_IP,),
}, },
] ]
connection_parameter_dict = self\ connection_parameter_dict = self\
...@@ -2098,8 +2118,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -2098,8 +2118,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
self.assertNodeInformationWithPop(parameter_dict) self.assertNodeInformationWithPop(parameter_dict)
expected_parameter_dict = { expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '62', 'accepted-slave-amount': '62',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -2429,15 +2449,15 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -2429,15 +2449,15 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
# check out access via IPv6 # check out access via IPv6
out_ipv6, err_ipv6 = self._curl( out_ipv6, err_ipv6 = self._curl(
parameter_dict['domain'], self._ipv6_address, HTTPS_PORT) parameter_dict['domain'], self.caddy_frontend1_ipv6, HTTPS_PORT,
source_ip=SOURCE_IPV6)
try: try:
j = json.loads(out_ipv6.decode()) j = json.loads(out_ipv6.decode())
except Exception: except Exception:
raise ValueError('JSON decode problem in:\n%s' % (out_ipv6.decode(),)) raise ValueError('JSON decode problem in:\n%s' % (out_ipv6.decode(),))
self.assertEqual( self.assertEqual(
self._ipv6_address, SOURCE_IPV6,
j['Incoming Headers']['x-forwarded-for'] j['Incoming Headers']['x-forwarded-for']
) )
def test_url_netloc_list(self): def test_url_netloc_list(self):
...@@ -4618,7 +4638,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -4618,7 +4638,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
result.headers['Location'] result.headers['Location']
) )
def _curl(self, domain, ip, port, cookie=None): def _curl(self, domain, ip, port, cookie=None, source_ip=None):
replacement_dict = dict( replacement_dict = dict(
domain=domain, ip=ip, port=port) domain=domain, ip=ip, port=port)
curl_command = [ curl_command = [
...@@ -4628,6 +4648,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -4628,6 +4648,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
] ]
if cookie is not None: if cookie is not None:
curl_command.extend(['--cookie', cookie]) curl_command.extend(['--cookie', cookie])
if source_ip is not None:
curl_command.extend(['--interface', source_ip])
curl_command.extend([ curl_command.extend([
'https://%(domain)s:%(port)s/' % replacement_dict]) 'https://%(domain)s:%(port)s/' % replacement_dict])
prc = subprocess.Popen( prc = subprocess.Popen(
...@@ -4797,7 +4819,47 @@ class TestEnableHttp2ByDefaultFalseSlave(TestSlave): ...@@ -4797,7 +4819,47 @@ class TestEnableHttp2ByDefaultFalseSlave(TestSlave):
test_enable_http3_false_http_version = '1' test_enable_http3_false_http_version = '1'
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
class ReplicateSlaveMixin(object):
def frontends1And2HaveDifferentIPv6(self):
_, *prefixlen = self._ipv6_address.split('/')
return bool(prefixlen and int(prefixlen[0]) < 127)
def requestSecondFrontend(self, final_state='stopped'):
ipv6_collision = not self.frontends1And2HaveDifferentIPv6()
# now instantiate 2nd partition in started state
# and due to port collision, stop the first one...
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.requestSlaves()
try:
self.slap.waitForInstance(self.instance_max_retry)
except Exception:
if ipv6_collision:
raise
# for now, accept failing promise due to stopped frontend
finally:
# ...and be nice, put back the first one online
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': final_state,
})
self.requestDefaultInstance()
for _ in range(3):
try:
self.slap.waitForInstance(self.instance_max_retry)
except Exception:
if ipv6_collision:
raise
# for now, accept failing promise due to stopped frontend
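The ReplicateSlaveMixin above uses frontends1And2HaveDifferentIPv6 to decide whether a failing waitForInstance may be tolerated; the check itself just looks at the prefix length of the test IPv6 network. A small sketch of that parsing with example inputs:
def frontends_have_different_ipv6(ipv6_address):
    # Same logic as the mixin method: a '/prefixlen' suffix shorter than /127
    # leaves room for more than one address.
    _, *prefixlen = ipv6_address.split('/')
    return bool(prefixlen and int(prefixlen[0]) < 127)
print(frontends_have_different_ipv6('2001:db8::1'))      # False: bare address, no prefix
print(frontends_have_different_ipv6('2001:db8::1/64'))   # True
print(frontends_have_different_ipv6('2001:db8::1/128'))  # False: single-address prefix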
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin, ReplicateSlaveMixin):
instance_parameter_dict = { instance_parameter_dict = {
'domain': 'example.com', 'domain': 'example.com',
'port': HTTPS_PORT, 'port': HTTPS_PORT,
...@@ -4819,27 +4881,12 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -4819,27 +4881,12 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
}, },
} }
def frontends1And2HaveDifferentIPv6(self):
_, *prefixlen = self._ipv6_address.split('/')
return bool(prefixlen and int(prefixlen[0]) < 127)
def test(self): def test(self):
# now instantiate 2nd partition in started state self.requestSecondFrontend()
# and due to port collision, stop the first one...
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.requestSlaves()
self.slap.waitForInstance(self.instance_max_retry)
# ...and be nice, put back the first one online
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': 'stopped',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.updateSlaveConnectionParameterDictDict() self.updateSlaveConnectionParameterDictDict()
# the real assertions follow... # the real assertions follow...
...@@ -4871,7 +4918,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -4871,7 +4918,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
'url': 'http://replicate.example.com', 'url': 'http://replicate.example.com',
'site_url': 'http://replicate.example.com', 'site_url': 'http://replicate.example.com',
'secure_access': 'https://replicate.example.com', 'secure_access': 'https://replicate.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
}, },
parameter_dict parameter_dict
) )
...@@ -4906,7 +4953,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin): ...@@ -4906,7 +4953,7 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
) )
class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase): class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase, ReplicateSlaveMixin):
instance_parameter_dict = { instance_parameter_dict = {
'domain': 'example.com', 'domain': 'example.com',
'port': HTTPS_PORT, 'port': HTTPS_PORT,
...@@ -4929,27 +4976,7 @@ class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase): ...@@ -4929,27 +4976,7 @@ class TestReplicateSlaveOtherDestroyed(SlaveHttpFrontendTestCase):
} }
def test_extra_slave_instance_list_not_present_destroyed_request(self): def test_extra_slave_instance_list_not_present_destroyed_request(self):
# now instantiate 2nd partition in started state self.requestSecondFrontend(final_state='destroyed')
# and due to port collision, stop the first one
self.instance_parameter_dict.update({
'-frontend-quantity': 2,
'-sla-2-computer_guid': self.slap._computer_id,
'-frontend-1-state': 'stopped',
'-frontend-2-state': 'started',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
# now start back first instance, and destroy 2nd one
self.instance_parameter_dict.update({
'-frontend-1-state': 'started',
'-frontend-2-state': 'destroyed',
})
self.requestDefaultInstance()
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
self.slap.waitForInstance(self.instance_max_retry)
buildout_file = os.path.join( buildout_file = os.path.join(
self.getMasterPartitionPath(), 'instance-master.cfg') self.getMasterPartitionPath(), 'instance-master.cfg')
...@@ -5331,8 +5358,8 @@ class TestSlaveSlapOSMasterCertificateCompatibility( ...@@ -5331,8 +5358,8 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict) self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = { expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '12', 'accepted-slave-amount': '12',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -5838,8 +5865,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate( ...@@ -5838,8 +5865,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict) self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = { expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '1', 'accepted-slave-amount': '1',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -5930,8 +5957,8 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -5930,8 +5957,8 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict) self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = { expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '2', 'accepted-slave-amount': '2',
'rejected-slave-amount': '0', 'rejected-slave-amount': '0',
...@@ -6192,8 +6219,8 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6192,8 +6219,8 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
self.assertRejectedSlavePromiseWithPop(parameter_dict) self.assertRejectedSlavePromiseWithPop(parameter_dict)
expected_parameter_dict = { expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address, 'monitor-base-url': 'https://[%s]:8401' % self.master_ipv6,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
'domain': 'example.com', 'domain': 'example.com',
'accepted-slave-amount': '3', 'accepted-slave-amount': '3',
'rejected-slave-amount': '28', 'rejected-slave-amount': '28',
...@@ -6428,7 +6455,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase): ...@@ -6428,7 +6455,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
'url': 'http://defaultpathunsafe.example.com', 'url': 'http://defaultpathunsafe.example.com',
'site_url': 'http://defaultpathunsafe.example.com', 'site_url': 'http://defaultpathunsafe.example.com',
'secure_access': 'https://defaultpathunsafe.example.com', 'secure_access': 'https://defaultpathunsafe.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address, 'backend-client-caucase-url': 'http://[%s]:8990' % self.master_ipv6,
}, },
parameter_dict parameter_dict
) )
...@@ -6722,8 +6749,8 @@ class TestPassedRequestParameter(HttpFrontendTestCase): ...@@ -6722,8 +6749,8 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
'kedifa'].pop('monitor-password') 'kedifa'].pop('monitor-password')
) )
backend_client_caucase_url = 'http://[%s]:8990' % (self._ipv6_address,) backend_client_caucase_url = 'http://[%s]:8990' % (self.master_ipv6,)
kedifa_caucase_url = 'http://[%s]:15090' % (self._ipv6_address,) kedifa_caucase_url = 'http://[%s]:15090' % (self.kedifa_ipv6,)
expected_partition_parameter_dict_dict = { expected_partition_parameter_dict_dict = {
'caddy-frontend-1': { 'caddy-frontend-1': {
'X-software_release_url': base_software_url, 'X-software_release_url': base_software_url,
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -80,16 +70,6 @@ ...@@ -80,16 +70,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -119,16 +99,6 @@ ...@@ -119,16 +99,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -5,16 +5,6 @@ ...@@ -5,16 +5,6 @@
"enable-http2-by-default": "false", "enable-http2-by-default": "false",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -844,16 +834,6 @@ ...@@ -844,16 +834,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -885,16 +865,6 @@ ...@@ -885,16 +865,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -5,16 +5,6 @@ ...@@ -5,16 +5,6 @@
"caucase_port": "15090", "caucase_port": "15090",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -39,16 +29,6 @@ ...@@ -39,16 +29,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -77,16 +57,6 @@ ...@@ -77,16 +57,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -3,16 +3,6 @@ ...@@ -3,16 +3,6 @@
"caucase_port": "15090", "caucase_port": "15090",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -37,16 +27,6 @@ ...@@ -37,16 +27,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -75,16 +55,6 @@ ...@@ -75,16 +55,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -38,16 +28,6 @@ ...@@ -38,16 +28,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -77,16 +57,6 @@ ...@@ -77,16 +57,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -52,16 +42,6 @@ ...@@ -52,16 +42,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -91,16 +71,6 @@ ...@@ -91,16 +71,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -52,16 +42,6 @@ ...@@ -52,16 +42,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -91,16 +71,6 @@ ...@@ -91,16 +71,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -8,16 +8,6 @@ ...@@ -8,16 +8,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -56,16 +46,6 @@ ...@@ -56,16 +46,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -95,16 +75,6 @@ ...@@ -95,16 +75,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
...@@ -134,16 +104,6 @@ ...@@ -134,16 +104,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-2", "instance_title": "caddy-frontend-2",
"ip_list": [
[
"T-3",
"@@_ipv4_address@@"
],
[
"T-3",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-3", "slap_computer_partition_id": "T-3",
......
...@@ -38,23 +38,3 @@ T-2:slave-instrospection-nginx-{hash-generic}-on-watch RUNNING ...@@ -38,23 +38,3 @@ T-2:slave-instrospection-nginx-{hash-generic}-on-watch RUNNING
T-2:slave-introspection-safe-graceful EXITED T-2:slave-introspection-safe-graceful EXITED
T-2:trafficserver-{hash-generic}-on-watch RUNNING T-2:trafficserver-{hash-generic}-on-watch RUNNING
T-2:trafficserver-reload EXITED T-2:trafficserver-reload EXITED
T-3:backend-client-login-certificate-caucase-updater-on-watch STOPPED
T-3:backend-haproxy-{hash-generic}-on-watch STOPPED
T-3:backend-haproxy-rsyslogd-{hash-generic}-on-watch STOPPED
T-3:backend-haproxy-safe-graceful EXITED
T-3:bootstrap-monitor EXITED
T-3:certificate_authority-{hash-generic}-on-watch STOPPED
T-3:crond-{hash-generic}-on-watch STOPPED
T-3:expose-csr-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-rsyslogd-{hash-generic}-on-watch STOPPED
T-3:frontend-haproxy-safe-graceful EXITED
T-3:kedifa-login-certificate-caucase-updater-on-watch STOPPED
T-3:kedifa-updater-{hash-generic}-on-watch STOPPED
T-3:logrotate-setup-validate EXITED
T-3:monitor-httpd-{hash-generic}-on-watch STOPPED
T-3:monitor-httpd-graceful EXITED
T-3:slave-instrospection-nginx-{hash-generic}-on-watch STOPPED
T-3:slave-introspection-safe-graceful EXITED
T-3:trafficserver-{hash-generic}-on-watch STOPPED
T-3:trafficserver-reload EXITED
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -843,16 +833,6 @@ ...@@ -843,16 +833,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -883,16 +863,6 @@ ...@@ -883,16 +863,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -5,16 +5,6 @@ ...@@ -5,16 +5,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -67,16 +57,6 @@ ...@@ -67,16 +57,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -107,16 +87,6 @@ ...@@ -107,16 +87,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -257,16 +247,6 @@ ...@@ -257,16 +247,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -297,16 +277,6 @@ ...@@ -297,16 +277,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -4,16 +4,6 @@ ...@@ -4,16 +4,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -65,16 +55,6 @@ ...@@ -65,16 +55,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -105,16 +85,6 @@ ...@@ -105,16 +85,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -6,16 +6,6 @@ ...@@ -6,16 +6,6 @@
"full_address_list": [], "full_address_list": [],
"http3-port": "11443", "http3-port": "11443",
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -845,16 +835,6 @@ ...@@ -845,16 +835,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -885,16 +865,6 @@ ...@@ -885,16 +865,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -6,16 +6,6 @@ ...@@ -6,16 +6,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -214,16 +204,6 @@ ...@@ -214,16 +204,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -255,16 +235,6 @@ ...@@ -255,16 +235,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -6,16 +6,6 @@ ...@@ -6,16 +6,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -54,16 +44,6 @@ ...@@ -54,16 +44,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -95,16 +75,6 @@ ...@@ -95,16 +75,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -6,16 +6,6 @@ ...@@ -6,16 +6,6 @@
"domain": "example.com", "domain": "example.com",
"full_address_list": [], "full_address_list": [],
"instance_title": "testing partition 0", "instance_title": "testing partition 0",
"ip_list": [
[
"T-0",
"@@_ipv4_address@@"
],
[
"T-0",
"@@_ipv6_address@@"
]
],
"kedifa_port": "15080", "kedifa_port": "15080",
"plain_http_port": "11080", "plain_http_port": "11080",
"port": "11443", "port": "11443",
...@@ -54,16 +44,6 @@ ...@@ -54,16 +44,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "kedifa", "instance_title": "kedifa",
"ip_list": [
[
"T-1",
"@@_ipv4_address@@"
],
[
"T-1",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-1", "slap_computer_partition_id": "T-1",
...@@ -95,16 +75,6 @@ ...@@ -95,16 +75,6 @@
}, },
"full_address_list": [], "full_address_list": [],
"instance_title": "caddy-frontend-1", "instance_title": "caddy-frontend-1",
"ip_list": [
[
"T-2",
"@@_ipv4_address@@"
],
[
"T-2",
"@@_ipv6_address@@"
]
],
"root_instance_title": "testing partition 0", "root_instance_title": "testing partition 0",
"slap_computer_id": "local", "slap_computer_id": "local",
"slap_computer_partition_id": "T-2", "slap_computer_partition_id": "T-2",
......
...@@ -15,4 +15,4 @@ ...@@ -15,4 +15,4 @@
[instance.cfg.in] [instance.cfg.in]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 69237df07b8819e2eb683702b8cd199a md5sum = 361991f333119f22c8266dc8bde7bc57
...@@ -110,7 +110,7 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:_buildout_se ...@@ -110,7 +110,7 @@ output = ${buildout:parts-directory}/${:_buildout_section_name_}/${:_buildout_se
recipe = plone.recipe.command recipe = plone.recipe.command
command = command =
if [ ! -f '${:csr}' ] ; then if [ ! -f '${:csr}' ] ; then
{{ openssl_bin }} req \ {{ openssl_bin }} req \
-newkey rsa:2048 \ -newkey rsa:2048 \
-batch \ -batch \
-new \ -new \
......
...@@ -158,14 +158,14 @@ class TestResticRestServer(SlapOSInstanceTestCase): ...@@ -158,14 +158,14 @@ class TestResticRestServer(SlapOSInstanceTestCase):
return cnx.sock._sslobj.getpeercert() return cnx.sock._sslobj.getpeercert()
cert_before = _getpeercert() cert_before = _getpeercert()
# execute certificate updater two month later, when it's time to renew certificate. # execute certificate updater when it's time to renew certificate.
# use a timeout, because this service runs forever # use a timeout, because this service runs forever
subprocess.run( subprocess.run(
( (
'timeout', 'timeout',
'5', '5',
'faketime', 'faketime',
'+2 months', '+63 days',
os.path.join( os.path.join(
self.computer_partition_root_path, self.computer_partition_root_path,
'etc/service/rest-server-certificate-updater'), 'etc/service/rest-server-certificate-updater'),
......
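
The hunk above caps the long-running certificate updater with timeout(1) and shifts its clock with faketime so that renewal becomes due ('+63 days' instead of the ambiguous '+2 months'). A minimal sketch of that same pattern in Python; the partition path is illustrative, not taken from the release:

import os
import subprocess

# Hypothetical partition root; the real test derives it from the instance.
partition_root = '/srv/slapgrid/slappart0'
updater = os.path.join(partition_root, 'etc/service/rest-server-certificate-updater')

subprocess.run(
    (
        'timeout', '5',          # the updater runs forever, so stop it after 5 seconds
        'faketime', '+63 days',  # jump past the certificate renewal threshold
        updater,
    ),
    capture_output=True,
    check=False,  # timeout(1) exits 124 after killing the process, which is expected here
)
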
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
# Seleniumrunner responsibility is to install Xvfb as # Seleniumrunner responsibility is to install Xvfb as
# ${buildout:parts-directory}/xserver/bin/Xvfb, a default firefox as # ${buildout:parts-directory}/xserver/bin/Xvfb, a default firefox as
# ${buildout:bin-directory}/firefox and a geckodriver as # ${buildout:bin-directory}/firefox and a geckodriver as
# ${buildout:bin-directory}/geckodriver for erp5testnode. # ${buildout:bin-directory}/geckodriver for erp5testnode.
[buildout] [buildout]
extends = extends =
......
...@@ -125,7 +125,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin): ...@@ -125,7 +125,7 @@ class TestJupyter(ERP5InstanceTestCase, TestPublishedURLIsReachableMixin):
param_dict = self.getRootPartitionConnectionParameterDict() param_dict = self.getRootPartitionConnectionParameterDict()
self.assertEqual( self.assertEqual(
f'https://[{self._ipv6_address}]:8888/tree', f'https://[{self.getPartitionIPv6(self.getPartitionId("jupyter"))}]:8888/tree',
param_dict['jupyter-url'] param_dict['jupyter-url']
) )
......
...@@ -15,4 +15,4 @@ ...@@ -15,4 +15,4 @@
[template] [template]
filename = instance.cfg filename = instance.cfg
md5sum = ed0f91f39d5eda903938aa527625f40d md5sum = f10fbca22d1d30dd7a4f36e1cd521b97
...@@ -95,6 +95,11 @@ inline = ...@@ -95,6 +95,11 @@ inline =
echo "To work on a test, execute:" echo "To work on a test, execute:"
echo " SLAPOS_TEST_DEBUG=1 {{ interpreter }} -m unittest discover -v" echo " SLAPOS_TEST_DEBUG=1 {{ interpreter }} -m unittest discover -v"
echo "from a folder containing software release test." echo "from a folder containing software release test."
echo "Tip: you may want to also add"
echo " SLAPOS_TEST_SKIP_SOFTWARE_REBUILD=1"
echo " SLAPOS_TEST_SKIP_SOFTWARE_CHECK=1"
echo " SLAPOS_TEST_LOG_DIRECTORY=<some-empty-directory>"
echo "See https://lab.nexedi.com/nexedi/slapos/tree/master/software/slapos-sr-testing"
echo echo
[publish] [publish]
......
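
The wrapper above now also advertises environment switches that speed up repeated local runs of software release tests. A small sketch of exporting them programmatically before running test discovery; the log directory and working directory are assumptions, only the variable names come from the hunk:

import os
import subprocess

env = dict(
    os.environ,
    SLAPOS_TEST_DEBUG='1',
    SLAPOS_TEST_SKIP_SOFTWARE_REBUILD='1',   # per its name, reuse already built software
    SLAPOS_TEST_SKIP_SOFTWARE_CHECK='1',     # per its name, skip the software check step
    SLAPOS_TEST_LOG_DIRECTORY='/tmp/slapos-test-logs',  # any empty directory
)

# Run from a folder containing software release tests, as the message suggests.
subprocess.run(
    ['python', '-m', 'unittest', 'discover', '-v'],
    cwd='software/slapos-sr-testing/test',  # hypothetical location
    env=env,
    check=True,
)
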
...@@ -15,11 +15,11 @@ ...@@ -15,11 +15,11 @@
[instance-theia] [instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in _update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = b406265f591f54a0d5a6aa3ff8522764 md5sum = c484bba770c6404ba0a5b2a958b07a68
[instance] [instance]
_update_hash_filename_ = instance.cfg.in _update_hash_filename_ = instance.cfg.in
md5sum = f322033a7670b9be29b1bf1bf9024b87 md5sum = 9658a11340c018de816d0de40683706a
[instance-import] [instance-import]
_update_hash_filename_ = instance-import.cfg.jinja.in _update_hash_filename_ = instance-import.cfg.jinja.in
......
...@@ -593,7 +593,11 @@ output = $${directory:bin}/$${:_buildout_section_name_} ...@@ -593,7 +593,11 @@ output = $${directory:bin}/$${:_buildout_section_name_}
embedded-request-exitcode-file = $${directory:statefiles}/embedded-request.exitcode embedded-request-exitcode-file = $${directory:statefiles}/embedded-request.exitcode
standalone-ran-before-flag = $${directory:statefiles}/standalone-ran-before.flag standalone-ran-before-flag = $${directory:statefiles}/standalone-ran-before.flag
shared-part-list = shared-part-list =
{%- if 'shared-part-list' in slap_connection %}
{{ slap_connection['shared-part-list'] | indent(2) }}
{%- else %}
{{ """${buildout:shared-part-list}""" | indent(2) }} {{ """${buildout:shared-part-list}""" | indent(2) }}
{%- endif %}
context = context =
raw python_for_standalone ${python-for-standalone:executable} raw python_for_standalone ${python-for-standalone:executable}
raw request_script_path $${directory:project}/request-embedded-instance.sh raw request_script_path $${directory:project}/request-embedded-instance.sh
......
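
The template change above prefers a 'shared-part-list' value carried in the slap connection section (wired in by the context addition in the next hunk) and only falls back to ${buildout:shared-part-list}. A simplified jinja2 rendering of that conditional, ignoring the escaping used in the real template and using made-up paths:

import jinja2

template = jinja2.Template(
    "shared-part-list =\n"
    "{%- if 'shared-part-list' in slap_connection %}\n"
    "  {{ slap_connection['shared-part-list'] | indent(2) }}\n"
    "{%- else %}\n"
    "  ${buildout:shared-part-list}\n"
    "{%- endif %}\n"
)

# With the key present, the connection value wins over the buildout fallback.
print(template.render(slap_connection={'shared-part-list': '/opt/shared\n/srv/shared'}))
print(template.render(slap_connection={}))
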
...@@ -33,6 +33,7 @@ url = ${instance-theia:output} ...@@ -33,6 +33,7 @@ url = ${instance-theia:output}
output = $${buildout:directory}/instance-theia.cfg output = $${buildout:directory}/instance-theia.cfg
extensions = jinja2.ext.do extensions = jinja2.ext.do
context = context =
section slap_connection slap-connection
jsonkey default_parameter_dict :default-parameters jsonkey default_parameter_dict :default-parameters
key parameter_dict slap-configuration:configuration key parameter_dict slap-configuration:configuration
key root_title slap-configuration:root-instance-title key root_title slap-configuration:root-instance-title
......
...@@ -26,6 +26,7 @@ ...@@ -26,6 +26,7 @@
############################################################################## ##############################################################################
from __future__ import unicode_literals from __future__ import unicode_literals
import configparser
import json import json
import logging import logging
import os import os
...@@ -33,6 +34,7 @@ import re ...@@ -33,6 +34,7 @@ import re
import subprocess import subprocess
import sqlite3 import sqlite3
import time import time
import unittest
import netaddr import netaddr
import pexpect import pexpect
...@@ -45,6 +47,7 @@ from six.moves.urllib.parse import urlparse, urljoin ...@@ -45,6 +47,7 @@ from six.moves.urllib.parse import urlparse, urljoin
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass, SlapOSNodeCommandError
from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath from slapos.grid.svcbackend import getSupervisorRPC, _getSupervisordSocketPath
from slapos.proxy.db_version import DB_VERSION from slapos.proxy.db_version import DB_VERSION
from slapos.slap.standalone import SlapOSConfigWriter
theia_software_release_url = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg')) theia_software_release_url = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
...@@ -246,6 +249,12 @@ class TestTheia(TheiaTestCase): ...@@ -246,6 +249,12 @@ class TestTheia(TheiaTestCase):
proxy_path = self.getPath('srv', 'runner', 'var', 'proxy.db') proxy_path = self.getPath('srv', 'runner', 'var', 'proxy.db')
query = "SELECT partition_reference, address FROM partition_network%s" % DB_VERSION query = "SELECT partition_reference, address FROM partition_network%s" % DB_VERSION
ipv6, *prefixlen = self._ipv6_address.split('/')
if not prefixlen:
raise unittest.SkipTest('No IPv6 range')
elif int(prefixlen[0]) >= 123:
raise unittest.SkipTest('IPv6 range too small: %s' % self._ipv6_address)
with sqlite3.connect(proxy_path) as db: with sqlite3.connect(proxy_path) as db:
rows = db.execute(query).fetchall() rows = db.execute(query).fetchall()
partitions = set(p for p, _ in rows) partitions = set(p for p, _ in rows)
...@@ -510,6 +519,32 @@ class TestTheiaEnv(TheiaTestCase): ...@@ -510,6 +519,32 @@ class TestTheiaEnv(TheiaTestCase):
self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME']) self.assertEqual(theia_shell_env['HOME'], supervisord_env['HOME'])
class TestTheiaSharedPath(TheiaTestCase):
bogus_path = 'bogus'
@classmethod
def setUpClass(cls):
super(TestTheiaSharedPath, cls).setUpClass()
# Change shared part list to include bogus paths
cls.slap._shared_part_list.append(cls.bogus_path)
SlapOSConfigWriter(cls.slap).writeConfig(cls.slap._slapos_config)
# Re-instantiate
cls.slap._force_slapos_node_instance_all = True
try:
cls.waitForInstance()
finally:
cls.slap._force_slapos_node_instance_all = False
def test(self):
theia_cfg_path = self.getPath('srv', 'runner', 'etc', 'slapos.cfg')
cfg = configparser.ConfigParser()
cfg.read(theia_cfg_path)
self.assertTrue(cfg.has_option('slapos', 'shared_part_list'))
shared_parts_string = cfg.get('slapos', 'shared_part_list')
shared_parts_list = [s.strip() for s in shared_parts_string.splitlines()]
self.assertIn(self.bogus_path, shared_parts_list)
class ResilientTheiaMixin(object): class ResilientTheiaMixin(object):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
......
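
The new TestTheiaSharedPath case above appends a bogus entry to the shared part list, rewrites the standalone SlapOS configuration, and asserts that the embedded runner's slapos.cfg picked it up. A stripped-down version of that final check; the config path is made up:

import configparser

cfg_path = '/srv/slapgrid/slappart0/srv/runner/etc/slapos.cfg'  # hypothetical

cfg = configparser.ConfigParser()
cfg.read(cfg_path)

# shared_part_list is a newline-separated option; compare entry by entry.
shared_parts = [
    line.strip()
    for line in cfg.get('slapos', 'shared_part_list').splitlines()
    if line.strip()
]
assert 'bogus' in shared_parts
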
...@@ -184,6 +184,7 @@ class ExportAndImportMixin(object): ...@@ -184,6 +184,7 @@ class ExportAndImportMixin(object):
def assertPromiseSucess(self): def assertPromiseSucess(self):
# Force promises to recompute regardless of periodicity # Force promises to recompute regardless of periodicity
old_value = self.slap._force_slapos_node_instance_all
self.slap._force_slapos_node_instance_all = True self.slap._force_slapos_node_instance_all = True
try: try:
self.slap.waitForInstance(error_lines=0) self.slap.waitForInstance(error_lines=0)
...@@ -193,8 +194,8 @@ class ExportAndImportMixin(object): ...@@ -193,8 +194,8 @@ class ExportAndImportMixin(object):
self.assertNotIn('ERROR export script', s) self.assertNotIn('ERROR export script', s)
self.assertNotIn("Promise 'resiliency-import-promise.py' failed", s) self.assertNotIn("Promise 'resiliency-import-promise.py' failed", s)
self.assertNotIn('ERROR import script', s) self.assertNotIn('ERROR import script', s)
else: finally:
pass self.slap._force_slapos_node_instance_all = old_value
def _doExport(self): def _doExport(self):
# Compute last modification of the export exitcode file # Compute last modification of the export exitcode file
...@@ -247,6 +248,7 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC ...@@ -247,6 +248,7 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC
def assertPromiseFailure(self, *msg): def assertPromiseFailure(self, *msg):
# Force promises to recompute regardless of periodicity # Force promises to recompute regardless of periodicity
old_value = self.slap._force_slapos_node_instance_all
self.slap._force_slapos_node_instance_all = True self.slap._force_slapos_node_instance_all = True
try: try:
self.slap.waitForInstance(error_lines=0) self.slap.waitForInstance(error_lines=0)
...@@ -256,6 +258,8 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC ...@@ -256,6 +258,8 @@ class TestTheiaExportAndImportFailures(ExportAndImportMixin, ResilientTheiaTestC
self.assertIn(m, s) self.assertIn(m, s)
else: else:
self.fail('No promise failed') self.fail('No promise failed')
finally:
self.slap._force_slapos_node_instance_all = old_value
def assertScriptFailure(self, func, errorfile, exitfile, *msg): def assertScriptFailure(self, func, errorfile, exitfile, *msg):
self.assertRaises( self.assertRaises(
......
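
Both helpers above now remember the previous value of _force_slapos_node_instance_all and restore it in a finally block, instead of leaving the flag permanently set after the first call. The same pattern reduced to a generic helper; names are illustrative:

def with_forced_processing(slap, action):
    # Temporarily force full instance processing, restoring the previous
    # value even if the action raises.
    old_value = slap._force_slapos_node_instance_all
    slap._force_slapos_node_instance_all = True
    try:
        return action()
    finally:
        slap._force_slapos_node_instance_all = old_value
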
...@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e ...@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = 30a1e738a8211887e75a5e75820e5872 md5sum = b056ec4fec956c2a07befd2fec116f04
[template-zeo] [template-zeo]
filename = instance-zeo.cfg.in filename = instance-zeo.cfg.in
......
...@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }} ...@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }} config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }} config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }} config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }} config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope software-type = zope
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%} {% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%}
......
...@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e ...@@ -74,7 +74,7 @@ md5sum = 3f7b28085ceff321a3cb785db60f7c3e
[template-erp5] [template-erp5]
filename = instance-erp5.cfg.in filename = instance-erp5.cfg.in
md5sum = 30a1e738a8211887e75a5e75820e5872 md5sum = 098e1d02159aeca9b36f2a0726b7b230
[template-zeo] [template-zeo]
filename = instance-zeo.cfg.in filename = instance-zeo.cfg.in
......
...@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }} ...@@ -266,7 +266,7 @@ config-test-runner-enabled = {{ dumps(test_runner_enabled) }}
config-test-runner-node-count = {{ dumps(test_runner_node_count) }} config-test-runner-node-count = {{ dumps(test_runner_node_count) }}
config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }} config-test-runner-random-activity-priority = {{ dumps(test_runner_random_activity_priority) }}
config-wcfs_enable = {{ dumps(wcfs_enable) }} config-wcfs_enable = {{ dumps(wcfs_enable) }}
config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }} config-test-runner-configuration = {{ dumps(slapparameter_dict.get('test-runner', {})) }}
software-type = zope software-type = zope
{% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%} {% set global_publisher_timeout = slapparameter_dict.get('publisher-timeout', 300) -%}
...@@ -280,7 +280,10 @@ software-type = zope ...@@ -280,7 +280,10 @@ software-type = zope
{% for custom_name, zope_parameter_dict in six.iteritems(zope_partition_dict) -%} {% for custom_name, zope_parameter_dict in six.iteritems(zope_partition_dict) -%}
{% set partition_name = 'zope-' ~ custom_name -%} {% set partition_name = 'zope-' ~ custom_name -%}
{% set section_name = 'request-' ~ partition_name -%} {% set section_name = 'request-' ~ partition_name -%}
{% set check_software_url_section_name = 'check-software-url' ~ partition_name -%}
{% set promise_software_url_section_name = 'promise-software-url' ~ partition_name -%} {% set promise_software_url_section_name = 'promise-software-url' ~ partition_name -%}
{% set check_test_runner_url_section_name = 'check-test-runner-url' ~ partition_name -%}
{% set promise_test_runner_url_section_name = 'promise-test-runner-url' ~ partition_name -%}
{% set zope_family = zope_parameter_dict.get('family', 'default') -%} {% set zope_family = zope_parameter_dict.get('family', 'default') -%}
{% do zope_family_name_list.append(zope_family) %} {% do zope_family_name_list.append(zope_family) %}
{% set backend_path = zope_parameter_dict.get('backend-path', '') % {'site-id': site_id} %} {% set backend_path = zope_parameter_dict.get('backend-path', '') % {'site-id': site_id} %}
...@@ -315,7 +318,26 @@ config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activi ...@@ -315,7 +318,26 @@ config-activity-timeout = {{ dumps(current_zope_family_override_dict.get('activi
{% if test_runner_enabled -%} {% if test_runner_enabled -%}
config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list} config-test-runner-apache-url-list = ${publish-early:{{ zope_family }}-test-runner-url-list}
[{{ promise_software_url_section_name }}] [{{ check_test_runner_url_section_name }}]
# Promise to wait for zope partition to receive the expected test-runner URL
recipe = slapos.cookbook:check_parameter
value = {{ '${' ~ section_name ~ ':config-test-runner-apache-url-list}' }}
expected-not-value = not-ready
expected-value =
path = ${directory:bin}/${:_buildout_section_name_}
[{{ promise_test_runner_url_section_name }}]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ '${' ~ check_test_runner_url_section_name ~ ':path}' }}
{% do root_common.section(promise_test_runner_url_section_name) -%}
{% endif -%}
[{{ check_software_url_section_name }}]
# Promise to wait for zope partition to use the expected software URL, # Promise to wait for zope partition to use the expected software URL,
# used on upgrades. # used on upgrades.
recipe = slapos.cookbook:check_parameter recipe = slapos.cookbook:check_parameter
...@@ -324,9 +346,14 @@ expected-not-value = ...@@ -324,9 +346,14 @@ expected-not-value =
expected-value = ${slap-connection:software-release-url} expected-value = ${slap-connection:software-release-url}
path = ${directory:bin}/${:_buildout_section_name_} path = ${directory:bin}/${:_buildout_section_name_}
[{{ promise_software_url_section_name }}]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = {{ '${' ~ check_software_url_section_name ~ ':path}' }}
{% do root_common.section(promise_software_url_section_name) -%} {% do root_common.section(promise_software_url_section_name) -%}
{% endif -%}
{% endfor -%} {% endfor -%}
{# if not explicitly configured, connect jupyter to first zope family, which -#} {# if not explicitly configured, connect jupyter to first zope family, which -#}
......
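
The template change above emits, per zope partition, a check script plus a monitor promise so the deployment only reports success once the partition has received the published test-runner URL list (anything other than empty or 'not-ready') and runs the expected software release URL. Conceptually the generated check behaves roughly like the sketch below; the real script is produced by the slapos.cookbook:check_parameter recipe, whose exact behaviour may differ:

import sys

def check_parameter(value, expected_value=None, expected_not_value=None):
    # Exit code 0 marks the promise as passing; anything else keeps it
    # failing until the value converges.
    if expected_value is not None and value != expected_value:
        return 1
    if expected_not_value is not None and value == expected_not_value:
        return 1
    return 0

if __name__ == '__main__':
    # e.g. the test-runner URL list stays 'not-ready' until publish-early fills it
    sys.exit(check_parameter('not-ready', expected_not_value='not-ready'))
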
...@@ -297,7 +297,7 @@ simplegeneric = 0.8.1 ...@@ -297,7 +297,7 @@ simplegeneric = 0.8.1
singledispatch = 3.4.0.3 singledispatch = 3.4.0.3
six = 1.16.0 six = 1.16.0
slapos.cookbook = 1.0.329 slapos.cookbook = 1.0.329
slapos.core = 1.10.1 slapos.core = 1.10.2
slapos.extension.shared = 1.0 slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.25 slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5 slapos.rebootstrap = 4.5
......