Commit f56ff587 authored by Jérome Perrin

Update Release Candidate

parents c58074ba a5adb95c
...@@ -16,7 +16,7 @@ url = http://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_2 ...@@ -16,7 +16,7 @@ url = http://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_2
md5sum = c02582bf9ae338e558617291897615eb md5sum = c02582bf9ae338e558617291897615eb
patch-binary = ${patch:location}/bin/patch patch-binary = ${patch:location}/bin/patch
patches = patches =
${:_profile_base_location_}/ca-certificates-any-python.patch#47c2509f1346bd5af8123fb1a2751c2c ${:_profile_base_location_}/ca-certificates-any-python.patch#c13b44dfc3157dda13a9a2ff97a9d501
${:_profile_base_location_}/ca-certificates-sbin-dir.patch#0b4e7d82ce768823c01954ee41ef177b ${:_profile_base_location_}/ca-certificates-sbin-dir.patch#0b4e7d82ce768823c01954ee41ef177b
patch-options = -p0 patch-options = -p0
configure-command = true configure-command = true
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
all: all:
- python3 certdata2pem.py - python3 certdata2pem.py
+ for x in '' 2 3; do type python$$x && break; done >/dev/null \ + for x in 3 '' 2; do type python$$x && break; done >/dev/null \
+ && python$$x certdata2pem.py + && python$$x certdata2pem.py
clean: clean:
......
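The patched Makefile above now prefers python3, then an unversioned python, then python2. A rough Python equivalent of that interpreter lookup, for illustration only (not part of the patch):

# Illustration of the interpreter preference order used by the patched
# Makefile; shutil.which() plays the role of the shell's `type`.
import shutil

def pick_python():
  for suffix in ('3', '', '2'):
    exe = shutil.which('python' + suffix)
    if exe:
      return exe
  raise RuntimeError('no python interpreter found in PATH')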
...@@ -9,9 +9,6 @@ parts = cythonplus_env.sh ...@@ -9,9 +9,6 @@ parts = cythonplus_env.sh
[gcc] [gcc]
min_version = 8.4 min_version = 8.4
[python]
part = python3
# Dependencies for the Cython+ test suite # Dependencies for the Cython+ test suite
[eggs] [eggs]
recipe = zc.recipe.egg recipe = zc.recipe.egg
......
...@@ -12,8 +12,8 @@ parts = ...@@ -12,8 +12,8 @@ parts =
[fluentbit-plugin-wendelin] [fluentbit-plugin-wendelin]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
share = true share = true
url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/v0.1i-dev_buildout/fluentbit-plugin-wendelin-v0.1i-dev_buildout.tar.gz url = https://lab.nexedi.com/nexedi/fluentbit-plugin-wendelin/-/archive/v0.1j-dev_buildout/fluentbit-plugin-wendelin-v0.1j-dev_buildout.tar.gz
md5sum = 9ed2ef46b0edfca072255b849ee65249 md5sum = 98cd6c892ff92cdeaffa0310840a476a
configure-command = echo "No configure command." configure-command = echo "No configure command."
environment = environment =
PATH=${golang1.17:location}/bin:%(PATH)s PATH=${golang1.17:location}/bin:%(PATH)s
......
...@@ -7,6 +7,7 @@ extends = ...@@ -7,6 +7,7 @@ extends =
../perl/buildout.cfg ../perl/buildout.cfg
../xz-utils/buildout.cfg ../xz-utils/buildout.cfg
../zlib/buildout.cfg ../zlib/buildout.cfg
../defaults.cfg
parts = parts =
glib glib
...@@ -25,7 +26,7 @@ configure-options = ...@@ -25,7 +26,7 @@ configure-options =
--disable-fam --disable-fam
--disable-xattr --disable-xattr
--disable-man --disable-man
--with-python=$PYTHON --with-python=python${python:version}
environment = environment =
PATH=${gettext:location}/bin:${patch:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s PATH=${gettext:location}/bin:${patch:location}/bin:${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
CPPFLAGS=-I${gettext:location}/include -I${zlib:location}/include CPPFLAGS=-I${gettext:location}/include -I${zlib:location}/include
......
...@@ -12,8 +12,8 @@ parts = ...@@ -12,8 +12,8 @@ parts =
[libxml2] [libxml2]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
shared = true shared = true
url = https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz url = https://download.gnome.org/sources/libxml2/2.10/libxml2-2.10.3.tar.xz
md5sum = b7b3029ac6beb32a7925225515f83ca3 md5sum = f9edac7fac232b3657a003fd9a5bbe42
configure-options = configure-options =
--disable-static --disable-static
--without-python --without-python
......
...@@ -7,8 +7,8 @@ parts = ...@@ -7,8 +7,8 @@ parts =
libxslt libxslt
[libxslt] [libxslt]
url = https://download.gnome.org/sources/libxslt/1.1/libxslt-1.1.35.tar.xz url = https://download.gnome.org/sources/libxslt/1.1/libxslt-1.1.37.tar.xz
md5sum = 5b3a634b77effd8a6268c21173575053 md5sum = 84e86fc8a1b7495674016e05e4c5da44
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
shared = true shared = true
# --disable-static is temporarily removed due to build error # --disable-static is temporarily removed due to build error
......
...@@ -99,8 +99,8 @@ post-install = ...@@ -99,8 +99,8 @@ post-install =
# as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir ) # as plugin-dir ( https://mariadb.com/kb/en/server-system-variables/#plugin_dir )
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
shared = true shared = true
url = https://packages.groonga.org/source/mroonga/mroonga-12.07.tar.gz url = https://packages.groonga.org/source/mroonga/mroonga-12.09.tar.gz
md5sum = bee7506e08deda9a607d85ed03b8b62e md5sum = 637d73b86239cc9c3758e9486746d430
pre-configure = pre-configure =
rm -rf fake_mariadb_source rm -rf fake_mariadb_source
mkdir -p fake_mariadb_source mkdir -p fake_mariadb_source
......
...@@ -12,8 +12,8 @@ parts = ...@@ -12,8 +12,8 @@ parts =
[mca] [mca]
recipe = slapos.recipe.cmmi recipe = slapos.recipe.cmmi
url = https://lab.nexedi.com/nexedi/metadata-collect-agent/-/archive/v0.2h-dev_buildout/metadata-collect-agent-v0.2h-dev_buildout.tar.gz url = https://lab.nexedi.com/nexedi/metadata-collect-agent/-/archive/v0.3.0/metadata-collect-agent-v0.3.0.tar.gz
md5sum = f394ea9507d13a0b18f9485e70abaf32 md5sum = 9c1f6582848be94e99af10d31dd5e0ba
configure-command = : configure-command = :
make-targets = make-targets =
no-dracut no-dracut
......
...@@ -36,7 +36,7 @@ post-install = ...@@ -36,7 +36,7 @@ post-install =
install -Dt %(location)s/bin %(make-targets)s install -Dt %(location)s/bin %(make-targets)s
environment = environment =
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${libgcrypt:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${gnutls:location}/lib/pkgconfig:${libgcrypt:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${pcre:location}/lib/pkgconfig
PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake:location}/bin:%(PATH)s:${python2.7:location}/bin PATH=${m4:location}/bin:${libtool:location}/bin:${libgcrypt:location}/bin:${curl:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:${bzip2:location}/bin:${autoconf:location}/bin:${git:location}/bin:${automake:location}/bin:${patch:location}/bin:${cmake:location}/bin:${python2.7:location}/bin:%(PATH)s
CXXFLAGS=-I${openssl:location}/include -I${gnutls:location}/include -I${zlib:location}/include CXXFLAGS=-I${openssl:location}/include -I${gnutls:location}/include -I${zlib:location}/include
CFLAGS=-I${gnutls:location}/include CFLAGS=-I${gnutls:location}/include
LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath=${curl:location}/lib -L${libtool:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${curl:location}/lib -L${pcre:location}/lib -L${jemalloc:location}/lib -L${libmicrohttpd:location}/lib LDFLAGS=-L${openssl:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -L${gnutls:location}/lib -Wl,-rpath=${curl:location}/lib -L${libtool:location}/lib -L${zlib:location}/lib -Wl,-rpath -Wl,${zlib:location}/lib -L${curl:location}/lib -L${pcre:location}/lib -L${jemalloc:location}/lib -L${libmicrohttpd:location}/lib
...@@ -47,4 +47,4 @@ environment = ...@@ -47,4 +47,4 @@ environment =
patch-options = -p1 patch-options = -p1
patches = patches =
# PR #3402 (mariadb_client: backport patch to fix syntax error in cmake 3.20) # PR #3402 (mariadb_client: backport patch to fix syntax error in cmake 3.20)
https://github.com/sysown/proxysql/commit/a3cfa56d257219f7610cd5711045bb5d84485a91.patch https://github.com/sysown/proxysql/commit/a3cfa56d257219f7610cd5711045bb5d84485a91.patch#bb034744fd9b676484d34a269ab2ed07
# Python bindings for the XML Security Library.
# https://xmlsec.readthedocs.io/
[buildout]
extends =
../lxml-python/buildout.cfg
../xmlsec/buildout.cfg
[python-xmlsec]
recipe = zc.recipe.egg:custom
egg = xmlsec
rpath =
${libxml2:location}/lib/
${libxslt:location}/lib/
${openssl:location}/lib/
${xmlsec:location}/lib/
${zlib:location}/lib/
setup-eggs =
${lxml-python:egg}
pkgconfig
pathlib2
setuptools-scm
toml
environment = python-xmlsec-env
[python-xmlsec-env]
PKG_CONFIG=${pkgconfig:location}/bin/pkg-config
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig:${xmlsec:location}/lib/pkgconfig
[versions]
xmlsec = 1.3.13
setuptools-scm = 7.0.5
toml = 0.10.2
[versions:python2]
xmlsec = 1.3.9
setuptools-scm = 5.0.2
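The new [python-xmlsec] part builds the xmlsec egg against the shared libxml2, libxslt, openssl and xmlsec1 libraries listed in rpath. A minimal smoke test (hypothetical, not part of the profile) run with the buildout-generated interpreter would be:

# Hypothetical smoke test: the import itself exercises the compiled binding
# and its rpath entries; the version should match the [versions] pin above.
import importlib.metadata
import xmlsec  # noqa: F401
print('python-xmlsec', importlib.metadata.version('xmlsec'))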
# XML Security Library
# https://www.aleksey.com/xmlsec/
[buildout]
extends =
../libxml2/buildout.cfg
../libxslt/buildout.cfg
../openssl/buildout.cfg
../pkgconfig/buildout.cfg
[xmlsec]
recipe = slapos.recipe.cmmi
url = https://www.aleksey.com/xmlsec/download/xmlsec1-1.2.34.tar.gz
md5sum = 87b0074e7ae535e061acf8ef64dada1b
shared = true
configure-options =
--disable-crypto-dl
environment =
PATH=${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libxml2:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig
LDFLAGS=-Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${zlib:location}/lib
[xmlsec:python2]
# Newer versions are not compatible with python2 version of python-xmlsec
url = https://www.aleksey.com/xmlsec/download/xmlsec1-1.2.30.tar.gz
md5sum = b66ec21e0a0ac331afb4b1bc5c9ef966
...@@ -28,7 +28,7 @@ from setuptools import setup, find_packages ...@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob import glob
import os import os
version = '1.0.272' version = '1.0.291'
name = 'slapos.cookbook' name = 'slapos.cookbook'
long_description = open("README.rst").read() long_description = open("README.rst").read()
......
...@@ -65,8 +65,8 @@ class Recipe(object): ...@@ -65,8 +65,8 @@ class Recipe(object):
Software type of requested instance, among those provided by the Software type of requested instance, among those provided by the
definition from software-url. definition from software-url.
slave (optional, defaults to false) shared (optional, defaults to false)
Set to "true" when requesting a slave instance, ie just setting a set of Set to "true" when requesting a shared instance, ie just setting a set of
parameters in an existing instance. parameters in an existing instance.
sla (optional) sla (optional)
...@@ -119,7 +119,7 @@ class Recipe(object): ...@@ -119,7 +119,7 @@ class Recipe(object):
partition_parameter_kw = self._filterForStorage({k[7:]: v partition_parameter_kw = self._filterForStorage({k[7:]: v
for k, v in six.iteritems(options) for k, v in six.iteritems(options)
if k.startswith('config-')}) if k.startswith('config-')})
slave = options.get('slave', 'false').lower() in \ shared = options.get('shared', 'false').lower() in \
librecipe.GenericBaseRecipe.TRUE_VALUES librecipe.GenericBaseRecipe.TRUE_VALUES
# By default, propagate the state of the parent instance # By default, propagate the state of the parent instance
...@@ -162,12 +162,12 @@ class Recipe(object): ...@@ -162,12 +162,12 @@ class Recipe(object):
try: try:
self.instance = request(software_url, software_type, self.instance = request(software_url, software_type,
name, partition_parameter_kw=partition_parameter_kw, name, partition_parameter_kw=partition_parameter_kw,
filter_kw=filter_kw, shared=slave, state=requested_state) filter_kw=filter_kw, shared=shared, state=requested_state)
return_parameter_dict = self._getReturnParameterDict(self.instance, return_parameter_dict = self._getReturnParameterDict(self.instance,
return_parameters) return_parameters)
# Fetch the instance-guid and the instance-state # Fetch the instance-guid and the instance-state
# Note: SlapOS Master does not support it for slave instances # Note: SlapOS Master does not support it for shared instances
if not slave: if not shared:
try: try:
options['instance-guid'] = self.instance.getInstanceGuid() \ options['instance-guid'] = self.instance.getInstanceGuid() \
.encode('UTF-8') .encode('UTF-8')
...@@ -189,7 +189,7 @@ class Recipe(object): ...@@ -189,7 +189,7 @@ class Recipe(object):
request_name=name, request_name=name,
partition_parameter_kw=partition_parameter_kw, partition_parameter_kw=partition_parameter_kw,
filter_kw=filter_kw, filter_kw=filter_kw,
shared=slave, shared=shared,
state=requested_state state=requested_state
) )
) )
......
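The renamed `shared` option (formerly `slave`) is forwarded as the `shared` keyword of the slap `request` call shown above. A minimal sketch of the equivalent direct call, assuming `request` is the bound request method the recipe already obtained, with placeholder values:

# Sketch only: values are placeholders, `request` is the same callable used
# by the recipe above (config-* options become partition_parameter_kw,
# sla-* options become filter_kw).
instance = request(
  'https://example.com/software.cfg',        # software-url
  'default',                                 # software-type
  'my-shared-instance',                      # name
  partition_parameter_kw={'key': 'value'},
  filter_kw={'computer_guid': 'COMP-1234'},
  shared=True,
  state='started',
)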
...@@ -96,9 +96,9 @@ eggs += ...@@ -96,9 +96,9 @@ eggs +=
[beremiz] [beremiz]
recipe = slapos.recipe.build:download-unpacked recipe = slapos.recipe.build:download-unpacked
# download beremiz at revision c9b7db300a25806ccaa9d5a844d1e0fd281acb4b # download beremiz at revision f703a6206832e14d7545d88428a7c81335a75004
url = https://github.com/beremiz/beremiz/archive/5cd7885043d4801279842d0c0632a58986b878f1.tar.gz url = https://github.com/beremiz/beremiz/archive/f703a6206832e14d7545d88428a7c81335a75004.tar.gz
md5sum = bd2647114749c3c3154f2f4bc4274adb md5sum = 01e191324837c9365121a31438b0d350
[beremiz-setup] [beremiz-setup]
recipe = zc.recipe.egg:develop recipe = zc.recipe.egg:develop
......
...@@ -233,6 +233,8 @@ Please be aware that the `health-check-timeout` is really short by default, so i ...@@ -233,6 +233,8 @@ Please be aware that the `health-check-timeout` is really short by default, so i
Thanks to using health-check it's possible to configure failover system. By providing `health-check-failover-url` or `health-check-failover-https-url` some special backend can be used to reply in case if original backend replies with error (codes like `5xx`). As a note one can setup this failover URL like `https://failover.example.com/?p=` so that the path from the incoming request will be passed as parameter. Additionally authentication to failover URL is supported with `health-check-authenticate-to-failover-backend` and SSL Proxy verification with `health-check-failover-ssl-proxy-verify` and `health-check-failover-ssl-proxy-ca-crt`. Thanks to using health-check it's possible to configure failover system. By providing `health-check-failover-url` or `health-check-failover-https-url` some special backend can be used to reply in case if original backend replies with error (codes like `5xx`). As a note one can setup this failover URL like `https://failover.example.com/?p=` so that the path from the incoming request will be passed as parameter. Additionally authentication to failover URL is supported with `health-check-authenticate-to-failover-backend` and SSL Proxy verification with `health-check-failover-ssl-proxy-verify` and `health-check-failover-ssl-proxy-ca-crt`.
**Note**: It's important to configure the failover URL response correctly, especially when the `stale-if-error` simulation available with `enable_cache` is expected to be used. In order to serve pages from the cache, the failover URL has to return an error HTTP code (like 503 SERVICE_UNAVAILABLE), so that in such a case the cached page takes precedence over the reply from the failover URL.
Examples Examples
======== ========
......
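Putting the options described above together, a shared (slave) instance combining `enable_cache` with a failover backend could be requested with parameters like the following sketch (URLs and timings are placeholders, parameter names are the ones documented above):

# Hypothetical slave parameters: cached content plus a failover backend that
# replies with an error code (e.g. 503), so stale pages win over the failover
# reply when enable_cache is used.
slave_parameter_dict = {
  'url': 'https://backend.example.com/',
  'enable_cache': True,
  'health-check': True,
  'health-check-timeout': 2,
  'health-check-interval': 5,
  'health-check-failover-url': 'https://failover.example.com/?p=',
  'health-check-failover-https-url': 'https://failover.example.com/?p=',
  'health-check-authenticate-to-failover-backend': True,
  'health-check-failover-ssl-proxy-verify': True,
  'health-check-failover-ssl-proxy-ca-crt': '-----BEGIN CERTIFICATE-----...',
}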
...@@ -109,13 +109,6 @@ def new_getaddrinfo(*args): ...@@ -109,13 +109,6 @@ def new_getaddrinfo(*args):
return DNS_CACHE[args[:2]] return DNS_CACHE[args[:2]]
# for development: debugging logs and install Ctrl+C handler
if os.environ.get('SLAPOS_TEST_DEBUG'):
logging.basicConfig(level=logging.DEBUG)
import unittest
unittest.installHandler()
def der2pem(der): def der2pem(der):
certificate = x509.load_der_x509_certificate(der, default_backend()) certificate = x509.load_der_x509_certificate(der, default_backend())
return certificate.public_bytes(serialization.Encoding.PEM) return certificate.public_bytes(serialization.Encoding.PEM)
...@@ -282,6 +275,58 @@ def isHTTP2(domain): ...@@ -282,6 +275,58 @@ def isHTTP2(domain):
return 'Using HTTP2, server supports'.encode() in err return 'Using HTTP2, server supports'.encode() in err
class AtsMixin(object):
def _hack_ats(self, max_stale_age):
records_config = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'trafficserver', 'records.config'
))
self.assertEqual(1, len(records_config))
self._hack_ats_records_config_path = records_config[0]
original_max_stale_age = \
'CONFIG proxy.config.http.cache.max_stale_age INT 604800\n'
new_max_stale_age = \
'CONFIG proxy.config.http.cache.max_stale_age INT %s\n' % (
max_stale_age,)
with open(self._hack_ats_records_config_path) as fh:
self._hack_ats_original_records_config = fh.readlines()
# sanity check - are we really doing it?
self.assertIn(
original_max_stale_age,
self._hack_ats_original_records_config)
new_records_config = []
max_stale_age_changed = False
for line in self._hack_ats_original_records_config:
if line == original_max_stale_age:
line = new_max_stale_age
max_stale_age_changed = True
new_records_config.append(line)
self.assertTrue(max_stale_age_changed)
with open(self._hack_ats_records_config_path, 'w') as fh:
fh.write(''.join(new_records_config))
self._hack_ats_restart()
def _unhack_ats(self):
with open(self._hack_ats_records_config_path, 'w') as fh:
fh.write(''.join(self._hack_ats_original_records_config))
self._hack_ats_restart()
def _hack_ats_restart(self):
for process_info in self.callSupervisorMethod('getAllProcessInfo'):
if process_info['name'].startswith(
'trafficserver') and process_info['name'].endswith('-on-watch'):
self.callSupervisorMethod(
'stopProcess', '%(group)s:%(name)s' % process_info)
self.callSupervisorMethod(
'startProcess', '%(group)s:%(name)s' % process_info)
# give short time for the ATS to start back
time.sleep(5)
for process_info in self.callSupervisorMethod('getAllProcessInfo'):
if process_info['name'].startswith(
'trafficserver') and process_info['name'].endswith('-on-watch'):
self.assertEqual(process_info['statename'], 'RUNNING')
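A hypothetical sketch (not part of the diff) of how a test class uses the new mixin; the real usage appears in TestSlaveHealthCheck further down:

class TestWithShortStaleWindow(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
  def test_cached_content(self):
    # shrink ATS max_stale_age for this test only and restore it afterwards
    self.addCleanup(self._unhack_ats)
    self._hack_ats(30)
    # ... exercise caching / failover behaviour here ...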
class TestDataMixin(object): class TestDataMixin(object):
def getTrimmedProcessInfo(self): def getTrimmedProcessInfo(self):
return '\n'.join(sorted([ return '\n'.join(sorted([
...@@ -561,11 +606,7 @@ class TestHandler(BaseHTTPRequestHandler): ...@@ -561,11 +606,7 @@ class TestHandler(BaseHTTPRequestHandler):
server_version = "TestBackend" server_version = "TestBackend"
sys_version = "" sys_version = ""
def log_message(self, *args): log_message = logging.getLogger(__name__ + '.TestHandler').info
if os.environ.get('SLAPOS_TEST_DEBUG'):
return BaseHTTPRequestHandler.log_message(self, *args)
else:
return
def do_DELETE(self): def do_DELETE(self):
config = self.configuration.pop(self.path, None) config = self.configuration.pop(self.path, None)
...@@ -1582,7 +1623,7 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest( ...@@ -1582,7 +1623,7 @@ class TestMasterAIKCDisabledAIBCCDisabledRequest(
) )
class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return { return {
...@@ -3876,56 +3917,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -3876,56 +3917,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertRegex(ats_log, direct_pattern) self.assertRegex(ats_log, direct_pattern)
# END: Check that squid.log is correctly filled in # END: Check that squid.log is correctly filled in
def _hack_ats(self, max_stale_age):
records_config = glob.glob(
os.path.join(
self.instance_path, '*', 'etc', 'trafficserver', 'records.config'
))
self.assertEqual(1, len(records_config))
self._hack_ats_records_config_path = records_config[0]
original_max_stale_age = \
'CONFIG proxy.config.http.cache.max_stale_age INT 604800\n'
new_max_stale_age = \
'CONFIG proxy.config.http.cache.max_stale_age INT %s\n' % (
max_stale_age,)
with open(self._hack_ats_records_config_path) as fh:
self._hack_ats_original_records_config = fh.readlines()
# sanity check - are we really doing it?
self.assertIn(
original_max_stale_age,
self._hack_ats_original_records_config)
new_records_config = []
max_stale_age_changed = False
for line in self._hack_ats_original_records_config:
if line == original_max_stale_age:
line = new_max_stale_age
max_stale_age_changed = True
new_records_config.append(line)
self.assertTrue(max_stale_age_changed)
with open(self._hack_ats_records_config_path, 'w') as fh:
fh.write(''.join(new_records_config))
self._hack_ats_restart()
def _unhack_ats(self):
with open(self._hack_ats_records_config_path, 'w') as fh:
fh.write(''.join(self._hack_ats_original_records_config))
self._hack_ats_restart()
def _hack_ats_restart(self):
for process_info in self.callSupervisorMethod('getAllProcessInfo'):
if process_info['name'].startswith(
'trafficserver') and process_info['name'].endswith('-on-watch'):
self.callSupervisorMethod(
'stopProcess', '%(group)s:%(name)s' % process_info)
self.callSupervisorMethod(
'startProcess', '%(group)s:%(name)s' % process_info)
# give short time for the ATS to start back
time.sleep(5)
for process_info in self.callSupervisorMethod('getAllProcessInfo'):
if process_info['name'].startswith(
'trafficserver') and process_info['name'].endswith('-on-watch'):
self.assertEqual(process_info['statename'], 'RUNNING')
def test_enable_cache_negative_revalidate(self): def test_enable_cache_negative_revalidate(self):
parameter_dict = self.assertSlaveBase('enable_cache') parameter_dict = self.assertSlaveBase('enable_cache')
...@@ -4553,7 +4544,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -4553,7 +4544,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotIn('X-Backend-Identification', result.headers) self.assertNotIn('X-Backend-Identification', result.headers)
class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin): class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
instance_parameter_dict = { instance_parameter_dict = {
'domain': 'example.com', 'domain': 'example.com',
'port': HTTPS_PORT, 'port': HTTPS_PORT,
...@@ -6774,7 +6765,7 @@ class TestPassedRequestParameter(HttpFrontendTestCase): ...@@ -6774,7 +6765,7 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
) )
class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin): class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin, AtsMixin):
@classmethod @classmethod
def getInstanceParameterDict(cls): def getInstanceParameterDict(cls):
return { return {
...@@ -6816,6 +6807,7 @@ class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin): ...@@ -6816,6 +6807,7 @@ class TestSlaveHealthCheck(SlaveHttpFrontendTestCase, TestDataMixin):
}, },
'health-check-failover-url': { 'health-check-failover-url': {
'https-only': False, # http and https access to check 'https-only': False, # http and https access to check
'enable_cache': True,
'health-check-timeout': 1, # fail fast for test 'health-check-timeout': 1, # fail fast for test
'health-check-interval': 1, # fail fast for test 'health-check-interval': 1, # fail fast for test
'url': cls.backend_url + 'url', 'url': cls.backend_url + 'url',
...@@ -6969,12 +6961,63 @@ backend _health-check-default-http ...@@ -6969,12 +6961,63 @@ backend _health-check-default-http
slave_parameter_dict = self.getSlaveParameterDictDict()[ slave_parameter_dict = self.getSlaveParameterDictDict()[
'health-check-failover-url'] 'health-check-failover-url']
# check normal access source_ip = '127.0.0.1'
max_stale_age = 30
max_age = int(max_stale_age / 2.)
body_200 = 'Body 200' * 500
body_failover = 'Failover response'
cached_path = self.id()
self.addCleanup(self._unhack_ats)
self._hack_ats(max_stale_age)
# Prerequisite for the cache checks: set up the failover backend with the
# proper reply code, both for normal access (not cached, the typical
# scenario) and for cached access, in order to check ATS behaviour
for path in ['/failoverpath', '/' + cached_path]:
for url in [
'failover-url?a=b&c=',
'failover-https-url?a=b&c='
]:
result = requests.put(
self.backend_url + url + path,
headers={
'X-Reply-Status-Code': '503',
'X-Reply-Body': base64.b64encode(body_failover.encode()),
})
self.assertEqual(result.status_code, http.client.CREATED)
def configureResult(status_code, body):
backend_url = self.getSlaveParameterDictDict()[
'health-check-failover-url']['https-url']
result = requests.put(
'/'.join([backend_url, cached_path]),
headers={
'X-Reply-Header-Cache-Control': 'max-age=%s, public' % (max_age,),
'X-Reply-Status-Code': status_code,
'X-Reply-Body': base64.b64encode(body.encode()),
# drop Content-Length header to ensure
# https://github.com/apache/trafficserver/issues/7880
'X-Drop-Header': 'Content-Length',
})
self.assertEqual(result.status_code, http.client.CREATED)
def checkResult(status_code, body):
result = fakeHTTPSResult(
parameter_dict['domain'], cached_path,
source_ip=source_ip
)
self.assertEqual(result.status_code, status_code)
self.assertEqual(result.text, body)
# check normal access...
result = fakeHTTPResult(parameter_dict['domain'], '/path') result = fakeHTTPResult(parameter_dict['domain'], '/path')
self.assertEqualResultJson(result, 'Path', '/url/path') self.assertEqualResultJson(result, 'Path', '/url/path')
result = fakeHTTPSResult(parameter_dict['domain'], '/path') result = fakeHTTPSResult(parameter_dict['domain'], '/path')
self.assertEqual(self.certificate_pem, der2pem(result.peercert)) self.assertEqual(self.certificate_pem, der2pem(result.peercert))
self.assertEqualResultJson(result, 'Path', '/https-url/path') self.assertEqualResultJson(result, 'Path', '/https-url/path')
# ...and cached result, also in order to store it in the cache
configureResult('200', body_200)
checkResult(http.client.OK, body_200)
# start replying with bad status code # start replying with bad status code
result = requests.put( result = requests.put(
...@@ -6993,10 +7036,11 @@ backend _health-check-default-http ...@@ -6993,10 +7036,11 @@ backend _health-check-default-http
time.sleep(3) # > health-check-timeout + health-check-interval time.sleep(3) # > health-check-timeout + health-check-interval
# check simple failover
result = fakeHTTPSResult(parameter_dict['domain'], '/failoverpath') result = fakeHTTPSResult(parameter_dict['domain'], '/failoverpath')
self.assertEqual(self.certificate_pem, der2pem(result.peercert)) self.assertEqual(self.certificate_pem, der2pem(result.peercert))
self.assertEqualResultJson( self.assertEqual(result.status_code, http.client.SERVICE_UNAVAILABLE)
result, 'Path', '/failover-https-url?a=b&c=/failoverpath') self.assertEqual(result.text, body_failover)
self.assertLastLogLineRegexp( self.assertLastLogLineRegexp(
'_health-check-failover-url_backend_log', '_health-check-failover-url_backend_log',
...@@ -7005,14 +7049,14 @@ backend _health-check-default-http ...@@ -7005,14 +7049,14 @@ backend _health-check-default-http
r'https-backend _health-check-failover-url-https-failover' r'https-backend _health-check-failover-url-https-failover'
r'\/_health-check-failover-url-backend-https ' r'\/_health-check-failover-url-backend-https '
r'\d+/\d+\/\d+\/\d+\/\d+ ' r'\d+/\d+\/\d+\/\d+\/\d+ '
r'200 \d+ - - ---- ' r'503 \d+ - - ---- '
r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ ' r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ '
r'"GET /failoverpath HTTP/1.1"' r'"GET /failoverpath HTTP/1.1"'
) )
result = fakeHTTPResult(parameter_dict['domain'], '/failoverpath') result = fakeHTTPResult(parameter_dict['domain'], '/failoverpath')
self.assertEqualResultJson( self.assertEqual(result.status_code, http.client.SERVICE_UNAVAILABLE)
result, 'Path', '/failover-url?a=b&c=/failoverpath') self.assertEqual(result.text, body_failover)
self.assertLastLogLineRegexp( self.assertLastLogLineRegexp(
'_health-check-failover-url_backend_log', '_health-check-failover-url_backend_log',
r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ ' r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ '
...@@ -7020,11 +7064,23 @@ backend _health-check-default-http ...@@ -7020,11 +7064,23 @@ backend _health-check-default-http
r'http-backend _health-check-failover-url-http-failover' r'http-backend _health-check-failover-url-http-failover'
r'\/_health-check-failover-url-backend-http ' r'\/_health-check-failover-url-backend-http '
r'\d+/\d+\/\d+\/\d+\/\d+ ' r'\d+/\d+\/\d+\/\d+\/\d+ '
r'200 \d+ - - ---- ' r'503 \d+ - - ---- '
r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ ' r'\d+\/\d+\/\d+\/\d+\/\d+ \d+\/\d+ '
r'"GET /failoverpath HTTP/1.1"' r'"GET /failoverpath HTTP/1.1"'
) )
# It's time to check that ATS gives cached result, even if failover
# backend is used
checkResult(http.client.OK, body_200)
# interesting moment: the cached entry is now older than max_age but younger
# than max_stale_age (the sleep subtracts the 3 seconds already waited above),
# which triggers https://github.com/apache/trafficserver/issues/7880,
# i.e. stale-if-error simulated by ATS while the failover backend is used
time.sleep(max_age + 1 - 3)
checkResult(http.client.OK, body_200)
# max_stale_age passed, time to return 503 from the failover url
time.sleep(max_stale_age + 2 - 3)
checkResult(http.client.SERVICE_UNAVAILABLE, body_failover)
def test_health_check_failover_url_netloc_list(self): def test_health_check_failover_url_netloc_list(self):
parameter_dict = self.assertSlaveBase( parameter_dict = self.assertSlaveBase(
'health-check-failover-url-netloc-list') 'health-check-failover-url-netloc-list')
......
...@@ -61,6 +61,7 @@ ...@@ -61,6 +61,7 @@
"url": "http://@@_ipv4_address@@:@@_server_http_port@@/" "url": "http://@@_ipv4_address@@:@@_server_http_port@@/"
}, },
{ {
"enable_cache": true,
"health-check": true, "health-check": true,
"health-check-failover-https-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=", "health-check-failover-https-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=",
"health-check-failover-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=", "health-check-failover-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=",
...@@ -183,6 +184,7 @@ ...@@ -183,6 +184,7 @@
"url": "http://@@_ipv4_address@@:@@_server_http_port@@/" "url": "http://@@_ipv4_address@@:@@_server_http_port@@/"
}, },
{ {
"enable_cache": true,
"health-check": true, "health-check": true,
"health-check-failover-https-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=", "health-check-failover-https-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=",
"health-check-failover-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=", "health-check-failover-url": "http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=",
...@@ -279,7 +281,7 @@ ...@@ -279,7 +281,7 @@
"backend-client-caucase-url": "http://[@@_ipv6_address@@]:8990", "backend-client-caucase-url": "http://[@@_ipv6_address@@]:8990",
"cluster-identification": "testing partition 0", "cluster-identification": "testing partition 0",
"domain": "example.com", "domain": "example.com",
"extra_slave_instance_list": "[{\"health-check\": true, \"health-check-http-method\": \"CONNECT\", \"slave_reference\": \"_health-check-connect\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-fall\": \"7\", \"health-check-http-method\": \"POST\", \"health-check-http-path\": \"/POST-path to be encoded\", \"health-check-http-version\": \"HTTP/1.0\", \"health-check-interval\": \"15\", \"health-check-rise\": \"3\", \"health-check-timeout\": \"7\", \"slave_reference\": \"_health-check-custom\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"slave_reference\": \"_health-check-default\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"slave_reference\": \"_health-check-disabled\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-failover-https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=\", \"health-check-http-path\": \"/health-check-failover-url\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-authenticate-to-failover-backend\": true, \"health-check-failover-https-url\": \"https://@@_ipv4_address@@:@@_server_https_auth_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_auth_port@@/failover-url?a=b&c=\", \"health-check-http-path\": \"/health-check-failover-url-auth-to-backend\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url-auth-to-backend\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-failover-https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=\", \"health-check-failover-url-netloc-list\": \"@@_ipv4_address@@:@@_server_netloc_a_http_port@@ @@_ipv4_address@@:@@_server_netloc_b_http_port@@\", \"health-check-http-path\": \"/health-check-failover-url\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url-netloc-list\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-ca-crt\": \"@@test_server_ca.certificate_pem_double@@\", \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", \"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verified\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verified\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", 
\"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verify-missing\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verify-missing\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-ca-crt\": \"@@another_server_ca.certificate_pem_double@@\", \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", \"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verify-unverified\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verify-unverified\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}]", "extra_slave_instance_list": "[{\"health-check\": true, \"health-check-http-method\": \"CONNECT\", \"slave_reference\": \"_health-check-connect\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-fall\": \"7\", \"health-check-http-method\": \"POST\", \"health-check-http-path\": \"/POST-path to be encoded\", \"health-check-http-version\": \"HTTP/1.0\", \"health-check-interval\": \"15\", \"health-check-rise\": \"3\", \"health-check-timeout\": \"7\", \"slave_reference\": \"_health-check-custom\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"slave_reference\": \"_health-check-default\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"slave_reference\": \"_health-check-disabled\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"enable_cache\": true, \"health-check\": true, \"health-check-failover-https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=\", \"health-check-http-path\": \"/health-check-failover-url\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-authenticate-to-failover-backend\": true, \"health-check-failover-https-url\": \"https://@@_ipv4_address@@:@@_server_https_auth_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_auth_port@@/failover-url?a=b&c=\", \"health-check-http-path\": \"/health-check-failover-url-auth-to-backend\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url-auth-to-backend\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-failover-https-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-https-url?a=b&c=\", \"health-check-failover-url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/failover-url?a=b&c=\", \"health-check-failover-url-netloc-list\": \"@@_ipv4_address@@:@@_server_netloc_a_http_port@@ @@_ipv4_address@@:@@_server_netloc_b_http_port@@\", \"health-check-http-path\": \"/health-check-failover-url\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"https-only\": false, \"https-url\": 
\"http://@@_ipv4_address@@:@@_server_http_port@@/https-url\", \"slave_reference\": \"_health-check-failover-url-netloc-list\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/url\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-ca-crt\": \"@@test_server_ca.certificate_pem_double@@\", \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", \"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verified\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verified\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", \"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verify-missing\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verify-missing\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}, {\"health-check\": true, \"health-check-failover-ssl-proxy-ca-crt\": \"@@another_server_ca.certificate_pem_double@@\", \"health-check-failover-ssl-proxy-verify\": true, \"health-check-failover-url\": \"https://@@_ipv4_address@@:@@_server_https_port@@/\", \"health-check-http-path\": \"/health-check-failover-url-ssl-proxy-verify-unverified\", \"health-check-interval\": 1, \"health-check-timeout\": 1, \"slave_reference\": \"_health-check-failover-url-ssl-proxy-verify-unverified\", \"url\": \"http://@@_ipv4_address@@:@@_server_http_port@@/\"}]",
"frontend-name": "caddy-frontend-1", "frontend-name": "caddy-frontend-1",
"kedifa-caucase-url": "http://[@@_ipv6_address@@]:15090", "kedifa-caucase-url": "http://[@@_ipv6_address@@]:15090",
"master-key-download-url": "https://[@@_ipv6_address@@]:15080/@@master-key-download-url_endpoint@@", "master-key-download-url": "https://[@@_ipv6_address@@]:15080/@@master-key-download-url_endpoint@@",
......
...@@ -18,4 +18,8 @@ md5sum = d1e4d7306c39f2ebc64d0407860d4301 ...@@ -18,4 +18,8 @@ md5sum = d1e4d7306c39f2ebc64d0407860d4301
[template-cloudooo-instance] [template-cloudooo-instance]
filename = instance-cloudooo.cfg.in filename = instance-cloudooo.cfg.in
md5sum = 90299c1dbdc5f983613794a8e9a7bc9d md5sum = 3c499fd3cdfc7915d6eaf1cf4130b56d
[template-haproxy-cfg]
filename = haproxy.cfg.in
md5sum = 3d989eeb2e326bb0daf9351850f7fc39
global
maxconn 4096
stats socket {{ parameter_dict['socket-path'] }} level admin
master-worker
pidfile {{ parameter_dict['pidfile'] }}
defaults
mode http
retries 1
option redispatch
maxconn 2000
timeout server 305s
timeout queue 60s
timeout connect 5s
timeout client 305s
option httpclose
listen cloudooo
bind {{ parameter_dict['ip'] }}:{{ parameter_dict['port'] }}
balance roundrobin
stats uri /haproxy
{% for i, backend_netloc in enumerate(parameter_dict['backend-list'].splitlines()) -%}
server cloudooo_{{ i + 1 }} {{ backend_netloc }} rise 1 fall 2 maxqueue 5 maxconn 1
{% endfor %}
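For reference, a short sketch of how this template expands (assuming jinja2 is installed and that `enumerate` is available in the template context, which is an assumption of this sketch):

# Sketch: render haproxy.cfg.in with a sample parameter_dict; the
# backend-list option is one "ip:port" per line.
from jinja2 import Environment

env = Environment()
env.globals['enumerate'] = enumerate
with open('haproxy.cfg.in') as f:
  template = env.from_string(f.read())
print(template.render(parameter_dict={
  'socket-path': '/srv/run/haproxy.sock',
  'pidfile': '/srv/run/haproxy.pid',
  'ip': '127.0.0.1',
  'port': 2020,
  'backend-list': '10.0.0.1:4020\n10.0.0.2:4020',
}))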
...@@ -68,7 +68,7 @@ parts = ...@@ -68,7 +68,7 @@ parts =
apache-promise apache-promise
apache-logrotate apache-logrotate
cloudooo-test-runner cloudooo-test-runner
haproxy haproxy-service
xvfb-instance xvfb-instance
wkhtmltopdf-on-xvfb wkhtmltopdf-on-xvfb
...@@ -181,17 +181,25 @@ wrapper = ${directory:services}/{{ name }} ...@@ -181,17 +181,25 @@ wrapper = ${directory:services}/{{ name }}
{% endfor -%} {% endfor -%}
[haproxy-cfg]
recipe = slapos.recipe.template:jinja2
url = {{ parameter_dict['template-haproxy-cfg'] }}
output = ${directory:etc}/haproxy.cfg
context =
section parameter_dict haproxy
[haproxy-service]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/${:_buildout_section_name_}
command-line =
"{{ parameter_dict['haproxy'] }}/sbin/haproxy" -f "${haproxy-cfg:output}"
hash-files = ${haproxy-cfg:output}
[haproxy] [haproxy]
recipe = slapos.cookbook:haproxy
name = cloudooo
conf-path = ${directory:etc}/haproxy.cfg
socket-path = ${directory:run}/haproxy.sock socket-path = ${directory:run}/haproxy.sock
pidfile = ${directory:run}/haproxy.pid
ip = {{ ipv4 }} ip = {{ ipv4 }}
port = {{ haproxy_port }} port = {{ haproxy_port }}
maxconn = 1
wrapper-path = ${directory:services}/haproxy
binary-path = {{ parameter_dict['haproxy'] }}/sbin/haproxy
ctl-path = ${directory:bin}/haproxy-ctl
backend-list = backend-list =
{%- for section_name in cloudooo_section_list %} {%- for section_name in cloudooo_section_list %}
{{ "${" ~ section_name ~ ":ip}:${" ~ section_name ~ ":port}" }} {{ "${" ~ section_name ~ ":ip}:${" ~ section_name ~ ":port}" }}
...@@ -230,7 +238,6 @@ link-binary = ...@@ -230,7 +238,6 @@ link-binary =
{{ parameter_dict['poppler'] }}/bin/pdftohtml {{ parameter_dict['poppler'] }}/bin/pdftohtml
{{ parameter_dict['onlyoffice-core'] }}/bin/x2t {{ parameter_dict['onlyoffice-core'] }}/bin/x2t
# rest of parts are candidates for some generic stuff
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
apache-conf = ${:etc}/apache apache-conf = ${:etc}/apache
......
...@@ -92,11 +92,16 @@ zlib = ${zlib:location} ...@@ -92,11 +92,16 @@ zlib = ${zlib:location}
template-apache-conf = ${template-apache-backend-conf:target} template-apache-conf = ${template-apache-backend-conf:target}
template-logrotate-base = ${template-logrotate-base:output} template-logrotate-base = ${template-logrotate-base:output}
template-monitor = ${monitor2-template:output} template-monitor = ${monitor2-template:output}
template-haproxy-cfg = ${template-haproxy-cfg:target}
[template-cloudooo-instance] [template-cloudooo-instance]
recipe = slapos.recipe.build:download recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename} url = ${:_profile_base_location_}/${:filename}
[template-haproxy-cfg]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
[versions] [versions]
argparse = 1.4.0 argparse = 1.4.0
pyPdf = 1.13 pyPdf = 1.13
...@@ -8,9 +8,6 @@ parts = ...@@ -8,9 +8,6 @@ parts =
slapos-cookbook slapos-cookbook
instance.cfg instance.cfg
[python]
part = python2.7
[instance.cfg] [instance.cfg]
recipe = slapos.recipe.template recipe = slapos.recipe.template
output = ${buildout:directory}/${:_buildout_section_name_} output = ${buildout:directory}/${:_buildout_section_name_}
......
...@@ -173,15 +173,16 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin): ...@@ -173,15 +173,16 @@ class TestCrontabs(MariaDBTestCase, CrontabMixin):
'check-slow-query-pt-digest-result.py', 'check-slow-query-pt-digest-result.py',
)) ))
with self.assertRaises(subprocess.CalledProcessError) as error_context: with self.assertRaises(subprocess.CalledProcessError) as error_context:
subprocess.check_output('faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'], shell=True) subprocess.check_output(
'faketime 2050-01-01 %s' % check_slow_query_promise_plugin['command'],
text=True,
shell=True)
self.assertEqual( self.assertEqual(
error_context.exception.output, error_context.exception.output,
b"""\ "Threshold is lower than expected: \n"
Threshold is lower than expected: "Expected total queries : 1.0 and current is: 2\n"
Expected total queries : 1.0 and current is: 2 "Expected slowest query : 0.1 and current is: 3\n",
Expected slowest query : 0.1 and current is: 3 )
""")
class TestMariaDB(MariaDBTestCase): class TestMariaDB(MariaDBTestCase):
def test_utf8_collation(self): def test_utf8_collation(self):
......
...@@ -46,7 +46,7 @@ from slapos.testing.testcase import ( ...@@ -46,7 +46,7 @@ from slapos.testing.testcase import (
makeModuleSetUpAndTestCaseClass, makeModuleSetUpAndTestCaseClass,
) )
old_software_release_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.167.5/software/erp5/software.cfg' old_software_release_url = 'https://lab.nexedi.com/nexedi/slapos/raw/1.0.167.6/software/erp5/software.cfg'
new_software_release_url = os.path.abspath( new_software_release_url = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')) os.path.join(os.path.dirname(__file__), '..', 'software.cfg'))
......
...@@ -27,4 +27,4 @@ md5sum = 98faa5ad8cfb23a11d97a459078a1d05 ...@@ -27,4 +27,4 @@ md5sum = 98faa5ad8cfb23a11d97a459078a1d05
[template-runTestSuite] [template-runTestSuite]
filename = runTestSuite.in filename = runTestSuite.in
md5sum = 54d585d50a3464100611774db68b72c0 md5sum = 2bb3d71a0e04bc8bc828bb3f726ef3ff
...@@ -122,7 +122,7 @@ def main(): ...@@ -122,7 +122,7 @@ def main():
if ('jio' in test_suite): if ('jio' in test_suite):
url = f'{remote_access_url}/jio/test/tests.html' url = f'{remote_access_url}/jio/test/tests.html'
else: else:
url = f'{remote_access_url}/renderjs/test' url = f'{remote_access_url}/renderjs/test/'
is_browser_running = True is_browser_running = True
agent = browser.execute_script("return navigator.userAgent") agent = browser.execute_script("return navigator.userAgent")
......
...@@ -22,6 +22,7 @@ extends = ...@@ -22,6 +22,7 @@ extends =
parts = parts =
eggs/scripts eggs/scripts
python2.7-disabled
slapos-cookbook slapos-cookbook
template template
...@@ -251,6 +252,26 @@ branch = master ...@@ -251,6 +252,26 @@ branch = master
egg = slapos.core egg = slapos.core
setup = ${slapos.core-repository:location} setup = ${slapos.core-repository:location}
[python2.7-disabled]
# An "intentionally broken" python2 command that should catch
# accidental usage of things like #!/usr/bin/env python2
recipe = zc.recipe.egg
# we need an egg to generate a script, use the one from this part's recipe
eggs = ${:recipe}
interpreter = python2.7
entry-points =
python=${:eggs}:ignored
python2=${:eggs}:ignored
python2.7=${:eggs}:ignored
scripts =
python
python2
python2.7
initialization =
import sys
print("Error: attempt to use system python2", file=sys.stderr)
sys.exit(2)
[python-interpreter] [python-interpreter]
eggs += eggs +=
${lxml-python:egg} ${lxml-python:egg}
......
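The [python2.7-disabled] part above generates bin/python, bin/python2 and bin/python2.7 as ordinary zc.recipe.egg console scripts. Stripped of the egg bootstrapping, each generated stub roughly reduces to the following sketch (not the literal generated file; the real script starts with the buildout python's shebang and sets up sys.path first):

#!/usr/bin/env python3
# Sketch of the generated python2 stub: fail loudly instead of silently
# picking up a system python2.
import sys

print("Error: attempt to use system python2", file=sys.stderr)
sys.exit(2)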
# Bogus Software
A bogus non-SR that only has a JSON schema so that requesting it works.
{
"name": "Bogus",
"description": "Bogus",
"serialisation": "json-in-xml",
"software-type": {}
}
{
"name": "Dummy",
"description": "Dummy",
"serialisation": "xml",
"software-type": {}
}
...@@ -275,9 +275,9 @@ class TestTheiaEmbeddedSlapOSShutdown(TheiaTestCase): ...@@ -275,9 +275,9 @@ class TestTheiaEmbeddedSlapOSShutdown(TheiaTestCase):
class TestTheiaWithEmbeddedInstance(TheiaTestCase): class TestTheiaWithEmbeddedInstance(TheiaTestCase):
sr_url = '~/bogus/sr/url.cfg' sr_url = os.path.abspath('dummy/software.cfg')
sr_type = 'bogus-type' sr_type = 'dummy-type'
sr_config = {"bogus": "yes"} sr_config = {"dummy": "yes"}
regexpr = re.compile(r"([\w/\-\.]+)\s+slaprunner\s+available") regexpr = re.compile(r"([\w/\-\.]+)\s+slaprunner\s+available")
@classmethod @classmethod
...@@ -326,9 +326,9 @@ class TestTheiaWithEmbeddedInstance(TheiaTestCase): ...@@ -326,9 +326,9 @@ class TestTheiaWithEmbeddedInstance(TheiaTestCase):
self.assertEmbedded(initial_sr_url, self.sr_type, self.sr_config) self.assertEmbedded(initial_sr_url, self.sr_type, self.sr_config)
# Change parameters for embedded instance # Change parameters for embedded instance
sr_url = '/bogus/sr/url-2.cfg' sr_url = os.path.abspath('bogus/software.cfg')
sr_type = 'bogus-type-2' sr_type = 'bogus-type'
sr_config = {"bogus-2": "true"} sr_config = {"bogus": "true"}
self.requestInstance( self.requestInstance(
self.getInstanceParameterDict(sr_url, sr_type, sr_config)) self.getInstanceParameterDict(sr_url, sr_type, sr_config))
self.waitForInstance() self.waitForInstance()
......
...@@ -35,7 +35,7 @@ url = ${:_profile_base_location_}/${:filename} ...@@ -35,7 +35,7 @@ url = ${:_profile_base_location_}/${:filename}
depends = ${caucase-jinja2-library-eggs:eggs} depends = ${caucase-jinja2-library-eggs:eggs}
[versions] [versions]
caucase = 0.9.12 caucase = 0.9.14
pem = 21.1.0 pem = 21.1.0
PyJWT = 2.4.0 PyJWT = 2.4.0
......
...@@ -61,6 +61,7 @@ extends = ...@@ -61,6 +61,7 @@ extends =
../../component/pygolang/buildout.cfg ../../component/pygolang/buildout.cfg
../../component/bcrypt/buildout.cfg ../../component/bcrypt/buildout.cfg
../../component/python-pynacl/buildout.cfg ../../component/python-pynacl/buildout.cfg
../../component/python-xmlsec/buildout.cfg
../../stack/caucase/buildout.cfg ../../stack/caucase/buildout.cfg
../../software/neoppod/software-common.cfg ../../software/neoppod/software-common.cfg
# keep neoppod extends last # keep neoppod extends last
...@@ -445,6 +446,7 @@ eggs = ${neoppod:eggs} ...@@ -445,6 +446,7 @@ eggs = ${neoppod:eggs}
${pandas:egg} ${pandas:egg}
${pillow-python:egg} ${pillow-python:egg}
${python-ldap-python:egg} ${python-ldap-python:egg}
${python-xmlsec:egg}
${pysvn-python:egg} ${pysvn-python:egg}
${pycrypto-python:egg} ${pycrypto-python:egg}
${scipy:egg} ${scipy:egg}
......