Commit 348f2411 authored by Arnaud Fontaine

Merge remote-tracking branch 'origin/zope4py2' into zope4py3

parents 3b345857 aa92afab
root = true
[*.{cfg,in,md,json}]
[*.{cfg,in,md,json,py}]
end_of_line = lf
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
[**.json]
[*.{json,py}]
indent_style = space
indent_size = 2
......
......@@ -7,7 +7,7 @@ extends =
../nghttp2/buildout.cfg
../gdbm/buildout.cfg
../libexpat/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../libxml2/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
......@@ -39,9 +39,9 @@ configure-options =
[apache]
recipe = slapos.recipe.cmmi
shared = true
version = 2.4.52
version = 2.4.53
url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
md5sum = a94ae42b84309d5ef6e613ae825b92fa
md5sum = f594f137137b5bdff3998dc17e3e9526
configure-options = --disable-static
--enable-authn-alias
--enable-bucketeer
......@@ -76,7 +76,7 @@ configure-options = --disable-static
--with-z=${zlib:location}
--with-expat=${libexpat:location}
--with-libxml2=${libxml2:location}/include/libxml2
--with-pcre=${pcre:location}
--with-pcre=${pcre:location}/bin/pcre-config
--with-sqlite3=${sqlite3:location}
--with-gdbm=${gdbm:location}
--with-nghttp2=${nghttp2:location}
......
......@@ -10,7 +10,7 @@ extends =
[boost-lib]
recipe = slapos.recipe.cmmi
shared = true
url = http://downloads.sourceforge.net/sourceforge/boost/boost_1_67_0.tar.bz2
url = https://boostorg.jfrog.io/artifactory/main/release/1.67.0/source/boost_1_67_0.tar.bz2
md5sum = ced776cb19428ab8488774e1415535ab
location = @@LOCATION@@
configure-command = ./bootstrap.sh --prefix=${:location} --without-icu $${PYTHON:+--with-python=$PYTHON}
......@@ -27,5 +27,5 @@ environment =
LZMA_LIBRARY_PATH=${xz-utils:location}/lib
patch-options = -p1
patches =
https://sources.debian.org/data/main/b/boost1.67/1.67.0-17/debian/patches/fix-ftbfs-python-3.3.patch#c85fb479d51354deafd1cc7af78f25d2
https://sources.debian.org/data/main/b/boost1.67/1.67.0-13+deb10u1/debian/patches/fix-ftbfs-python-3.3.patch#c85fb479d51354deafd1cc7af78f25d2
patch-binary = ${patch:location}/bin/patch
......@@ -12,7 +12,7 @@ extends =
[cyrus-sasl]
recipe = slapos.recipe.cmmi
shared = true
url = ftp://ftp.cyrusimap.org/cyrus-sasl/cyrus-sasl-2.1.26.tar.gz
url = https://github.com/cyrusimap/cyrus-sasl/releases/download/cyrus-sasl-2.1.26/cyrus-sasl-2.1.26.tar.gz
md5sum = a7f4e5e559a0e37b3ffc438c9456e425
location = @@LOCATION@@
patch-options = -p1
......
[buildout]
extends =
../xz-utils/buildout.cfg
parts =
diffutils
[diffutils]
recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.gnu.org/gnu/diffutils/diffutils-3.8.tar.xz
md5sum = 6a6b0fdc72acfe3f2829aab477876fbc
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
......@@ -18,7 +18,7 @@ recipe = slapos.recipe.cmmi
md5sum = 0284ea239083f04c8b874e08e1aca243
# XXX: We still use an old version of dropbear instead of the latest one
# in order to have all patches working.
url = http://matt.ucc.asn.au/dropbear/releases/dropbear-0.53.1.tar.bz2
url = https://matt.ucc.asn.au/dropbear/releases/dropbear-0.53.1.tar.bz2
configure-options =
--with-zlib=${zlib:location}
......
......@@ -148,7 +148,7 @@ library =
${libpng:location}/lib
${libSM:location}/lib
${libtool:location}/lib
${libuuid:location}/lib
${util-linux:location}/lib
${libX11:location}/lib
${libXau:location}/lib
${libxcb:location}/lib
......
......@@ -31,25 +31,23 @@ md5sum = 5c781723a0d9ed6188960defba8e91cf
# http://ipafont.ipa.go.jp/
[ipaex-fonts]
<= fonts-base
url = http://dl.sourceforge.jp/ipafonts/57330/IPAexfont00201.zip
url = https://osdn.net/frs/redir.php?f=ipafonts%2F57330%2FIPAexfont00201.zip
md5sum = 7bf84182a04a9632268dbcb03f100d05
[ipa-fonts]
<= fonts-base
url = http://dl.sourceforge.jp/ipafonts/51868/IPAfont00303.zip
url = https://osdn.net/frs/redir.php?f=ipafonts%2F51868%2FIPAfont00303.zip
md5sum = 39a828acf27790adbe4944dfb4d94bb1
[ocrb-fonts]
<= fonts-base
url = http://sourceforge.jp/frs/redir.php?m=jaist&f=%2Ftsukurimashou%2F56948%2Focr-0.2.zip
url = https://osdn.net/frs/redir.php?f=tsukurimashou%2F56948%2Focr-0.2.zip
md5sum = 9f2acd83291a31dbe053912f4115db75
[android-fonts]
<= fonts-base
url = ftp://ftp.free.fr/mirrors/ftp.debian.org/pool/main/f/fonts-android/fonts-android_4.3.orig.tar.xz
url = http://archive.debian.org/debian-archive/debian/pool/main/f/fonts-android/fonts-android_4.3.orig.tar.xz
md5sum = 2d41d5342eb5f61591ddeec5b80da74d
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
# The DejaVu fonts are a font family based upon Bitstream Vera v1.10. Its purpose is to
# provide a wider range of characters while maintaining the original look-and-feel
......
......@@ -18,8 +18,8 @@ parts =
[git]
recipe = slapos.recipe.cmmi
shared = true
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.33.1.tar.xz
md5sum = 3462f34d9c17288eee854b7645f6a0a1
url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.35.1.tar.xz
md5sum = 3aae077280b6be861e3c1c637491853a
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
......
......@@ -21,8 +21,8 @@ environment-extra =
[libgpg-error]
<= gpg-common
version = 1.42
md5sum = 133fed221ba8f63f5842858a1ff67cb3
version = 1.44
md5sum = 3956969812cd4fbd133b79c5b5a2e7f7
configure-options-extra =
--disable-doc
--disable-tests
......
......@@ -5,7 +5,7 @@ parts = icu4c
recipe = slapos.recipe.cmmi
shared = true
location = @@LOCATION@@
url = http://download.icu-project.org/files/icu4c/58.2/icu4c-58_2-src.tgz
url = https://github.com/unicode-org/icu/releases/download/release-58-2/icu4c-58_2-src.tgz
md5sum = fac212b32b7ec7ab007a12dff1f3aea1
configure-command = source/configure
configure-options =
......@@ -20,8 +20,3 @@ patches =
[icu4c-58.2]
<= icu4c
[icu4c-55.1]
<= icu4c
url = http://download.icu-project.org/files/icu4c/55.1/icu4c-55_1-src.tgz
md5sum = e2d523df79d6cb7855c2fbe284f4db29
......@@ -41,7 +41,7 @@ environment =
[inkscape]
recipe = slapos.recipe.cmmi
shared = true
url = https://inkscape.org/gallery/item/13330/inkscape-0.92.4_A6N0YOn.tar.bz2
url = https://media.inkscape.org/dl/resources/file/inkscape-0.92.4.tar.bz2
md5sum = ac30f6d5747fd9c620c00dad500f414f
pkg_config_depends = ${freetype:location}/lib/pkgconfig:${gtkmm:location}/lib/pkgconfig:${gtkmm:pkg_config_depends}:${gsl:location}/lib/pkgconfig:${popt:location}/lib/pkgconfig:${garbage-collector:location}/lib/pkgconfig:${libxslt:location}/lib/pkgconfig
configure-command = ${cmake:location}/bin/cmake
......
[buildout]
extends =
../attr/buildout.cfg
parts = libcap-ng
[libcap-ng]
recipe = slapos.recipe.cmmi
shared = true
url = https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-0.8.2.tar.gz
#url = https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-${:version}.tar.gz
# ERROR: The certificate of 'people.redhat.com' doesn't have a known issuer.
url = http://sources.buildroot.net/libcap-ng/libcap-ng-${:version}.tar.gz
version = 0.8.2
md5sum = faf1ef766cf068ad1aba4008ced665f7
location = @@LOCATION@@
configure-options =
--with-python=no
--with-python3=no
......
......@@ -6,7 +6,7 @@ extends =
../pkgconfig/buildout.cfg
../glib/buildout.cfg
../gettext/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../xz-utils/buildout.cfg
......
......@@ -5,7 +5,7 @@ parts =
[libsodium]
recipe = slapos.recipe.cmmi
shared = true
url = https://download.libsodium.org/libsodium/releases/libsodium-1.0.8.tar.gz
url = https://download.libsodium.org/libsodium/releases/old/unsupported/libsodium-1.0.8.tar.gz
md5sum = 0a66b86fd3aab3fe4c858edcd2772760
configure-options =
--disable-static
[buildout]
parts =
libuuid
extends =
../perl/buildout.cfg
[libuuid]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.kernel.org/pub/linux/utils/util-linux/v2.37/util-linux-2.37.2.tar.xz
md5sum = d659bf7cd417d93dc609872f6334b019
configure-options =
--disable-static
--disable-all-programs
--enable-libuuid
--without-libiconv-prefix
--without-libintl-prefix
--without-ncurses
--without-slang
--without-pam
--without-selinux
--without-audit
environment =
PATH=${perl:location}/bin:%(PATH)s
[buildout]
extends =
extends =
../bison/buildout.cfg
../rrdtools/buildout.cfg
../flex/buildout.cfg
......@@ -11,12 +11,12 @@ parts =
[lmsensors]
recipe = slapos.recipe.cmmi
url = https://src.fedoraproject.org/repo/pkgs/lm_sensors/lm_sensors-3.3.5.tar.bz2/da506dedceb41822e64865f6ba34828a/lm_sensors-3.3.5.tar.bz2
md5sum = da506dedceb41822e64865f6ba34828a
url = http://dl.lm-sensors.org/lm-sensors/releases/lm_sensors-3.3.5.tar.bz2
configure-command = true
make-options =
PREFIX=${buildout:parts-directory}/${:_buildout_section_name_}
ETCDIR=${buildout:parts-directory}/${:_buildout_section_name_}/etc
environment =
environment =
PATH=${bison:location}/bin:${flex:location}/bin:%(PATH)s
......@@ -5,7 +5,7 @@ parts =
[lunzip]
recipe = slapos.recipe.cmmi
shared = true
url = http://download.savannah.gnu.org/releases-redirect/lzip/lunzip/lunzip-1.12.tar.gz
md5sum = 09caf2475c58aa40e94b599a4f7a2d13
url = http://download-mirror.savannah.gnu.org/releases/lzip/lunzip/lunzip-1.13.tar.gz
md5sum = 4bc15e65fef99db64e27f4cd369ae02e
configure-options =
--disable-static
[buildout]
extends =
../pygolang/buildout.cfg
[virtual-env-base]
recipe = slapos.recipe.build
_name = ${:_buildout_section_name_}
init =
from zc.buildout.easy_install import working_set
import os
name = options['_name']
eggs = options['eggs']
try:
scripts = "scripts = " + options['scripts']
except KeyError:
scripts = ""
self.buildout.parse("""
[.%(name)s.install-eggs]
recipe = zc.recipe.egg
eggs = %(eggs)s
%(scripts)s
[.%(name)s.install-interpreter]
<= python-interpreter
eggs += %(eggs)s
""" % locals())
install =
with open(location, "w") as f:
f.write(options['template'] % {
"path" : self.buildout['buildout']['bin-directory'],
"name" : self.name,
})
# Template for a virtual env activation script for bash shells on POSIX
[virtual-env-base:posix]
template =
deactivate () {
set PATH PS1
while [ "$1" ]; do
eval "if [ \"\$_OLD_VENV_$1\" ]; then $1=\$_OLD_VENV_$1; else unset $1; fi; unset \$_OLD_VENV_$1"
shift
done
unset -f deactivate
}
VENV_PATH=%(path)s
_OLD_VENV_PATH=$PATH
_OLD_VENV_PS1=$PS1
PATH=$VENV_PATH:$PATH
PS1='(%(name)s) '$PS1
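As a rough illustration (not part of the profile), the init code above expands its template into two hidden sections per virtual env. For a hypothetical section named my-env requesting the requests egg and defining no scripts option, the text handed to self.buildout.parse() would look like this minimal sketch:

# Minimal sketch, assuming a hypothetical [my-env] section with eggs = requests
# and no scripts option (so the scripts line collapses to an empty string).
name, eggs, scripts = 'my-env', 'requests', ''
print("""
[.%(name)s.install-eggs]
recipe = zc.recipe.egg
eggs = %(eggs)s
%(scripts)s
[.%(name)s.install-interpreter]
<= python-interpreter
eggs += %(eggs)s
""" % locals())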
......@@ -14,7 +14,7 @@ parts =
[make3.81-debian]
recipe = slapos.recipe.cmmi
url = http://ftp.de.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_3.81.orig.tar.gz
url = http://archive.debian.org/debian-archive/debian/pool/main/m/make-dfsg/make-dfsg_3.81.orig.tar.gz
md5sum = 7c93b1ab4680eb21c2c13f4f47741e2d
shared = true
patches =
......
......@@ -81,7 +81,7 @@ environment =
PATH=${patch:location}/bin:%(PATH)s
patch-options = -p1
patches =
https://sources.debian.org/data/main/m/mariadb-10.3/1:10.3.22-0+deb10u1/debian/patches/0024-Revert-to-using-system-pcre-library.patch#1c6a0f2634f5a56122299674b77b1131
https://sources.debian.org/data/main/m/mariadb-10.3/1:10.3.34-0+deb10u1/debian/patches/0024-Revert-to-using-system-pcre-library.patch#1c6a0f2634f5a56122299674b77b1131
post-install =
ldd=`ldd %(location)s/lib/plugin/ha_rocksdb.so`
for x in ${lz4:location} ${snappy:location} ${zstd:location}
......
......@@ -10,9 +10,8 @@ extends =
[nano]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.nano-editor.org/dist/v2.8/nano-2.8.4.tar.xz
md5sum = 02ff28870194178595b287fc16fa611b
location = @@LOCATION@@
url = https://www.nano-editor.org/dist/v6/nano-6.2.tar.xz
md5sum = 12784a5c245518d7580125ebbd6b7601
# The dummy PKG_CONFIG is for the case where both pkg-config and ncursesw
# are installed on the system.
environment=
......@@ -21,5 +20,6 @@ environment=
CPPFLAGS=-I${file:location}/include -I${zlib:location}/include
LDFLAGS=-L${file:location}/lib/ -Wl,-rpath=${file:location}/lib/ -L${zlib:location}/lib/ -Wl,-rpath=${zlib:location}/lib/
post-install =
cd ${:location} && mkdir etc &&
echo include "${:location}/share/nano/*.nanorc" > etc/nanorc
cd %(location)s
mkdir etc
echo 'include %(location)s/share/nano/*.nanorc' > etc/nanorc
......@@ -7,6 +7,7 @@ extends =
../openssl/buildout.cfg
../zlib/buildout.cfg
../python-2.7/buildout.cfg
../python3/buildout.cfg
parts =
nodejs
......@@ -14,6 +15,17 @@ parts =
#[nodejs]
#<= nodejs-X.Y.Z
# nodejs 16 needs gcc > 8.3
[nodejs-16.13.2]
<= nodejs-base
openssl_location = ${openssl:location}
version = v16.13.2
md5sum = ae3a05fc273536f83c685a7425a7882d
patches =
https://raw.githubusercontent.com/nxhack/openwrt-node-packages/9e3ab4cc9fd5f19c25ccd6f19be5a9b47e2c6933/node/patches/v16.x/010-execvp-arg-list-too-long.patch#17bb14ea3a1b5b4832e3680e4edfeded
patch-options = -p1
PATH = ${pkgconfig:location}/bin:${python3:location}/bin:${patch:location}/bin/:%(PATH)s
[nodejs-14.16.0]
<= nodejs-base
openssl_location = ${openssl:location}
......@@ -66,16 +78,3 @@ environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath=${:openssl_location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
LD_LIBRARY_PATH=${:openssl_location}/lib
[npm]
# Node.js Package Manager
# Deprecated. Included in node >= 0.6.3.
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
commit = 3136abc5c6b3ed332c4700ece24450fada63639b
origin = https://github.com/isaacs/npm.git
git-bin = ${git:location}/bin/git
node-bin = ${nodejs-0.4:location}/bin/node
command = (GIT_SSL_NO_VERIFY=true ${:git-bin} clone --quiet ${:origin} ${:location} && cd ${:location} && ${:git-bin} reset --hard ${:commit} && ${:location}/configure --prefix=${:location} && GIT_SSL_NO_VERIFY=true ${:git-bin} submodule update --init --recursive && ${:node-bin} cli.js install npm@1.0.106 -g -f) || (rm -fr ${:location}; exit 1)
update-command =
......@@ -17,8 +17,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.openssl.org/source/openssl-1.1.1l.tar.gz
md5sum = ac0d4387f3ba0ad741b0580dd45f6ff3
url = https://www.openssl.org/source/openssl-1.1.1n.tar.gz
md5sum = 2aad5635f9bb338bc2c6b7d19cbc9676
location = @@LOCATION@@
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with DESTDIR). Used by slapos.package.git/obs
......
......@@ -7,7 +7,7 @@ parts =
[pcre]
recipe = slapos.recipe.cmmi
shared = true
url = https://ftp.pcre.org/pub/pcre/pcre-8.45.tar.bz2
url = https://download.sourceforge.net/pcre/pcre/8.45/pcre-8.45.tar.bz2
md5sum = 4452288e6a0eefb2ab11d36010a1eebb
configure-options =
--disable-static
......
......@@ -11,7 +11,7 @@ shared = false
<= perl-CPAN-package
# XXX it's not on CPAN, so we use url
version = 3.0.3
url = https://www.percona.com/downloads/percona-toolkit/${:version}/source/tarball/percona-toolkit-${:version}.tar.gz
url = https://downloads.percona.com/downloads/percona-toolkit/${:version}/source/tarball/percona-toolkit-${:version}.tar.gz
md5sum = 8af181994fdf9aa984475637861098e9
inc = ${perl-DBI:site_perl}:${perl-DBD-mariadb:site_perl}
......@@ -14,7 +14,7 @@ extends =
[poppler]
recipe = slapos.recipe.cmmi
shared = true
url = http://poppler.freedesktop.org/poppler-0.43.0.tar.xz
url = https://poppler.freedesktop.org/poppler-0.43.0.tar.xz
md5sum = 1d2b001663119855cdfbc0713dbfb9c6
configure-options =
--disable-cairo-output
......
......@@ -25,7 +25,7 @@ parts =
recipe = slapos.recipe.cmmi
shared = true
version = v2.3.2
url = https://github.com/sysown/proxysql/archive/${:version}.tar.gz
url = https://github.com/sysown/proxysql/archive/refs/tags/${:version}.tar.gz
md5sum = 969129ac43c9f64641509891a116e0e1
configure-command = true
make-options = GIT_VERSION=${:version}
......
......@@ -8,7 +8,7 @@ extends =
../libcap-ng/buildout.cfg
../libpng/buildout.cfg
../liburing/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../meson/buildout.cfg
../ncurses/buildout.cfg
../ninja/buildout.cfg
......
......@@ -10,7 +10,7 @@ parts =
recipe = slapos.recipe.cmmi
shared = true
location = @@LOCATION@@
url = http://download.qt.io/official_releases/qt/5.6/5.6.2/submodules/qtbase-opensource-src-5.6.2.tar.gz
url = https://download.qt.io/new_archive/qt/5.6/5.6.2/submodules/qtbase-opensource-src-5.6.2.tar.gz
md5sum = 7aa5841b50c411e23e31e8a6cc1c6981
configure-command = ./configure
configure-options =
......@@ -42,7 +42,7 @@ post-install =
# qmake binary can be reached directly from ${qt:location}/bin/qmake if [qt] is fully built
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = http://download.qt.io/archive/qt/4.8/4.8.7/qt-everywhere-opensource-src-4.8.7.tar.gz
url = https://download.qt.io/new_archive/qt/4.8/4.8.7/qt-everywhere-opensource-src-4.8.7.tar.gz
md5sum = d990ee66bf7ab0c785589776f35ba6ad
# see https://github.com/NixOS/nixpkgs/blob/3e387c3e005c87566b5403d24c86f71f4945a79b/pkgs/development/libraries/qt-4.x/4.8/default.nix#L101
pre-configure =
......
......@@ -11,7 +11,7 @@ extends =
../pcre/buildout.cfg
../libffi/buildout.cfg
../zlib/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
# compilation
../git/buildout.cfg
......
......@@ -12,7 +12,7 @@ parts =
[r-language]
recipe = slapos.recipe.cmmi
url = http://cran.univ-paris1.fr/src/base/R-3/R-3.2.1.tar.gz
url = https://cran.r-project.org/src/base/R-3/R-3.2.1.tar.gz
md5sum = c2aac8b40f84e08e7f8c9068de9239a3
configure-options =
--enable-R-shlib
......
......@@ -5,7 +5,7 @@ extends =
../curl/buildout.cfg
../libestr/buildout.cfg
../libfastjson/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../zlib/buildout.cfg
[rsyslogd]
......
......@@ -3,8 +3,9 @@ parts =
serf
extends =
../apache/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../openssl/buildout.cfg
../python-2.7/buildout.cfg
../zlib/buildout.cfg
[serf]
......@@ -13,7 +14,7 @@ shared = true
url = https://archive.apache.org/dist/serf/serf-1.3.9.tar.bz2
md5sum = 370a6340ff20366ab088012cd13f2b57
scons-command =
python ${scons:location}/scons.py \
${python2.7:location}/bin/python ${scons:location}/scons.py \
APR="${apr:location}" \
APU="${apr-util:location}" \
OPENSSL="${openssl:location}" \
......
......@@ -8,7 +8,7 @@ parts =
[socat]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.dest-unreach.org/socat/download/socat-${:version}.tar.gz
url = http://www.dest-unreach.org/socat/download/Archive/socat-${:version}.tar.gz
version = 1.7.3.2
md5sum = aec3154f7854580cfab0c2d81e910519
environment =
......
......@@ -8,8 +8,8 @@ parts =
[sqlite3]
recipe = slapos.recipe.cmmi
shared = true
url = https://sqlite.org/2021/sqlite-autoconf-3360000.tar.gz
md5sum = f5752052fc5b8e1b539af86a3671eac7
url = https://sqlite.org/2022/sqlite-autoconf-3370200.tar.gz
md5sum = 683cc5312ee74e71079c14d24b7a6d27
configure-options =
--disable-static
--enable-readline
......
......@@ -5,7 +5,7 @@
extends =
../apache/buildout.cfg
../libexpat/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
../perl/buildout.cfg
......
......@@ -4,6 +4,7 @@ extends =
../libsecret/buildout.cfg
../pkgconfig/buildout.cfg
../patchelf/buildout.cfg
../python3/buildout.cfg
../yarn/buildout.cfg
download-plugins.cfg
buildout.hash.cfg
......@@ -27,7 +28,7 @@ path = ${yarn.lock:location}
# path = ${package.json:location}
environment =
TMPDIR=@@LOCATION@@/tmp
PATH=${nodejs:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PATH=${nodejs:location}/bin:${pkgconfig:location}/bin:${python3:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${libsecret:pkg-config-path}
LDFLAGS=-Wl,-rpath=${libsecret:location}/lib -L${gettext:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${glib:location}/lib
pre-configure =
......
......@@ -19,4 +19,4 @@ md5sum = 8157c22134200bd862a07c6521ebf799
[yarn.lock]
_update_hash_filename_ = yarn.lock
md5sum = e0341b7a715cc757a671aef902e6767d
md5sum = 7c6a0103f9b07cf51940f25b8e3a5730
......@@ -26,7 +26,7 @@ urls = vscode-bat https://open-vsx.org/api/vscode/bat/1.62.3/file/vscode.bat-1.6
vscode-jake https://open-vsx.org/api/vscode/jake/1.62.3/file/vscode.jake-1.62.3.vsix fb7d81d41eaa04d53183bd6163e83ed3
vscode-java https://open-vsx.org/api/vscode/java/1.62.3/file/vscode.java-1.62.3.vsix d8239ae11719762e8b8152010bca50c9
vscode-javascript https://open-vsx.org/api/vscode/javascript/1.62.3/file/vscode.javascript-1.62.3.vsix 6f5babbbd2f437f2e34df2feb97ce595
ms-vscode-js-debug https://open-vsx.org/api/ms-vscode/js-debug/1.61.0/file/ms-vscode.js-debug-1.61.0.vsix 6457840c5c8d43e5b7a39fabf72b7367
ms-vscode-js-debug https://open-vsx.org/api/ms-vscode/js-debug/1.66.1/file/ms-vscode.js-debug-1.66.1.vsix 67bb6b75963b92fed5152598a4f969ae
vscode-json https://open-vsx.org/api/vscode/json/1.62.3/file/vscode.json-1.62.3.vsix 9e0e7b25a2d4c5df88ea3a75cf60372b
vscode-json-language-features https://open-vsx.org/api/vscode/json-language-features/1.45.1/file/vscode.json-language-features-1.45.1.vsix b7aa9d96d67792dedf9c2558880f38c0
vscode-less https://open-vsx.org/api/vscode/less/1.62.3/file/vscode.less-1.62.3.vsix 9eb5e65d5d089abae8fc7cb2ebcdd53a
......@@ -70,12 +70,12 @@ urls = vscode-bat https://open-vsx.org/api/vscode/bat/1.62.3/file/vscode.bat-1.6
vscode-yaml https://open-vsx.org/api/vscode/yaml/1.62.3/file/vscode.yaml-1.62.3.vsix f9719aaae4fd9671fd2fa15d41343e1d
EditorConfig-EditorConfig https://open-vsx.org/api/EditorConfig/EditorConfig/0.16.6/file/EditorConfig.EditorConfig-0.16.6.vsix e787245e6c68617178ae995ad97c3ccb
dbaeumer-vscode-eslint https://open-vsx.org/api/dbaeumer/vscode-eslint/2.1.20/file/dbaeumer.vscode-eslint-2.1.20.vsix 1cb024ac02ebeb5ce6b0dfed6e51cdd2
ms-vscode-references-view https://open-vsx.org/api/ms-vscode/references-view/0.0.82/file/ms-vscode.references-view-0.0.82.vsix d23827c6600ae821c829fca1629968c7
ms-vscode-references-view https://open-vsx.org/api/ms-vscode/references-view/0.0.89/file/ms-vscode.references-view-0.0.89.vsix 7ec05cb01a77ee7f6c5198a5225fa707
vscjava-vscode-java-debug https://open-vsx.org/api/vscjava/vscode-java-debug/0.29.0/file/vscjava.vscode-java-debug-0.29.0.vsix 1eb95110f84ff8dcabbe3c672066b86d
redhat-java https://open-vsx.org/api/redhat/java/0.61.0/file/redhat.java-0.61.0.vsix 72e548e2845e1ff655f28111558d6942
vscjava-vscode-java-test https://open-vsx.org/api/vscjava/vscode-java-test/0.26.0/file/vscjava.vscode-java-test-0.26.0.vsix fd63da5537a4bee1d3ceaae0fa6bf419
ms-python-python https://open-vsx.org/api/ms-python/python/2020.9.112786/file/ms-python.python-2020.9.112786.vsix c64b79fa822418e07b6d0f57b8838b44
perrinjerome-vscode-zc-buildout https://open-vsx.org/api/perrinjerome/vscode-zc-buildout/0.6.2/file/perrinjerome.vscode-zc-buildout-0.6.2.vsix 76c1420f238a0754a505459563220973
perrinjerome-vscode-zc-buildout https://open-vsx.org/api/perrinjerome/vscode-zc-buildout/0.7.0/file/perrinjerome.vscode-zc-buildout-0.7.0.vsix 7598fa3c1c3701cb2da5c330fe996ff1
jebbs-plantuml https://open-vsx.org/api/jebbs/plantuml/2.14.0/file/jebbs.plantuml-2.14.0.vsix 13fa7cbd14a30ecca166c41a307c7a73
rafaelmaiolla-diff https://open-vsx.org/api/rafaelmaiolla/diff/0.0.1/file/rafaelmaiolla.diff-0.0.1.vsix 1d8f868bc19b7d703c1be2bf99c4c7f9
perrinjerome-git-commit-syntax https://open-vsx.org/api/perrinjerome/git-commit-syntax/0.0.1/file/perrinjerome.git-commit-syntax-0.0.1.vsix 46625f2f05e244911c2cb9cc5032c0ef
......
(Source diff not shown: too large to display.)
# Tig: text-mode interface for Git
# http://jonas.nitro.dk/tig/
# https://jonas.github.io/tig/
[buildout]
extends =
../libiconv/buildout.cfg
../ncurses/buildout.cfg
[tig]
recipe = slapos.recipe.cmmi
shared = true
url = http://jonas.nitro.dk/tig/releases/tig-2.1.tar.gz
md5sum = d6c237aba2c03d85897da79789fd6104
url = https://github.com/jonas/tig/releases/download/tig-2.5.5/tig-2.5.5.tar.gz
md5sum = 0902ba706e8efaf6c2087d8b66393375
environment =
CFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
CFLAGS=-I${ncurses:location}/include -I${libiconv:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${libiconv:location}/lib -Wl,-rpath=${libiconv:location}/lib
......@@ -45,6 +45,7 @@ patch-options = -p1
# (see https://github.com/apache/trafficserver/issues/8539 for the detail)
patches =
${:_profile_base_location_}/trafficserver-9.1.1-TSHttpTxnCacheLookupStatusGet-fix.patch#d8ed3db3a48e97eb72aaaf7d7598a2d2
${:_profile_base_location_}/trafficserver-9.1.1-via-string-rapid-cdn.patch#8c39243d7525222385d5964485734f99
environment =
PATH=${libtool:location}/bin:${make:location}/bin:${patch:location}/bin:${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
LDFLAGS =-L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${tcl:location}/lib -Wl,-rpath=${tcl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${luajit:location}/lib -lm
......
diff -ur trafficserver-9.1.1.orig/proxy/http/HttpTransactHeaders.cc trafficserver-9.1.1/proxy/http/HttpTransactHeaders.cc
--- trafficserver-9.1.1.orig/proxy/http/HttpTransactHeaders.cc 2022-02-09 12:21:56.591350540 +0100
+++ trafficserver-9.1.1/proxy/http/HttpTransactHeaders.cc 2022-03-07 13:02:31.503849619 +0100
@@ -758,15 +758,6 @@
write_hdr_protocol_stack(via_string, via_limit - via_string, ProtocolStackDetail::Standard, proto_buf.data(), n_proto);
*via_string++ = ' ';
- via_string += nstrcpy(via_string, s->http_config_param->proxy_hostname);
-
- *via_string++ = '[';
- memcpy(via_string, Machine::instance()->uuid.getString(), TS_UUID_STRING_LEN);
- via_string += TS_UUID_STRING_LEN;
- *via_string++ = ']';
- *via_string++ = ' ';
- *via_string++ = '(';
-
memcpy(via_string, s->http_config_param->proxy_request_via_string, s->http_config_param->proxy_request_via_string_len);
via_string += s->http_config_param->proxy_request_via_string_len;
@@ -793,7 +784,6 @@
}
}
- *via_string++ = ')';
*via_string = 0;
ink_assert((size_t)(via_string - new_via_string) < (sizeof(new_via_string) - 1));
@@ -848,10 +838,6 @@
write_hdr_protocol_stack(via_string, via_limit - via_string, ProtocolStackDetail::Standard, proto_buf.data(), n_proto);
*via_string++ = ' ';
- via_string += nstrcpy(via_string, s->http_config_param->proxy_hostname);
- *via_string++ = ' ';
- *via_string++ = '(';
-
memcpy(via_string, s->http_config_param->proxy_response_via_string, s->http_config_param->proxy_response_via_string_len);
via_string += s->http_config_param->proxy_response_via_string_len;
@@ -877,7 +863,6 @@
}
}
- *via_string++ = ')';
*via_string = 0;
ink_assert((size_t)(via_string - new_via_string) < (sizeof(new_via_string) - 1));
......@@ -5,7 +5,7 @@ extends =
../libtool/buildout.cfg
../git/buildout.cfg
../openssl/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
parts = accords
......
......@@ -4,7 +4,7 @@ parts = apache-perl perl-Apache2-Request
extends =
../apache/buildout.cfg
../perl/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
[apache-perl]
# Note: Shall react to each build of apache and reinstall itself
......
......@@ -7,8 +7,8 @@ extends =
[util-linux]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.kernel.org/pub/linux/utils/util-linux/v2.37/util-linux-2.37.1.tar.xz
md5sum = 6d244f0f59247e9109f47d6e5dd0556b
url = https://www.kernel.org/pub/linux/utils/util-linux/v2.37/util-linux-2.37.2.tar.xz
md5sum = d659bf7cd417d93dc609872f6334b019
configure-options =
--disable-static
--enable-libuuid
......@@ -50,3 +50,21 @@ environment =
PATH=${perl:location}/bin:${xz-utils:location}/bin:%(PATH)s
LDFLAGS=-L${libcap-ng:location}/lib -Wl,-rpath=${libcap-ng:location}/lib
CFLAGS=-I${libcap-ng:location}/include
[libuuid]
# libuuid is built from the util-linux source code with only the libuuid feature enabled.
<= util-linux
configure-options =
--disable-static
--disable-all-programs
--enable-libuuid
--without-libiconv-prefix
--without-libintl-prefix
--without-ncurses
--without-slang
--without-pam
--without-selinux
--without-audit
environment =
PATH=${perl:location}/bin:%(PATH)s
[buildout]
extends =
extends =
../freetype/buildout.cfg
../fontconfig/buildout.cfg
../libpng/buildout.cfg
......@@ -13,7 +13,7 @@ parts =
[wkhtmltopdf]
recipe = slapos.recipe.build
url = http://download.gna.org/wkhtmltopdf/0.12/0.12.4/wkhtmltox-0.12.4_${:_url}.tar.xz
url = https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.4/wkhtmltox-0.12.4_${:_url}.tar.xz
environment =
PATH=${xz-utils:location}/bin:%(PATH)s
install =
......
......@@ -3,7 +3,7 @@ parts =
xapian
extends =
../zlib/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
[xapian]
recipe = slapos.recipe.cmmi
......
......@@ -10,7 +10,7 @@ extends =
../icu/buildout.cfg
../intltool/buildout.cfg
../libtool/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
../libxml2/buildout.cfg
../libxslt/buildout.cfg
../meson/buildout.cfg
......@@ -134,7 +134,7 @@ environment =
[libXext]
recipe = slapos.recipe.cmmi
shared = true
url = http://ftp.x.org/pub/individual/lib/libXext-1.3.3.tar.bz2
url = https://ftp.x.org/pub/individual/lib/libXext-1.3.3.tar.bz2
md5sum = 52df7c4c1f0badd9f82ab124fb32eb97
pkg_config_depends = ${libX11:location}/lib/pkgconfig:${libX11:pkg_config_depends}
environment =
......@@ -206,7 +206,7 @@ configure-options =
[fixesproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/fixesproto-5.0.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/fixesproto-5.0.tar.bz2
md5sum = e7431ab84d37b2678af71e29355e101d
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -215,7 +215,7 @@ environment =
[bigreqsproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/bigreqsproto-1.1.2.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/bigreqsproto-1.1.2.tar.bz2
md5sum = 1a05fb01fa1d5198894c931cf925c025
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -224,7 +224,7 @@ environment =
[xcmiscproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/xcmiscproto-1.2.2.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/xcmiscproto-1.2.2.tar.bz2
md5sum = 5f4847c78e41b801982c8a5e06365b24
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -233,7 +233,7 @@ environment =
[damageproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/damageproto-1.2.1.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/damageproto-1.2.1.tar.bz2
md5sum = 998e5904764b82642cc63d97b4ba9e95
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -261,7 +261,7 @@ environment =
[renderproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/renderproto-0.11.1.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/renderproto-0.11.1.tar.bz2
md5sum = a914ccc1de66ddeb4b611c6b0686e274
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -292,7 +292,7 @@ environment =
[recordproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/recordproto-1.14.2.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/recordproto-1.14.2.tar.bz2
md5sum = 1b4e5dede5ea51906f1530ca1e21d216
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -301,7 +301,7 @@ environment =
[resourceproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/resourceproto-1.2.0.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/resourceproto-1.2.0.tar.bz2
md5sum = cfdb57dae221b71b2703f8e2980eaaf4
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -310,7 +310,7 @@ environment =
[xineramaproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/xineramaproto-1.2.1.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/xineramaproto-1.2.1.tar.bz2
md5sum = 9959fe0bfb22a0e7260433b8d199590a
environment =
PKG_CONFIG_PATH=${xorg-util-macros:location}/share/pkgconfig
......@@ -396,7 +396,7 @@ environment =
[renderext]
recipe = slapos.recipe.cmmi
shared = true
url = http://xlibs.freedesktop.org/release/renderext-0.9.tar.bz2
url = https://xlibs.freedesktop.org/release/renderext-0.9.tar.bz2
md5sum = d43c2afc69937655d13c02588c9ff974
[libXrender]
......@@ -537,7 +537,7 @@ environment =
[compositeproto]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.x.org/releases/X11R7.7/src/everything/compositeproto-0.4.2.tar.bz2
url = https://www.x.org/releases/X11R7.7/src/everything/compositeproto-0.4.2.tar.bz2
md5sum = 98482f65ba1e74a08bf5b056a4031ef0
[libXcomposite]
......
......@@ -5,11 +5,11 @@ extends =
../nodejs/buildout.cfg
[yarn]
<= yarn-1.22.10
<= yarn-1.22.15
[yarn-1.22.10]
[yarn-1.22.15]
<= yarn-wrapper
yarn-download = ${yarn-download-1.22.10:location}
yarn-download = ${yarn-download-1.22.15:location}
[yarn-1.17.3]
<= yarn-wrapper
......@@ -43,10 +43,10 @@ recipe = slapos.recipe.build:download-unpacked
shared = true
url = https://github.com/yarnpkg/yarn/releases/download/v${:version}/yarn-v${:version}.tar.gz
[yarn-download-1.22.10]
[yarn-download-1.22.15]
<= yarn-download
version = 1.22.10
md5sum = 52e8dbe9d0cb90683dd3ee2ebf2becb8
version = 1.22.15
md5sum = 4113da7ab81a77fb30f74737a459a225
[yarn-download-1.17.3]
<= yarn-download
......
[buildout]
extends =
../libtool/buildout.cfg
../libuuid/buildout.cfg
../util-linux/buildout.cfg
[zeromq]
<= zeromq3
......
......@@ -5,5 +5,5 @@ parts =
[zlib]
recipe = slapos.recipe.cmmi
shared = true
url = http://downloads.sourceforge.net/project/libpng/zlib/1.2.11/zlib-1.2.11.tar.gz
md5sum = 1c9f62f0778697a09d36121ead88e08e
url = https://www.zlib.net/zlib-1.2.12.tar.gz
md5sum = 5fc414a9726be31427b440b434d05f78
......@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '1.0.226'
version = '1.0.238'
name = 'slapos.cookbook'
long_description = open("README.rst").read()
......
......@@ -24,7 +24,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from urlparse import urlparse
from six.moves.urllib.parse import urlparse
from slapos.recipe.librecipe import GenericBaseRecipe
......
......@@ -34,6 +34,7 @@ import string, random
import json
import traceback
from slapos import slap
from slapos.util import binFromIpv6
class Recipe(GenericBaseRecipe):
......@@ -54,20 +55,8 @@ class Recipe(GenericBaseRecipe):
return GenericBaseRecipe.__init__(self, buildout, name, options)
def getSerialFromIpv6(self, ipv6):
prefix = ipv6.split('/')[0].lower()
hi, lo = struct.unpack('!QQ', socket.inet_pton(socket.AF_INET6, prefix))
ipv6_int = (hi << 64) | lo
serial = '0x1%x' % ipv6_int
# delete non significant part
for part in prefix.split(':')[::-1]:
if part:
for i in ['0']*(4 - len(part)):
part = i + part
serial = serial.split(part)[0] + part
break
return serial
prefix, prefix_length = ipv6.split('/')
return "0x%x" % int('1%s' % binFromIpv6(prefix)[:int(prefix_length)], 2)
def generateCertificate(self):
key_file = self.options['key-file'].strip()
......
......@@ -109,13 +109,18 @@ class Re6stnetTest(unittest.TestCase):
recipe = self.new_recipe()
serial = recipe.getSerialFromIpv6(ipv6)
self.assertEqual(serial, '0x1be280db8fe6a0d8504fe054a00ae0aea')
self.assertEqual(serial, '0x1be280db8fe6a0d85')
ipv6 = '2001:db8:24::/48'
serial = recipe.getSerialFromIpv6(ipv6)
self.assertEqual(serial, '0x120010db80024')
ipv6 = '2001:db8:24::/47'
serial = recipe.getSerialFromIpv6(ipv6)
self.assertEqual(serial, '0x900086dc0012')
def test_install(self):
self.options.update({
'ipv6-prefix': '2001:db8:24::/48',
......
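For reference, a minimal sketch of the new serial computation, assuming slapos.util.binFromIpv6 returns the 128-bit binary string of the address (the bin_from_ipv6 helper below is a stand-in, not the real function); the expected values come from the updated test above:

import socket
import struct

def bin_from_ipv6(addr):
    # Stand-in for slapos.util.binFromIpv6: 128-bit binary string of the address.
    hi, lo = struct.unpack('!QQ', socket.inet_pton(socket.AF_INET6, addr))
    return format((hi << 64) | lo, '0128b')

def serial_from_ipv6(ipv6):
    # Keep only the routed prefix bits and prepend a '1' marker bit,
    # as the new getSerialFromIpv6() does.
    prefix, prefix_length = ipv6.split('/')
    return '0x%x' % int('1' + bin_from_ipv6(prefix)[:int(prefix_length)], 2)

assert serial_from_ipv6('2001:db8:24::/48') == '0x120010db80024'
assert serial_from_ipv6('2001:db8:24::/47') == '0x900086dc0012'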
import unittest
import zc.buildout.testing
class UrlparseTest(unittest.TestCase):
def setUp(self):
self.buildout = buildout = zc.buildout.testing.Buildout()
buildout['urlinfo'] = {}
buildout['urlinfo']['url'] = "http://www.google.com/search?hl=en&q=urlparse&btnG=Google+Search"
from slapos.recipe import _urlparse
self.recipe = _urlparse.Recipe(buildout,"urlinfo",buildout['urlinfo'])
def test_options(self):
buildout = self.buildout
self.assertTrue(buildout['urlinfo'])
self.assertEqual(buildout['urlinfo']['path'], 'search')
self.assertEqual(buildout['urlinfo']['scheme'], 'http')
self.assertEqual(buildout['urlinfo']['host'], 'www.google.com')
self.assertEqual(buildout['urlinfo']['query'], 'hl=en&q=urlparse&btnG=Google+Search')
def test_install(self):
self.assertEqual(self.recipe.install(), [])
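The recipe under test is not shown in this diff; the following is a minimal sketch of what slapos.recipe._urlparse presumably does, inferred only from the assertions above (not the actual implementation):

from six.moves.urllib.parse import urlparse

class Recipe(object):
    # Inferred from the test: expose the parsed pieces of options['url']
    # back into the buildout section's options.
    def __init__(self, buildout, name, options):
        parsed = urlparse(options['url'])
        options['scheme'] = parsed.scheme
        options['host'] = parsed.netloc
        options['path'] = parsed.path.lstrip('/')
        options['query'] = parsed.query

    def install(self):
        return []

    update = install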
[instance-profile]
filename = instance.cfg.in
md5sum = 6e3e1dc304378640707cdb6a792106f1
#############################
#
# Deploy beremiz' runtime instance
#
#############################
[buildout]
parts =
publish-connection-parameter
download-plc
beremiz-runtime
#beremiz-runtime-promise
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
extends = {{ template_monitor }}
[download-plc]
recipe = slapos.recipe.build:download-unpacked
offline = false
url = ${instance-parameter:configuration.runtime_plc_url}
[instance-parameter]
recipe = slapos.cookbook:slapconfiguration
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
configuration.runtime_plc_url =
configuration.runtime_plc_md5sum =
configuration.autostart = 1
configuration.interface = 0.0.0.0
configuration.port = 61248
# Create all needed directories
[directory]
recipe = slapos.cookbook:mkdirectory
home = ${buildout:directory}
etc = ${:home}/etc
var = ${:home}/var
script = ${:etc}/run
service = ${:etc}/service
log = ${:var}/log
[beremiz-runtime]
logfile = ${directory:log}/beremiz-runtime.log
recipe = slapos.cookbook:wrapper
command-line =
{{ buildout['bin-directory'] }}/pythonwitheggs {{ buildout['directory'] }}/parts/beremiz-source/Beremiz_service.py -a ${instance-parameter:configuration.autostart} -p ${instance-parameter:configuration.port} -i ${instance-parameter:configuration.interface} -x 1 ${directory:home}/parts/download-plc
wrapper-path = ${directory:service}/beremiz-runtime
[beremiz-runtime-promise]
<= monitor-promise-base
module = check_port_listening
name = beremiz-runtime.py
config-hostname= ${instance-parameter:configuration.interface}
config-port = ${instance-parameter:configuration.port}
[publish-connection-parameter]
recipe = slapos.cookbook:publish
port = ${instance-parameter:configuration.port}
interface = ${instance-parameter:configuration.interface}
[buildout]
extends =
buildout.hash.cfg
../../component/git/buildout.cfg
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
parts =
beremiz-source
slapos-cookbook
instance-profile
python-interpreter
[beremiz-source]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/beremiz/beremiz.git
branch = default
git-executable = ${git:location}/bin/git
[beremiz]
recipe = zc.recipe.egg:develop
egg = beremiz
setup = ${beremiz-source:location}
[Twisted]
recipe = zc.recipe.egg:custom
egg = Twisted
setup-eggs =
six
pathlib
incremental
[python-interpreter]
recipe = zc.recipe.egg
interpreter = pythonwitheggs
eggs = click
prompt_toolkit
pygments
bitarray
future
six
Pyro
zeroconf-py2compat
pathlib
Nevow
msgpack
autobahn
${beremiz:egg}
${Twisted:egg}
[instance-profile]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/${:filename}
rendered = ${buildout:directory}/instance.cfg
extensions = jinja2.ext.do
context =
section buildout buildout
raw template_monitor ${monitor2-template:rendered}
# md5sum is fetched from buildout.hash.cfg and can be recalculated automatically by
# calling update-hash
[versions]
Twisted = 20.3.0
attrs = 19.2.0
Automat = 0.3.0
zope.interface = 4.4.2
Nevow = 0.14.5
PyHamcrest = 2.0.2
Pygments = 2.9.0
Pyro = 3.16
bitarray = 2.1.3
constantly = 15.1.0
future = 0.18.2
hyperlink = 21.0.0
incremental = 21.3.0
pathlib = 1.0.1
prompt-toolkit = 3.0.19
zeroconf-py2compat = 0.19.10
# Required by:
# Automat==0.3.0
characteristic = 14.3.0
# Required by:
# zeroconf-py2compat==0.19.10
ifcfg = 0.21
# Required by:
# hyperlink==21.0.0
typing = 3.10.0.0
autobahn = 19.11.2
txaio = 18.8.1
idna = 2.10
......@@ -22,19 +22,19 @@ md5sum = 5784bea3bd608913769ff9a8afcccb68
[profile-caddy-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 0950e09ad1f03f0789308f5f7a7eb1b8
md5sum = 04e550480d3057ca65d87c6fadbaed6e
[profile-caddy-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = c5d1e235959a877b4f3157369c6f5e10
md5sum = 63b418626ef0f8ac54d6359fb6637371
[profile-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = c67e172c0c6eca955b18962404056a33
md5sum = e3ba0da5d137dcbd56c2604d200ac3b9
[profile-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
md5sum = df304a8aee87b6f2425241016a48f7a5
md5sum = be54431846fe7f3cee65260eefc83d62
[profile-caddy-frontend-configuration]
_update_hash_filename_ = templates/Caddyfile.in
......@@ -46,11 +46,11 @@ md5sum = 88af61e7abbf30dc99a1a2526161128d
[template-default-slave-virtualhost]
_update_hash_filename_ = templates/default-virtualhost.conf.in
md5sum = 37475d79f28c5f126bc1947fdb938fdb
md5sum = 57c86795293b11300a036f5f8cf2c868
[template-backend-haproxy-configuration]
_update_hash_filename_ = templates/backend-haproxy.cfg.in
md5sum = ae4c9ce775ea003aa51eda5ecbbeec73
md5sum = 6d4ad68ac44ccc72fe9148bd8e05a6f0
[template-empty]
_update_hash_filename_ = templates/empty.in
......@@ -62,7 +62,7 @@ md5sum = 975177dedf677d24e14cede5d13187ce
[template-trafficserver-records-config]
_update_hash_filename_ = templates/trafficserver/records.config.jinja2
md5sum = e87238c53d080ef9ef90040e57bc1395
md5sum = 715baa302d562a7e4eddc3d1bf72f981
[template-trafficserver-storage-config]
_update_hash_filename_ = templates/trafficserver/storage.config.jinja2
......@@ -94,15 +94,15 @@ md5sum = 8c150e1e6c993708d31936742f3a7302
[caddyprofiledeps-setup]
filename = setup.py
md5sum = 8e1c6c06c09beb921965b3ce98c67c9e
md5sum = f6f72d03af7d9dc29fb4d4fef1062e73
[caddyprofiledeps-dummy]
filename = caddyprofiledummy.py
md5sum = 59cb33f11272ee09eccea74981d2304a
md5sum = b41b8de115ad815d0b0db306ad650365
[profile-kedifa]
filename = instance-kedifa.cfg.in
md5sum = dfb4dabd1e4094de1276d171f998ef47
md5sum = 88f3a8cc30d3cf30f4bd2797f5c16221
[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
......@@ -111,3 +111,7 @@ md5sum = 3336d554661b138dcef97b1d1866803c
[template-slave-introspection-httpd-nginx]
_update_hash_filename_ = templates/slave-introspection-httpd-nginx.conf.in
md5sum = 3067e6ba6c6901821d57d2109517d39c
[template-expose-csr-nginx-conf]
_update_hash_filename_ = templates/expose-csr-nginx.conf.in
md5sum = 5620baa8819fcc8340fa6777ee551a1a
from __future__ import print_function
import caucase.client
import caucase.utils
import os
import ssl
import sys
import urllib
import urlparse
from cryptography import x509
from cryptography.hazmat.primitives import serialization
class Recipe(object):
def __init__(self, *args, **kwargs):
pass
......@@ -19,3 +30,94 @@ def validate_netloc(netloc):
else:
hostname = parsed.hostname
return netloc == '%s:%s' % (hostname, parsed.port)
def _check_certificate(url, certificate):
parsed = urlparse.urlparse(url)
got_certificate = ssl.get_server_certificate((parsed.hostname, parsed.port))
if certificate.strip() != got_certificate.strip():
raise ValueError('Certificate for %s does not match expected one' % (url,))
def _get_exposed_csr(url, certificate):
_check_certificate(url, certificate)
self_signed = ssl.create_default_context()
self_signed.check_hostname = False
self_signed.verify_mode = ssl.CERT_NONE
return urllib.urlopen(url, context=self_signed).read()
def _get_caucase_client(ca_url, ca_crt, user_key):
return caucase.client.CaucaseClient(
ca_url=ca_url + '/cas',
ca_crt_pem_list=caucase.utils.getCertList(ca_crt),
user_key=user_key,
)
def _get_caucase_csr_list(ca_url, ca_crt, user_key):
csr_list = []
for entry in _get_caucase_client(
ca_url, ca_crt, user_key).getPendingCertificateRequestList():
csr = caucase.utils.load_certificate_request(
caucase.utils.toBytes(entry['csr']))
csr_list.append({
'csr_id': entry['id'],
'csr': csr.public_bytes(serialization.Encoding.PEM).decode()
})
return csr_list
def _csr_match(*csr_list):
number_list = set([])
for csr in csr_list:
number_list.add(
x509.load_pem_x509_csr(str(csr)).public_key().public_numbers())
return len(number_list) == 1
def _sign_csr(ca_url, ca_crt, user_key, csr, csr_list):
signed = False
client = _get_caucase_client(ca_url, ca_crt, user_key)
for csr_entry in csr_list:
if _csr_match(csr, csr_entry['csr']):
client.createCertificate(int(csr_entry['csr_id']))
print('Signed csr with id %s' % (csr_entry['csr_id'],))
signed = True
break
return signed
def _mark_done(filename):
with open(filename, 'w') as fh:
fh.write('done')
print('Marked file %s' % (filename,))
def _is_done(filename):
if os.path.exists(filename):
return True
return False
def smart_sign():
ca_url, ca_crt, done_file, user_key, csr_url, \
csr_url_certificate = sys.argv[1:]
if _is_done(done_file):
return
exposed_csr = _get_exposed_csr(csr_url, csr_url_certificate)
caucase_csr_list = _get_caucase_csr_list(ca_url, ca_crt, user_key)
if _sign_csr(
ca_url, ca_crt, user_key, exposed_csr, caucase_csr_list):
_mark_done(done_file)
else:
print('Failed to sign %s' % (csr_url,))
def caucase_csr_sign_check():
ca_url, ca_crt, user_key = sys.argv[1:]
if len(_get_caucase_csr_list(ca_url, ca_crt, user_key)) != 0:
print('ERR There are CSR to sign on %s' % (ca_url,))
sys.exit(1)
else:
print('OK No CSR to sign on %s' % (ca_url,))
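A rough usage sketch (paths and URLs hypothetical): smart_sign() reads its six arguments from sys.argv in the order unpacked above, which is how the aikc-*-wrapper sections later in this commit invoke it:

import sys

# Hypothetical arguments, in the order smart_sign() unpacks them:
sys.argv = [
    'smart-caucase-signer',
    'https://[2001:db8::1]:8009',            # ca_url: caucase service URL
    '/srv/aikc/ca.crt.pem',                  # ca_crt: CA certificate for caucase
    '/srv/aikc/kedifa-done',                 # done_file: marker written once signed
    '/srv/aikc/user.key.pem',                # user_key: caucase user key
    'https://[2001:db8::1]:17001/csr.pem',   # csr_url: URL exposing the node's CSR
    '-----BEGIN CERTIFICATE-----\n...',      # csr_url_certificate: expected server cert
]
smart_sign()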
......@@ -63,6 +63,75 @@ parts =
[caddyprofiledeps]
recipe = caddyprofiledeps
[frontend-node-id]
# Store the id file at the top of the hierarchy, so it does not depend on directory creation
file = ${buildout:directory}/.frontend-node-id.txt
recipe = slapos.recipe.build
init =
import os
import secrets
if not os.path.exists(options['file']):
with open(options['file'], 'w') as fh:
fh.write(secrets.token_urlsafe(4))
with open(options['file'], 'r') as fh:
options['value'] = fh.read()
[frontend-node-private-salt]
# Private, never communicated, stable hash, which can be used to salt other
# hashes, so their values are tied to the node but practically impossible
# to crack (unless the node itself is hacked, but then those values are
# stolen anyway)
recipe = slapos.recipe.build
init =
import os
import uuid
if not os.path.exists(options['file']):
with open(options['file'], 'w') as fh:
fh.write(uuid.uuid4().hex)
with open(options['file'], 'r') as fh:
options['value'] = fh.read()
file = ${buildout:directory}/.frontend-node-private-salt.txt
[version-hash]
recipe = slapos.recipe.build
software-release-url = ${slap-connection:software-release-url}
hash-salt = ${frontend-node-private-salt:value}
init =
import hashlib
import base64
options['value'] = base64.urlsafe_b64encode(hashlib.md5(''.join([options['software-release-url'].strip(), options['hash-salt']])).digest())
[frontend-node-information]
recipe = slapos.recipe.build
file = ${buildout:directory}/.frontend-node-information.json
node-id = ${frontend-node-id:value}
current-hash = ${version-hash:value}
current-software-release-url = ${version-hash:software-release-url}
init =
import json
changed = False
try:
with open(options['file'], 'r') as fh:
data = json.load(fh)
except Exception:
changed = True
data = {
'node-id': options['node-id'],
'version-hash-history': {options['current-hash']: options['current-software-release-url']}
}
if 'node-id' not in data:
data['node-id'] = options['node-id']
changed = True
if 'version-hash-history' not in data:
data['version-hash-history'] = {}
changed = True
if options['current-hash'] not in data['version-hash-history']:
data['version-hash-history'][options['current-hash']] = options['current-software-release-url']
changed = True
if changed:
with open(options['file'], 'w') as fh:
json.dump(data, fh)
options['value'] = data
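For illustration only (all values hypothetical), [version-hash] above takes the urlsafe base64 of the MD5 of the software release URL concatenated with the private salt, and the persisted .frontend-node-information.json then has a shape like this sketch shows:

import base64
import hashlib
import json

# Hypothetical inputs mirroring [version-hash]: software release URL + private salt.
software_release_url = 'https://example.invalid/slapos/software/caddy-frontend/software.cfg'
hash_salt = '0123456789abcdef0123456789abcdef'
version_hash = base64.urlsafe_b64encode(
    hashlib.md5((software_release_url + hash_salt).encode()).digest())

# Shape of .frontend-node-information.json maintained by the init code above.
print(json.dumps({
    'node-id': 'Xqp3vQ',  # from secrets.token_urlsafe(4)
    'version-hash-history': {version_hash.decode(): software_release_url},
}, indent=2))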
# Create all needed directories
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -89,10 +158,10 @@ bbb-ssl-dir = ${:srv}/bbb-ssl
frontend_cluster = ${:var}/frontend_cluster
# csr_id publication
csr_id = ${:srv}/csr_id
certificate-csr_id = ${:etc}/certificate-csr_id
expose-csr_id-var = ${:var}/expose-csr_id
# CSR publication
expose-csr = ${:srv}/expose-csr
expose-csr-etc = ${:etc}/expose-csr
expose-csr-var = ${:var}/expose-csr
# slave introspection
slave-introspection-var = ${:var}/slave-introspection
......@@ -179,6 +248,7 @@ template-empty = {{ software_parameter_dict['template_empty'] }}
template-default-slave-virtualhost = {{ software_parameter_dict['template_default_slave_virtualhost'] }}
template-backend-haproxy-configuration = {{ software_parameter_dict['template_backend_haproxy_configuration'] }}
template-backend-haproxy-rsyslogd-conf = {{ software_parameter_dict['template_backend_haproxy_rsyslogd_conf'] }}
template-expose-csr-nginx-conf = {{ software_parameter_dict['template_expose_csr_nginx_conf'] }}
[kedifa-login-config]
d = ${directory:ca-dir}
......@@ -295,14 +365,19 @@ extra-context =
key master_key_download_url :master_key_download_url
key autocert caddy-directory:autocert
key caddy_log_directory caddy-directory:slave-log
key expose_csr_id_organization :organization
key expose_csr_id_organizational_unit :organizational-unit
key expose_csr_organization :organization
key expose_csr_organizational_unit :organizational-unit
key global_ipv6 slap-configuration:ipv6-random
key empty_template software-release-path:template-empty
key template_default_slave_configuration software-release-path:template-default-slave-virtualhost
key template_expose_csr_nginx_conf software-release-path:template-expose-csr-nginx-conf
key software_type :software_type
key frontend_lazy_graceful_reload frontend-caddy-lazy-graceful:rendered
key monitor_base_url monitor-instance-parameter:monitor-base-url
key node_id frontend-node-id:value
key version_hash version-hash:value
key software_release_url version-hash:software-release-url
key node_information frontend-node-information:value
key custom_ssl_directory caddy-directory:custom-ssl-directory
# BBB: SlapOS Master non-zero knowledge BEGIN
key apache_certificate apache-certificate:rendered
......@@ -460,6 +535,8 @@ disk-cache-size = ${configuration:disk-cache-size}
ram-cache-size = ${configuration:ram-cache-size}
templates-dir = {{ software_parameter_dict['trafficserver'] }}/etc/trafficserver/body_factory
request-timeout = ${configuration:request-timeout}
version-hash = ${version-hash:value}
node-id = ${frontend-node-id:value}
[trafficserver-configuration-directory]
recipe = plone.recipe.command
......
......@@ -286,7 +286,7 @@ config-monitor-username = ${monitor-instance-parameter:username}
config-monitor-password = ${monitor-htpasswd:passwd}
software-type = {{frontend_type}}
return = slave-instance-information-list monitor-base-url backend-client-csr_id-url csr_id-url csr_id-certificate backend-haproxy-statistic-url
return = slave-instance-information-list monitor-base-url backend-client-csr-url kedifa-csr-url csr-certificate backend-haproxy-statistic-url node-information-json
{#- Send only needed parameters to frontend nodes #}
{%- set base_node_configuration_dict = {} %}
......@@ -362,25 +362,26 @@ warning-slave-dict = {{ dumps(json_module.dumps(warning_slave_dict, sort_keys=Tr
{% if not aikc_enabled or not aibcc_enabled %}
{% for frontend in frontend_list %}
{% set section_part = '${request-' + frontend %}
{{ frontend }}-csr_id-certificate = {{ section_part }}:connection-csr_id-certificate}
{{ frontend }}-csr-certificate = {{ section_part }}:connection-csr-certificate}
{% endfor %}
{% endif %}
{% if not aikc_enabled %}
kedifa-csr_id-url = ${request-kedifa:connection-csr_id-url}
kedifa-csr_id-certificate = ${request-kedifa:connection-csr_id-certificate}
kedifa-csr-url = ${request-kedifa:connection-kedifa-csr-url}
kedifa-csr-certificate = ${request-kedifa:connection-csr-certificate}
{% for frontend in frontend_list %}
{% set section_part = '${request-' + frontend %}
{{ frontend }}-csr_id-url = {{ section_part }}:connection-csr_id-url}
{{ frontend }}-kedifa-csr-url = {{ section_part }}:connection-kedifa-csr-url}
{% endfor %}
{% endif %}
{% for frontend in frontend_list %}
{% set section_part = '${request-' + frontend %}
{{ frontend }}-backend-haproxy-statistic-url = {{ section_part }}:connection-backend-haproxy-statistic-url}
{{ frontend }}-node-information-json = ${frontend-information:{{ frontend }}-node-information-json}
{% endfor %}
{% if not aibcc_enabled %}
{% for frontend in frontend_list %}
{% set section_part = '${request-' + frontend %}
{{ frontend }}-backend-client-csr_id-url = {{ section_part }}:connection-backend-client-csr_id-url}
{{ frontend }}-backend-client-csr-url = {{ section_part }}:connection-backend-client-csr-url}
{% endfor %}
{% endif %}
......@@ -431,7 +432,7 @@ software-url = ${slap-connection:software-release-url}
{% endif %}
software-type = kedifa
name = kedifa
return = slave-kedifa-information master-key-generate-auth-url master-key-upload-url master-key-download-url caucase-url csr_id-url csr_id-certificate monitor-base-url
return = slave-kedifa-information master-key-generate-auth-url master-key-upload-url master-key-download-url caucase-url kedifa-csr-url csr-certificate monitor-base-url
{% set sla_kedifa_key = "-sla-kedifa-" %}
{% set sla_kedifa_key_length = sla_kedifa_key | length %}
{% for key in slapparameter_dict.keys() %}
......@@ -461,6 +462,12 @@ warning-slave-dict = {{ dumps(warning_slave_dict) }}
{# sort_keys are important in order to avoid shuffling parameters on each run #}
active-slave-instance-list = {{ json_module.dumps(active_slave_instance_list, sort_keys=True) }}
[frontend-information]
{% for frontend in frontend_list %}
{% set section_part = '${request-' + frontend %}
{{ frontend }}-node-information-json = {{ section_part }}:connection-node-information-json}
{% endfor %}
[dynamic-publish-slave-information]
< = jinja2-template-base
template = {{ software_parameter_dict['profile_replicate_publish_slave_information'] }}
......@@ -468,6 +475,7 @@ filename = dynamic-publish-slave-information.cfg
extensions = jinja2.ext.do
extra-context =
section slave_information slave-information
section frontend_information frontend-information
section rejected_slave_information rejected-slave-information
section active_slave_instance_dict active-slave-instance
section warning_slave_information warning-slave-information
......@@ -505,7 +513,6 @@ crl = ${directory:aikc}/crl.pem
user-ca-certificate = ${directory:aikc}/user-ca-certificate.pem
user-crl = ${directory:aikc}/user-crl.pem
user-created = ${directory:aikc}/user-created
csr_id = ${directory:aikc}/csr_id
data_dir = ${directory:aikc}/caucase-updater
[aikc-user-csr]
......@@ -552,11 +559,12 @@ recipe = plone.recipe.command
{#- The called command is smart enough to survive errors and retry #}
stop-on-error = False
update-command = ${:command}
csr_id = ${directory:aikc}/csr_id
command =
if ! [ -f ${aikc-config:user-created} ] ; then
${aikc-caucase-wrapper:rendered} --mode user --send-csr ${aikc-user-csr:csr} > ${aikc-config:csr_id} || exit 1
cut -d ' ' -f 1 ${aikc-config:csr_id} || exit 1
csr_id=`cut -d ' ' -f 1 ${aikc-config:csr_id}`
${aikc-caucase-wrapper:rendered} --mode user --send-csr ${aikc-user-csr:csr} > ${:csr_id} || exit 1
cut -d ' ' -f 1 ${:csr_id} || exit 1
csr_id=`cut -d ' ' -f 1 ${:csr_id}`
sleep 1
${aikc-caucase-wrapper:rendered} --mode user --get-crt $csr_id ${aikc-config:key} || exit 1
touch ${aikc-config:user-created}
......@@ -577,44 +585,32 @@ command =
mode='user',
)}}
[aikc-check-certificate]
recipe = slapos.recipe.template:jinja2
rendered = ${directory:bin}/aikc-check-certificate
template = inline:
import sys
import ssl
import urlparse
certificate = sys.argv[2]
parsed = urlparse.urlparse(sys.argv[1])
got_certificate = ssl.get_server_certificate((parsed.hostname, parsed.port))
sys.exit(0) if certificate.strip() == got_certificate.strip() else sys.exit(1)
[aikc-sign-promise-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ software_parameter_dict['caucase_csr_sign_check'] }}
${aikc-config:caucase-url}
${aikc-config:ca-certificate}
${aikc-config:key}
wrapper-path = ${directory:bin}/aikc-caucase-csr-sign-check
{% do part_list.append('aikc-sign-promise') %}
[aikc-sign-promise]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${aikc-sign-promise-wrapper:wrapper-path}
{% for csr in frontend_list + ['kedifa'] %}
[aikc-{{ csr }}-wrapper]
{# jinja2 instead of wrapper is used with context to remove py'u' #}
recipe = slapos.recipe.template:jinja2
context =
key csr_id_url request-{{ csr }}:connection-csr_id-url
key csr_id_certificate request-{{ csr }}:connection-csr_id-certificate
template = inline:#!{{ software_parameter_dict['dash'] }}/bin/dash
test -f ${directory:aikc}/{{ csr }}-done && exit 0
${buildout:executable} ${aikc-check-certificate:rendered} \
{# raw block to use context #}
{% raw %}
{{ csr_id_url }} \
"""{{ csr_id_certificate }}"""
{% endraw %}
if [ $? = 0 ]; then
csr_id=`{{ software_parameter_dict['curl'] }}/bin/curl -s -k -g \
{% raw %}
{{ csr_id_url }} \
{% endraw %}
` || exit 1
${aikc-caucase-wrapper:rendered} --user-key ${aikc-config:key} --sign-csr $csr_id && touch ${directory:aikc}/{{ csr }}-done
fi
rendered = ${directory:bin}/aikc-{{ csr }}-wrapper
mode = 0700
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/aikc-{{ csr }}-wrapper
command-line = {{ software_parameter_dict['smart_caucase_signer'] }}
${aikc-config:caucase-url}
${aikc-config:ca-certificate}
${directory:aikc}/{{ csr }}-done
${aikc-config:key}
${request-{{ csr }}:connection-kedifa-csr-url}
"${request-{{ csr }}:connection-csr-certificate}"
{% do part_list.append('aikc-%s' % (csr,)) %}
[aikc-{{ csr }}]
......@@ -622,7 +618,7 @@ recipe = plone.recipe.command
{#- The called command is smart enough to survive errors and retry #}
stop-on-error = False
command =
${aikc-{{ csr }}-wrapper:rendered}
${aikc-{{ csr }}-wrapper:wrapper-path}
update-command = ${:command}
{% endfor %}
{% endif %} {# if aikc_enabled #}
......@@ -641,7 +637,6 @@ crl = ${directory:aibcc}/crl.pem
user-ca-certificate = ${directory:aibcc}/user-ca-certificate.pem
user-crl = ${directory:aibcc}/user-crl.pem
user-created = ${directory:aibcc}/user-created
csr_id = ${directory:aibcc}/csr_id
data_dir = ${directory:aibcc}/caucase-updater
[aibcc-user-csr]
......@@ -668,6 +663,7 @@ recipe = slapos.recipe.template:jinja2
context =
key caucase_url aibcc-config:caucase-url
template = inline:#!{{ software_parameter_dict['dash'] }}/bin/dash
exec {{ software_parameter_dict['bin_directory'] }}/caucase \
{# raw block to use context #}
{% raw %}
......@@ -690,11 +686,12 @@ recipe = plone.recipe.command
{#- XXX: Create promise #}
stop-on-error = False
update-command = ${:command}
csr_id = ${directory:aibcc}/csr_id
command =
if ! [ -f ${aibcc-config:user-created} ] ; then
${aibcc-caucase-wrapper:rendered} --mode user --send-csr ${aibcc-user-csr:csr} > ${aibcc-config:csr_id} || exit 1
cut -d ' ' -f 1 ${aibcc-config:csr_id} || exit 1
csr_id=`cut -d ' ' -f 1 ${aibcc-config:csr_id}`
${aibcc-caucase-wrapper:rendered} --mode user --send-csr ${aibcc-user-csr:csr} > ${:csr_id} || exit 1
cut -d ' ' -f 1 ${:csr_id} || exit 1
csr_id=`cut -d ' ' -f 1 ${:csr_id}`
sleep 1
${aibcc-caucase-wrapper:rendered} --mode user --get-crt $csr_id ${aibcc-config:key} || exit 1
touch ${aibcc-config:user-created}
......@@ -715,43 +712,32 @@ command =
mode='user',
)}}
[aibcc-check-certificate]
recipe = slapos.recipe.template:jinja2
rendered = ${directory:bin}/aibcc-check-certificate
template = inline:
import sys
import ssl
import urlparse
certificate = sys.argv[2]
parsed = urlparse.urlparse(sys.argv[1])
got_certificate = ssl.get_server_certificate((parsed.hostname, parsed.port))
sys.exit(0) if certificate.strip() == got_certificate.strip() else sys.exit(1)
[aibcc-sign-promise-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ software_parameter_dict['caucase_csr_sign_check'] }}
${aibcc-config:caucase-url}
${aibcc-config:ca-certificate}
${aibcc-config:key}
wrapper-path = ${directory:bin}/aibcc-caucase-csr-sign-check
{% do part_list.append('aibcc-sign-promise') %}
[aibcc-sign-promise]
<= monitor-promise-base
promise = check_command_execute
name = ${:_buildout_section_name_}.py
config-command = ${aibcc-sign-promise-wrapper:wrapper-path}
{% for csr in frontend_list %}
[aibcc-{{ csr }}-wrapper]
{# jinja2 instead of wrapper is used with context to remove py'u' #}
recipe = slapos.recipe.template:jinja2
context =
key csr_id_url request-{{ csr }}:connection-backend-client-csr_id-url
key csr_id_certificate request-{{ csr }}:connection-csr_id-certificate
template = inline:#!{{ software_parameter_dict['dash'] }}/bin/dash
test -f ${directory:aibcc}/{{ csr }}-done && exit 0
${buildout:executable} ${aibcc-check-certificate:rendered} \
{# raw block to use context #}
{% raw %}
{{ csr_id_url }} \
"""{{ csr_id_certificate }}"""
{% endraw %}
if [ $? = 0 ]; then
csr_id=`{{ software_parameter_dict['curl'] }}/bin/curl -s -k -g \
{% raw %}
{{ csr_id_url }} \
{% endraw %}
` || exit 1
${aibcc-caucase-wrapper:rendered} --user-key ${aibcc-config:key} --sign-csr $csr_id && touch ${directory:aibcc}/{{ csr }}-done
fi
rendered = ${directory:bin}/aibcc-{{ csr }}-wrapper
mode = 0700
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/aibcc-{{ csr }}-wrapper
command-line = {{ software_parameter_dict['smart_caucase_signer'] }}
${aibcc-config:caucase-url}
${aibcc-config:ca-certificate}
${directory:aibcc}/{{ csr }}-done
${aibcc-config:key}
${request-{{ csr }}:connection-backend-client-csr-url}
"${request-{{ csr }}:connection-csr-certificate}"
{% do part_list.append('aibcc-%s' % (csr,)) %}
[aibcc-{{ csr }}]
......@@ -759,7 +745,7 @@ recipe = plone.recipe.command
{#- The called command is smart enough to survive errors and retry #}
stop-on-error = False
command =
${aibcc-{{ csr }}-wrapper:rendered}
${aibcc-{{ csr }}-wrapper:wrapper-path}
update-command = ${:command}
{% endfor %}
{% endif %} {# if aibcc_enabled #}
......
......@@ -69,7 +69,7 @@
},
"automatic-internal-kedifa-caucase-csr": {
"default": "true",
"description": "Automatically signs CSRs sent to KeDiFa's caucase, based on csr_id and matching certificate.",
"description": "Automatically signs CSRs sent to KeDiFa's caucase, based on CSR comparison.",
"enum": [
"true",
"false"
......@@ -79,7 +79,7 @@
},
"automatic-internal-backend-client-caucase-csr": {
"default": "true",
"description": "Automatically signs CSRs sent to Backend Client's caucase, based on csr_id and matching certificate.",
"description": "Automatically signs CSRs sent to Backend Client's caucase, based on CSR comparison.",
"enum": [
"true",
"false"
......
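The two schema options above enable automatic signing based on CSR comparison: a signer approves a pending CSR in caucase only when it matches the CSR the requesting node publishes. The following is a rough, hypothetical sketch of that idea, not the repository's caddyprofiledummy:smart_sign console script; it only uses caucase client calls that appear later in this diff, and it assumes each pending entry exposes its CSR PEM under a 'csr' key and that the published CSR has been fetched to a local file beforehand.
# Hypothetical sketch only -- illustrates CSR-comparison signing, not the real
# smart-caucase-signer shipped by this software release.
import sys
import caucase.client
import caucase.utils

def sign_matching_csr(caucase_url, ca_pem_path, user_key_path, published_csr_path):
  with open(ca_pem_path) as f:
    ca_pem = f.read()
  with open(published_csr_path) as f:
    # CSR fetched beforehand from the node's expose-csr URL
    published_csr = f.read().strip()
  client = caucase.client.CaucaseClient(
    ca_url=caucase_url + '/cas',
    ca_crt_pem_list=caucase.utils.getCertList(ca_pem),
    user_key=user_key_path,
  )
  for csr_entry in client.getPendingCertificateRequestList():
    # assumption: pending entries carry their CSR PEM under 'csr'
    if csr_entry['csr'].strip() == published_csr:
      client.createCertificate(int(csr_entry['id']))
      return 0
  return 1

if __name__ == '__main__':
  sys.exit(sign_matching_csr(*sys.argv[1:5]))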
......@@ -17,8 +17,7 @@ parts =
caucased
caucased-promise
caucase-updater
expose-csr_id
promise-expose-csr_id-ip-port
promise-expose-csr-ip-port
promise-logrotate-setup
[monitor-instance-parameter]
......@@ -74,10 +73,10 @@ backup-caucased = ${:backup}/caucased
# reservation
reservation = ${:srv}/reservation
# csr_id publication
csr_id = ${:srv}/csr_id
certificate-csr_id = ${:var}/certificate-csr_id
expose-csr_id-var = ${:var}/expose-csr_id
# CSR publication
expose-csr = ${:srv}/expose-csr
expose-csr-etc = ${:etc}/expose-csr
expose-csr-var = ${:var}/expose-csr
[kedifa-csr]
recipe = plone.recipe.command
......@@ -113,29 +112,19 @@ stop-on-error = True
template_csr='${kedifa-csr:template-csr}'
)}}
[store-csr_id]
[expose-csr-link-csr]
recipe = plone.recipe.command
csr_id_path = ${directory:csr_id}/csr_id.txt
csr_work_path = ${directory:tmp}/${:_buildout_section_name_}
filename = csr.pem
csr_path = ${directory:expose-csr}/${:filename}
stop-on-error = False
update-command = ${:command}
command =
{{ software_parameter_dict['bin_directory'] }}/caucase \
--ca-url {{ caucase_url }} \
--ca-crt ${kedifa-config:ca-certificate} \
--crl ${kedifa-config:crl} \
--mode service \
{#- XXX: Need to use caucase-updater-csr:csr, as there is no way to obtain csr_id from caucase-updater -#}
{#- XXX: nor directly path to the generated CSR #}
--send-csr ${caucase-updater-csr:csr} > ${:csr_work_path} && \
cut -d ' ' -f 1 ${:csr_work_path} > ${:csr_id_path}
[certificate-csr_id]
ln -sf ${caucase-updater-csr:csr} ${:csr_path}
[expose-csr-certificate]
recipe = plone.recipe.command
certificate = ${directory:certificate-csr_id}/certificate.pem
key = ${directory:certificate-csr_id}/key.pem
certificate = ${directory:expose-csr-etc}/certificate.pem
key = ${directory:expose-csr-etc}/key.pem
{#- Can be stopped on error, as does not rely on self provided service #}
stop-on-error = True
......@@ -147,70 +136,44 @@ command =
-days 5 -nodes -x509 -keyout ${:key} -out ${:certificate}
fi
[expose-csr_id-configuration]
[expose-csr-configuration]
ip = {{ instance_parameter_dict['ipv6-random'] }}
port = 17000
key = ${certificate-csr_id:key}
certificate = ${certificate-csr_id:certificate}
error-log = ${directory:log}/expose-csr_id.log
[expose-csr_id-template]
key = ${expose-csr-certificate:key}
certificate = ${expose-csr-certificate:certificate}
error-log = ${directory:log}/expose-csr.log
var = ${directory:expose-csr-var}
pid = ${directory:var}/nginx-expose-csr.pid
root = ${directory:expose-csr}
nginx_mime = {{ software_parameter_dict['nginx_mime'] }}
[expose-csr-template]
recipe = slapos.recipe.template:jinja2
var = ${directory:expose-csr_id-var}
pid = ${directory:var}/nginx-expose-csr_id.pid
rendered = ${directory:etc}/nginx-expose-csr_id.conf
template = inline:
daemon off;
pid ${:pid};
error_log ${expose-csr_id-configuration:error-log};
events {
}
http {
include {{ software_parameter_dict['nginx_mime'] }};
server {
server_name_in_redirect off;
port_in_redirect off;
error_log ${expose-csr_id-configuration:error-log};
access_log /dev/null;
listen [${expose-csr_id-configuration:ip}]:${expose-csr_id-configuration:port} ssl;
ssl_certificate ${expose-csr_id-configuration:certificate};
ssl_certificate_key ${expose-csr_id-configuration:key};
default_type application/octet-stream;
client_body_temp_path ${:var} 1 2;
proxy_temp_path ${:var} 1 2;
fastcgi_temp_path ${:var} 1 2;
uwsgi_temp_path ${:var} 1 2;
scgi_temp_path ${:var} 1 2;
location / {
alias ${directory:csr_id}/;
autoindex off;
sendfile on;
sendfile_max_chunk 1m;
}
}
}
[promise-expose-csr_id-ip-port]
rendered = ${directory:expose-csr-etc}/nginx.conf
template = {{ software_parameter_dict['template_expose_csr_nginx_conf'] }}
context =
section configuration expose-csr-configuration
[promise-expose-csr-ip-port]
<= monitor-promise-base
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
name = expose-csr-ip-port-listening.py
config-host = ${expose-csr-configuration:ip}
config-port = ${expose-csr-configuration:port}
[expose-csr_id]
depends = ${store-csr_id:command}
[expose-csr]
recipe = slapos.cookbook:wrapper
command-line = {{ software_parameter_dict['nginx'] }}
-c ${expose-csr_id-template:rendered}
-c ${expose-csr-template:rendered}
url = https://[${expose-csr-configuration:ip}]:${expose-csr-configuration:port}
wrapper-path = ${directory:service}/expose-csr_id
wrapper-path = ${directory:service}/expose-csr
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[get-csr_id-certificate]
[expose-csr-certificate-get]
recipe = collective.recipe.shelloutput
commands =
certificate = cat ${certificate-csr_id:certificate}
certificate = cat ${expose-csr-certificate:certificate}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
......@@ -325,8 +288,8 @@ caucase-url = {{ caucase_url }}
master-key-generate-auth-url = https://[${kedifa-config:ip}]:${kedifa-config:port}/${master-auth-random:passwd}/generateauth
master-key-upload-url = https://[${kedifa-config:ip}]:${kedifa-config:port}/${master-auth-random:passwd}?auth=
master-key-download-url = https://[${kedifa-config:ip}]:${kedifa-config:port}/${master-auth-random:passwd}
csr_id-url = https://[${expose-csr_id-configuration:ip}]:${expose-csr_id-configuration:port}/csr_id.txt
csr_id-certificate = ${get-csr_id-certificate:certificate}
kedifa-csr-url = ${expose-csr:url}/${expose-csr-link-csr:filename}
csr-certificate = ${expose-csr-certificate-get:certificate}
monitor-base-url = ${monitor-instance-parameter:monitor-base-url}
[promise-logrotate-setup]
......
......@@ -46,24 +46,28 @@
"description": "Total amount of Slaves allocated to the Instance (include blocked ones)",
"type": "integer"
},
"kedifa-csr_id-url": {
"description": "URL on which KeDiFa publishes its csr_id sent to caucase.",
"kedifa-csr-url": {
"description": "URL on which KeDiFa publishes its CSR sent to caucase.",
"type": "string"
},
"kedifa-csr_id-certificate": {
"description": "Certificate used to serve data on kedifa-csr_id-url.",
"kedifa-csr-certificate": {
"description": "Certificate used to serve data on kedifa-csr-url.",
"type": "string"
},
"kedifa-caucase-url": {
"description": "Url to caucase used by KeDiFa.",
"type": "string"
},
"caddy-frontend-N-csr_id-url": {
"description": "URL on which frontend node number N publishes its csr_id sent to caucase.",
"caddy-frontend-N-kedifa-csr-url": {
"description": "URL on which frontend node number N publishes its Kedifa CSR sent to caucase.",
"type": "string"
},
"caddy-frontend-N-csr_id-certificate": {
"description": "Certificate used to serve data on caddy-frontend-N-csr_id-url.",
"caddy-frontend-N-backend-client-csr-url": {
"description": "URL on which frontend node number N publishes its Backend Client CSR sent to caucase.",
"type": "string"
},
"caddy-frontend-N-csr-certificate": {
"description": "Certificate used to serve data on CSRs.",
"type": "string"
},
"warning-slave-dict": {
......
......@@ -9,10 +9,16 @@ setup(
'validators',
'furl',
'orderedmultidict',
'caucase',
'python2-secrets',
],
entry_points={
'zc.buildout': [
'default = caddyprofiledummy:Recipe',
],
'console_scripts': [
'smart-caucase-signer = caddyprofiledummy:smart_sign',
'caucase-csr-sign-check = caddyprofiledummy:caucase_csr_sign_check'
]
}
)
......@@ -99,6 +99,7 @@ template_trafficserver_records_config = ${template-trafficserver-records-config:
template_trafficserver_storage_config = ${template-trafficserver-storage-config:target}
template_validate_script = ${template-validate-script:target}
template_wrapper = ${template-wrapper:output}
template_expose_csr_nginx_conf = ${template-expose-csr-nginx-conf:target}
# directories
bin_directory = ${buildout:bin-directory}
......@@ -123,6 +124,8 @@ kedifa-updater = ${:bin_directory}/kedifa-updater
kedifa-csr = ${:bin_directory}/kedifa-csr
xz_location = ${xz-utils:location}
htpasswd = ${:bin_directory}/htpasswd
smart_caucase_signer = ${:bin_directory}/smart-caucase-signer
caucase_csr_sign_check = ${:bin_directory}/caucase-csr-sign-check
[template]
recipe = slapos.recipe.template:jinja2
......@@ -203,11 +206,15 @@ output = ${buildout:directory}/template-wrapper.cfg
[template-backend-haproxy-rsyslogd-conf]
<=download-template
[template-expose-csr-nginx-conf]
<=download-template
[versions]
kedifa = 0.0.6
# Modern KeDiFa requires zc.lockfile
zc.lockfile = 1.4
python2-secrets = 1.0.5
validators = 0.12.2
PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3
......
......@@ -330,6 +330,8 @@ certificate = {{ certificate }}
https_port = {{ dumps('' ~ configuration['port']) }}
http_port = {{ dumps('' ~ configuration['plain_http_port']) }}
local_ipv4 = {{ dumps('' ~ instance_parameter_dict['ipv4-random']) }}
version-hash = {{ version_hash }}
node-id = {{ node_id }}
{%- for key, value in slave_instance.iteritems() %}
{%- if value is not none %}
{{ key }} = {{ dumps(value) }}
......@@ -453,9 +455,9 @@ recipe = slapos.cookbook:publish.serialised
slave-instance-information-list = {{ json_module.dumps(slave_instance_information_list, sort_keys=True) }}
{%- endif %}
monitor-base-url = {{ monitor_base_url }}
csr_id-url = https://[${expose-csr_id-configuration:ip}]:${expose-csr_id-configuration:port}/csr_id.txt
backend-client-csr_id-url = https://[${expose-csr_id-configuration:ip}]:${expose-csr_id-configuration:port}/backend-haproxy-csr_id.txt
csr_id-certificate = ${get-csr_id-certificate:certificate}
kedifa-csr-url = ${expose-csr:url}/${expose-csr-link-csr-kedifa:filename}
backend-client-csr-url = ${expose-csr:url}/${expose-csr-link-csr-backend-haproxy:filename}
csr-certificate = ${expose-csr-certificate-get:certificate}
{%- set furled = furl_module.furl(backend_haproxy_configuration['statistic-frontend-secure_access']) %}
{%- do furled.set(username = backend_haproxy_configuration['statistic-username']) %}
{%- do furled.set(password = backend_haproxy_configuration['statistic-password']) %}
......@@ -463,6 +465,8 @@ csr_id-certificate = ${get-csr_id-certificate:certificate}
{#- We unquote, as furl quotes automatically, but there is a buildout value on purpose like ${...:...} in the password #}
{%- set statistic_url = urlparse_module.unquote(furled.tostr()) %}
backend-haproxy-statistic-url = {{ statistic_url }}
{#- sort_keys are important in order to avoid shuffling parameters on each run #}
node-information-json = {{ json_module.dumps(node_information, sort_keys=True) }}
[kedifa-updater]
recipe = slapos.cookbook:wrapper
......@@ -513,23 +517,26 @@ global-ipv6 = ${slap-configuration:ipv6-random}
request-timeout = {{ dumps('' ~ configuration['request-timeout']) }}
backend-connect-timeout = {{ dumps('' ~ configuration['backend-connect-timeout']) }}
backend-connect-retries = {{ dumps('' ~ configuration['backend-connect-retries']) }}
version-hash = {{ version_hash }}
node-id = {{ node_id }}
[store-backend-haproxy-csr_id]
[template-expose-csr-link-csr]
recipe = plone.recipe.command
csr_id_path = {{ directory['csr_id'] }}/backend-haproxy-csr_id.txt
csr_work_path = {{ directory['tmp'] }}/${:_buildout_section_name_}
stop-on-error = False
update-command = ${:command}
csr_path = {{ directory['expose-csr'] }}/${:filename}
command =
{{ software_parameter_dict['bin_directory'] }}/caucase \
--ca-url {{ backend_haproxy_configuration['caucase-url'] }} \
--ca-crt {{ backend_haproxy_configuration['cas-ca-certificate'] }} \
--crl {{ backend_haproxy_configuration['crl'] }} \
--mode service \
--send-csr {{ backend_haproxy_configuration['csr'] }} > ${:csr_work_path} && \
cut -d ' ' -f 1 ${:csr_work_path} > ${:csr_id_path}
ln -sf ${:csr} ${:csr_path}
[expose-csr-link-csr-backend-haproxy]
<= template-expose-csr-link-csr
filename = backend-haproxy-csr.pem
csr = {{ backend_haproxy_configuration['csr'] }}
[expose-csr-link-csr-kedifa]
<= template-expose-csr-link-csr
filename = kedifa-csr.pem
csr = {{ kedifa_configuration['csr'] }}
##<Backend haproxy>
......@@ -550,32 +557,14 @@ parts +=
publish-caddy-information
tunnel-6to4-base-http_port
tunnel-6to4-base-https_port
expose-csr_id
promise-expose-csr_id-ip-port
promise-expose-csr-ip-port
cache-access = {{ cache_access }}
[store-csr_id]
recipe = plone.recipe.command
csr_id_path = {{ directory['csr_id'] }}/csr_id.txt
csr_work_path = {{ directory['tmp'] }}/${:_buildout_section_name_}
stop-on-error = False
update-command = ${:command}
command =
{{ software_parameter_dict['bin_directory'] }}/caucase \
--ca-url {{ kedifa_configuration['caucase-url'] }} \
--ca-crt {{ kedifa_configuration['cas-ca-certificate'] }} \
--crl {{ kedifa_configuration['crl'] }} \
--mode service \
--send-csr {{ kedifa_configuration['csr'] }} > ${:csr_work_path} && \
cut -d ' ' -f 1 ${:csr_work_path} > ${:csr_id_path}
[certificate-csr_id]
[expose-csr-certificate]
recipe = plone.recipe.command
certificate = {{ directory['certificate-csr_id'] }}/certificate.pem
key = {{ directory['certificate-csr_id'] }}/key.pem
certificate = {{ directory['expose-csr-etc'] }}/certificate.pem
key = {{ directory['expose-csr-etc'] }}/key.pem
{#- Can be stopped on error, as does not rely on self provided service #}
stop-on-error = True
......@@ -583,76 +572,48 @@ update-command = ${:command}
command =
if ! [ -f ${:key} ] && ! [ -f ${:certificate} ] ; then
openssl req -new -newkey rsa:2048 -sha256 -subj \
"/O={{ expose_csr_id_organization }}/OU={{ expose_csr_id_organizational_unit }}/CN=${slap-configuration:ipv6-random}" \
"/O={{ expose_csr_organization }}/OU={{ expose_csr_organizational_unit }}/CN=${slap-configuration:ipv6-random}" \
-days 5 -nodes -x509 -keyout ${:key} -out ${:certificate}
fi
[expose-csr_id-configuration]
[expose-csr-configuration]
ip = ${slap-configuration:ipv6-random}
port = 17001
key = ${certificate-csr_id:key}
certificate = ${certificate-csr_id:certificate}
error-log = {{ directory['log'] }}/expose-csr_id.log
[expose-csr_id-template]
key = ${expose-csr-certificate:key}
certificate = ${expose-csr-certificate:certificate}
error-log = {{ directory['log'] }}/expose-csr.log
var = {{ directory['expose-csr-var'] }}
pid = {{ directory['var'] }}/nginx-expose-csr.pid
root = {{ directory['expose-csr'] }}
nginx_mime = {{ software_parameter_dict['nginx_mime'] }}
[expose-csr-template]
recipe = slapos.recipe.template:jinja2
var = {{ directory['expose-csr_id-var'] }}
pid = {{ directory['var'] }}/nginx-expose-csr_id.pid
rendered = {{ directory['etc'] }}/nginx-expose-csr_id.conf
template = inline:
daemon off;
pid ${:pid};
error_log ${expose-csr_id-configuration:error-log};
events {
}
http {
include {{ software_parameter_dict['nginx_mime'] }};
server {
server_name_in_redirect off;
port_in_redirect off;
error_log ${expose-csr_id-configuration:error-log};
access_log /dev/null;
listen [${expose-csr_id-configuration:ip}]:${expose-csr_id-configuration:port} ssl;
ssl_certificate ${expose-csr_id-configuration:certificate};
ssl_certificate_key ${expose-csr_id-configuration:key};
default_type application/octet-stream;
client_body_temp_path ${:var} 1 2;
proxy_temp_path ${:var} 1 2;
fastcgi_temp_path ${:var} 1 2;
uwsgi_temp_path ${:var} 1 2;
scgi_temp_path ${:var} 1 2;
location / {
alias {{ directory['csr_id'] }}/;
autoindex off;
sendfile on;
sendfile_max_chunk 1m;
}
}
}
[promise-expose-csr_id-ip-port]
rendered = {{ directory['expose-csr-etc'] }}/nginx.conf
template = {{ template_expose_csr_nginx_conf }}
context =
section configuration expose-csr-configuration
[promise-expose-csr-ip-port]
<= monitor-promise-base
promise = check_socket_listening
name = expose-csr_id-ip-port-listening.py
config-host = ${expose-csr_id-configuration:ip}
config-port = ${expose-csr_id-configuration:port}
[expose-csr_id]
depends =
${store-csr_id:command}
${store-backend-haproxy-csr_id:command}
name = expose-csr-ip-port-listening.py
config-host = ${expose-csr-configuration:ip}
config-port = ${expose-csr-configuration:port}
[expose-csr]
recipe = slapos.cookbook:wrapper
command-line = {{ software_parameter_dict['nginx'] }}
-c ${expose-csr_id-template:rendered}
-c ${expose-csr-template:rendered}
url = https://[${expose-csr-configuration:ip}]:${expose-csr-configuration:port}
wrapper-path = {{ directory['service'] }}/expose-csr_id
wrapper-path = {{ directory['service'] }}/expose-csr
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[get-csr_id-certificate]
[expose-csr-certificate-get]
recipe = collective.recipe.shelloutput
commands =
certificate = cat ${certificate-csr_id:certificate}
certificate = cat ${expose-csr-certificate:certificate}
[promise-logrotate-setup]
<= monitor-promise-base
......
......@@ -58,6 +58,8 @@ frontend statistic
frontend http-backend
bind {{ configuration['local-ipv4'] }}:{{ configuration['http-port'] }}
http-request add-header Via "%HV rapid-cdn-backend-{{ configuration['node-id'] }}-{{ configuration['version-hash'] }}"
http-response add-header Via "%HV rapid-cdn-backend-{{ configuration['node-id'] }}-{{ configuration['version-hash']}}"
{%- for slave_instance in backend_slave_list -%}
{{ frontend_entry(slave_instance, 'http', False) }}
{%- endfor %}
......
......@@ -21,6 +21,10 @@
# workaround for lost connection to haproxy by reconnecting
try_duration 3s
try_interval 250ms
header_upstream +Via "{proto} rapid-cdn-frontend-{{ slave_parameter['node-id'] }}-{{ slave_parameter['version-hash'] }}"
{%- if not slave_parameter['disable-via-header'] %}
header_downstream +Via "{proto} rapid-cdn-frontend-{{ slave_parameter['node-id'] }}-{{ slave_parameter['version-hash'] }}"
{%- endif %}
{%- endmacro %} {# proxy_header #}
{%- macro hsts_header(tls) %}
......
daemon off;
pid {{ configuration['pid'] }};
error_log {{ configuration['error-log'] }};
events {
}
http {
include {{ configuration['nginx_mime'] }};
server {
server_name_in_redirect off;
port_in_redirect off;
error_log {{ configuration['error-log'] }};
access_log /dev/null;
listen [{{ configuration['ip'] }}]:{{ configuration['port'] }} ssl;
ssl_certificate {{ configuration['certificate'] }};
ssl_certificate_key {{ configuration['key'] }};
default_type application/octet-stream;
client_body_temp_path {{ configuration['var'] }} 1 2;
proxy_temp_path {{ configuration['var'] }} 1 2;
fastcgi_temp_path {{ configuration['var'] }} 1 2;
uwsgi_temp_path {{ configuration['var'] }} 1 2;
scgi_temp_path {{ configuration['var'] }} 1 2;
location / {
alias {{ configuration['root'] }}/;
autoindex off;
sendfile on;
sendfile_max_chunk 1m;
}
}
}
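This nginx instance simply serves the symlinked CSR files from the expose-csr root over TLS; the resulting URL and the self-signed certificate are published as the kedifa-csr-url / backend-client-csr-url and csr-certificate connection parameters. A hedged illustration of how an operator could fetch such a published CSR for comparison before signing, with a placeholder URL and local file name (not part of the software release):
# Illustration with placeholder values.
import requests

# 'csr-certificate.pem' is the csr-certificate connection parameter saved
# locally, used to authenticate the self-signed expose-csr nginx endpoint.
published_csr = requests.get(
    'https://[2001:db8::1]:17000/kedifa-csr.pem',
    verify='csr-certificate.pem').text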
......@@ -72,6 +72,9 @@ log-access-url = {{ dumps(json_module.dumps(log_access_url, sort_keys=True)) }}
{{ key }} = {{ dumps(value) }}
{% endfor %}
{% endif %}
{% for frontend_key, frontend_value in frontend_information.iteritems() %}
{{ frontend_key }} = {{ frontend_value }}
{% endfor %}
{% endfor %}
[buildout]
......
......@@ -18,6 +18,12 @@ LOCAL proxy.local.incoming_ip_to_bind STRING {{ ats_configuration['local-ip'] }}
CONFIG proxy.config.log.logfile_dir STRING {{ ats_directory['log'] }}
# Never change Server header
CONFIG proxy.config.http.response_server_enabled INT 0
# Handle Via header
CONFIG proxy.config.http.insert_request_via_str INT 1
CONFIG proxy.config.http.request_via_str STRING rapid-cdn-cache-{{ ats_configuration['node-id'] }}-{{ ats_configuration['version-hash'] }}
CONFIG proxy.config.http.insert_response_via_str INT 1
CONFIG proxy.config.http.response_via_str STRING rapid-cdn-cache-{{ ats_configuration['node-id'] }}-{{ ats_configuration['version-hash'] }}
# Implement RFC 5861 with core
CONFIG proxy.config.http.cache.open_write_fail_action INT 2
CONFIG proxy.config.body_factory.template_sets_dir STRING {{ ats_configuration['templates-dir'] }}
......@@ -53,13 +59,6 @@ CONFIG proxy.config.exec_thread.affinity INT 1
##############################################################################
CONFIG proxy.config.http.server_ports STRING {{ ats_configuration['local-ip'] + ':' + ats_configuration['input-port'] }}
##############################################################################
# Via: headers. Docs:
# https://docs.trafficserver.apache.org/records.config#proxy-config-http-insert-response-via-str
##############################################################################
CONFIG proxy.config.http.insert_request_via_str INT 1
CONFIG proxy.config.http.insert_response_via_str INT 0
##############################################################################
# Parent proxy configuration, in addition to these settings also see parent.config. Docs:
# https://docs.trafficserver.apache.org/records.config#parent-proxy-configuration
......
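Together with the Via headers added to backend haproxy and to the Caddy frontend earlier in this diff, every hop now stamps the traffic, so a response served through the cache ends up carrying a chain like the one below; the node id and version hash are hypothetical placeholders.
# Illustration only: expected Via chain for a cached response, with a
# hypothetical node id / version hash.
via_id = 'node-1-0123abcd'
response_via = (
    'http/1.1 backendvia, '
    'HTTP/1.1 rapid-cdn-backend-%(via_id)s, '
    'http/1.0 rapid-cdn-cache-%(via_id)s, '
    'HTTP/1.1 rapid-cdn-frontend-%(via_id)s' % dict(via_id=via_id))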
......@@ -53,6 +53,9 @@ import sys
import logging
import random
import string
from slapos.slap.standalone import SlapOSNodeInstanceError
import caucase.client
import caucase.utils
try:
......@@ -92,12 +95,6 @@ KEDIFA_PORT = '15080'
# has to be not partition one
SOURCE_IP = '127.0.0.1'
# ATS version expectation in Via string
VIA_STRING = (
r'^http\/1.1 caddy-frontend-1\[.*\] '
r'\(ApacheTrafficServer\/9\.[0-9]\.[0-9]+\)$',
)[0]
# IP on which test run, in order to mimic HTTP[s] access
TEST_IP = os.environ['SLAPOS_TEST_IPV4']
......@@ -299,7 +296,7 @@ class TestDataMixin(object):
def assertTestData(self, runtime_data, hash_value_dict=None, msg=None):
if hash_value_dict is None:
hash_value_dict = {}
filename = '%s-%s.txt' % (self.id(), 'CADDY')
filename = '%s-%s.txt' % (self.id().replace('zz_', ''), 'CADDY')
test_data_file = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'test_data', filename)
......@@ -344,7 +341,9 @@ class TestDataMixin(object):
runtime_data = '\n'.join(sorted(runtime_data))
self.assertTestData(runtime_data)
def test_file_list_log(self):
# convince this test to run last; it's a hack, but log files shall be checked
# only after all other tests have had a chance to execute
def zz_test_file_list_log(self):
self._test_file_list(['var', 'log'], [
# no control at all when cron would kick in, ignore it
'cron.log',
......@@ -427,6 +426,8 @@ def fakeHTTPSResult(domain, path, port=HTTPS_PORT,
headers.setdefault('X-Forwarded-For', '192.168.0.1')
headers.setdefault('X-Forwarded-Proto', 'irc')
headers.setdefault('X-Forwarded-Port', '17')
# Expose some Via to show how nicely it arrives at the backend
headers.setdefault('Via', 'http/1.1 clientvia')
session = requests.Session()
if source_ip is not None:
......@@ -467,6 +468,8 @@ def fakeHTTPResult(domain, path, port=HTTP_PORT,
headers.setdefault('X-Forwarded-For', '192.168.0.1')
headers.setdefault('X-Forwarded-Proto', 'irc')
headers.setdefault('X-Forwarded-Port', '17')
# Expose some Via to show how nicely it arrives at the backend
headers.setdefault('Via', 'http/1.1 clientvia')
headers['Host'] = '%s:%s' % (domain, port)
session = requests.Session()
if source_ip is not None:
......@@ -569,9 +572,18 @@ class TestHandler(BaseHTTPRequestHandler):
header_dict[header] = value.strip()
if response is None:
if 'x-reply-body' not in self.headers.dict:
headers_dict = dict()
for header in self.headers.keys():
content = self.headers.getheaders(header)
if len(content) == 0:
headers_dict[header] = None
elif len(content) == 1:
headers_dict[header] = content[0]
else:
headers_dict[header] = content
response = {
'Path': self.path,
'Incoming Headers': self.headers.dict
'Incoming Headers': headers_dict
}
response = json.dumps(response, indent=2)
else:
......@@ -592,6 +604,8 @@ class TestHandler(BaseHTTPRequestHandler):
self.send_header('Set-Cookie', 'secured=value;secure')
self.send_header('Set-Cookie', 'nonsecured=value')
if 'Via' not in drop_header_list:
self.send_header('Via', 'http/1.1 backendvia')
if compress:
self.send_header('Content-Encoding', 'gzip')
out = StringIO.StringIO()
......@@ -741,25 +755,41 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
self.logger.warning(
'Process %s still alive' % (self.server_https_auth_process, ))
@classmethod
def _fetchKedifaCaucaseCaCertificateFile(cls, parameter_dict):
ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.kedifa_caucase_ca_certificate_file = os.path.join(
cls.working_directory, 'kedifa-caucase.ca.crt.pem')
open(cls.kedifa_caucase_ca_certificate_file, 'w').write(
ca_certificate.text)
@classmethod
def _fetchBackendClientCaCertificateFile(cls, parameter_dict):
ca_certificate = requests.get(
parameter_dict['backend-client-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.backend_client_caucase_ca_certificate_file = os.path.join(
cls.working_directory, 'backend-client-caucase.ca.crt.pem')
open(cls.backend_client_caucase_ca_certificate_file, 'w').write(
ca_certificate.text)
@classmethod
def setUpMaster(cls):
# run partition until AIKC finishes
cls.runComputerPartitionUntil(
cls.untilNotReadyYetNotInMasterKeyGenerateAuthUrl)
parameter_dict = cls.requestDefaultInstance().getConnectionParameterDict()
ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.ca_certificate_file = os.path.join(cls.working_directory, 'ca.crt.pem')
open(cls.ca_certificate_file, 'w').write(ca_certificate.text)
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
auth = requests.get(
parameter_dict['master-key-generate-auth-url'],
verify=cls.ca_certificate_file)
verify=cls.kedifa_caucase_ca_certificate_file)
assert auth.status_code == httplib.CREATED
upload = requests.put(
parameter_dict['master-key-upload-url'] + auth.text,
data=cls.key_pem + cls.certificate_pem,
verify=cls.ca_certificate_file)
verify=cls.kedifa_caucase_ca_certificate_file)
assert upload.status_code == httplib.CREATED
cls.runKedifaUpdater()
......@@ -846,7 +876,8 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
except Exception as e:
self.fail(e)
def assertResponseHeaders(self, result):
def assertResponseHeaders(
self, result, cached=False, via=True, backend_reached=True):
headers = result.headers.copy()
self.assertKeyWithPop('Date', headers)
# drop vary-keys
......@@ -855,8 +886,31 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
headers.pop('Keep-Alive', None)
headers.pop('Transfer-Encoding', None)
self.assertEqual('TestBackend', headers.pop('Server', ''))
if backend_reached:
self.assertEqual('TestBackend', headers.pop('Server', ''))
via_id = '%s-%s' % (
self.node_information_dict['node-id'],
self.node_information_dict['version-hash-history'].keys()[0])
if via:
self.assertIn('Via', headers)
if cached:
self.assertEqual(
'http/1.1 backendvia, '
'HTTP/1.1 rapid-cdn-backend-%(via_id)s, '
'http/1.0 rapid-cdn-cache-%(via_id)s, '
'HTTP/1.1 rapid-cdn-frontend-%(via_id)s' % dict(via_id=via_id),
headers.pop('Via')
)
else:
self.assertEqual(
'http/1.1 backendvia, '
'HTTP/1.1 rapid-cdn-backend-%(via_id)s, '
'HTTP/1.1 rapid-cdn-frontend-%(via_id)s' % dict(via_id=via_id),
headers.pop('Via')
)
else:
self.assertNotIn('Via', headers)
return headers
def assertLogAccessUrlWithPop(self, parameter_dict):
......@@ -926,6 +980,23 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
return generate_auth_url, upload_url
def assertNodeInformationWithPop(self, parameter_dict):
key = 'caddy-frontend-1-node-information-json'
node_information_json_dict = {}
for k in parameter_dict.keys():
if k.startswith('caddy-frontend') and k.endswith(
'node-information-json'):
node_information_json_dict[k] = parameter_dict.pop(k)
self.assertEqual(
[key],
node_information_json_dict.keys()
)
node_information_dict = json.loads(node_information_json_dict[key])
self.assertIn("node-id", node_information_dict)
self.assertIn("version-hash-history", node_information_dict)
self.node_information_dict = node_information_dict
def assertBackendHaproxyStatisticUrl(self, parameter_dict):
url_key = 'caddy-frontend-1-backend-haproxy-statistic-url'
backend_haproxy_statistic_url_dict = {}
......@@ -1063,6 +1134,17 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
cls.stopServerProcess()
super(HttpFrontendTestCase, cls)._cleanup(snapshot_name)
@classmethod
def _workingDirectorySetUp(cls):
# do working directory
cls.working_directory = os.path.join(os.path.realpath(
os.environ.get(
'SLAPOS_TEST_WORKING_DIR',
os.path.join(os.getcwd(), '.slapos'))),
'caddy-frontend-test')
if not os.path.isdir(cls.working_directory):
os.mkdir(cls.working_directory)
@classmethod
def setUpClass(cls):
try:
......@@ -1084,19 +1166,12 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
super(HttpFrontendTestCase, cls).setUpClass()
try:
cls._workingDirectorySetUp()
# expose instance directory
cls.instance_path = cls.slap.instance_directory
# expose software directory, extract from found computer partition
cls.software_path = os.path.realpath(os.path.join(
cls.computer_partition_root_path, 'software_release'))
# do working directory
cls.working_directory = os.path.join(os.path.realpath(
os.environ.get(
'SLAPOS_TEST_WORKING_DIR',
os.path.join(os.getcwd(), '.slapos'))),
'caddy-frontend-test')
if not os.path.isdir(cls.working_directory):
os.mkdir(cls.working_directory)
cls.setUpMaster()
cls.waitForCaddy()
except BaseException:
......@@ -1244,20 +1319,27 @@ class SlaveHttpFrontendTestCase(HttpFrontendTestCase):
]
)
def assertSlaveBase(self, reference):
def assertSlaveBase(
self, reference, expected_parameter_dict=None, hostname=None):
if expected_parameter_dict is None:
expected_parameter_dict = {}
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = reference.translate(None, '_-').lower()
self.current_generate_auth, self.current_upload_url = \
self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertNodeInformationWithPop(parameter_dict)
if hostname is None:
hostname = reference.translate(None, '_-').lower()
expected_parameter_dict.update(**{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
})
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
expected_parameter_dict,
parameter_dict
)
......@@ -1291,6 +1373,7 @@ class TestMasterRequestDomain(HttpFrontendTestCase, TestDataMixin):
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
......@@ -1322,6 +1405,122 @@ class TestMasterRequest(HttpFrontendTestCase, TestDataMixin):
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'domain': 'None',
'accepted-slave-amount': '0',
'rejected-slave-amount': '0',
'slave-amount': '0',
'rejected-slave-dict': {}},
parameter_dict
)
class TestMasterAIKCDisabledAIBCCDisabledRequest(
HttpFrontendTestCase, TestDataMixin):
@classmethod
def getInstanceParameterDict(cls):
return {
'port': HTTPS_PORT,
'plain_http_port': HTTP_PORT,
'kedifa_port': KEDIFA_PORT,
'caucase_port': CAUCASE_PORT,
'automatic-internal-kedifa-caucase-csr': 'false',
'automatic-internal-backend-client-caucase-csr': 'false',
}
@classmethod
def _setUpClass(cls):
instance_max_retry = cls.instance_max_retry
try:
cls.instance_max_retry = 3
super(TestMasterAIKCDisabledAIBCCDisabledRequest, cls)._setUpClass()
except SlapOSNodeInstanceError: # Note: SLAPOS_TEST_DEBUG=1 will interrupt
pass
else:
raise ValueError('_setUpClass unexpected success')
# A cluster requested without automatic certificate handling will never
# stabilize, as nodes can't join the cluster, so the user is required
# to first manually create a key and certificate for themselves, then
# manually create certificates for the services
cls._workingDirectorySetUp()
_, kedifa_key_pem, _, kedifa_csr_pem = createCSR('Kedifa User')
_, backend_client_key_pem, _, backend_client_csr_pem = createCSR(
'Backend Client User')
parameter_dict = cls.requestDefaultInstance(
).getConnectionParameterDict()
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
cls._fetchBackendClientCaCertificateFile(parameter_dict)
with open(cls.kedifa_caucase_ca_certificate_file) as fh:
kedifa_ca_pem = fh.read()
with open(cls.backend_client_caucase_ca_certificate_file) as fh:
backend_client_ca_pem = fh.read()
kedifa_caucase_url = parameter_dict['kedifa-caucase-url']
backend_client_caucase_url = parameter_dict['backend-client-caucase-url']
# Simulate human: create user keys
def getCauCertificate(ca_url, ca_pem, csr_pem):
cau_client = caucase.client.CaucaseClient(
ca_url=ca_url + '/cau',
ca_crt_pem_list=caucase.utils.getCertList(ca_pem),
)
csr_id = cau_client.createCertificateSigningRequest(csr_pem)
return cau_client.getCertificate(csr_id)
kedifa_crt_pem = getCauCertificate(
kedifa_caucase_url, kedifa_ca_pem, kedifa_csr_pem)
backend_client_crt_pem = getCauCertificate(
backend_client_caucase_url, backend_client_ca_pem,
backend_client_csr_pem)
kedifa_key_file = os.path.join(cls.working_directory, 'kedifa-key.pem')
with open(kedifa_key_file, 'w') as fh:
fh.write(kedifa_crt_pem + kedifa_key_pem)
backend_client_key_file = os.path.join(
cls.working_directory, 'backend-client-key.pem')
with open(backend_client_key_file, 'w') as fh:
fh.write(backend_client_crt_pem + backend_client_key_pem)
# Simulate human: create service keys
def signAllCasCsr(ca_url, ca_pem, user_key, pending_csr_amount):
client = caucase.client.CaucaseClient(
ca_url=ca_url + '/cas',
ca_crt_pem_list=caucase.utils.getCertList(ca_pem), user_key=user_key)
pending_csr_list = client.getPendingCertificateRequestList()
assert len(pending_csr_list) == pending_csr_amount
for csr_entry in pending_csr_list:
client.createCertificate(int(csr_entry['id']))
signAllCasCsr(kedifa_caucase_url, kedifa_ca_pem, kedifa_key_file, 2)
signAllCasCsr(
backend_client_caucase_url, backend_client_ca_pem,
backend_client_key_file, 1)
# Continue instance processing, copy&paste from
# slapos.testing.testcase.SlapOSInstanceTestCase._setUpClass
# as we hack a lot
cls.instance_max_retry = instance_max_retry
cls.waitForInstance()
cls.computer_partition = cls.requestDefaultInstance()
cls.computer_partition_root_path = os.path.join(
cls.slap._instance_root, cls.computer_partition.getId())
def test(self):
parameter_dict = self.parseConnectionParameterDict()
self.assertKeyWithPop('monitor-setup-url', parameter_dict)
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
self.assertKeyWithPop('kedifa-csr-certificate', parameter_dict)
self.assertKeyWithPop('kedifa-csr-url', parameter_dict)
self.assertKeyWithPop('caddy-frontend-1-kedifa-csr-url', parameter_dict)
self.assertKeyWithPop(
'caddy-frontend-1-backend-client-csr-url', parameter_dict)
self.assertKeyWithPop(
'caddy-frontend-1-csr-certificate', parameter_dict)
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
......@@ -1582,7 +1781,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
'enable_cache': True,
'disable-via-header': True,
},
'enable_cache-https-only': {
'enable_cache-https-only-false': {
'url': cls.backend_url,
'https-only': False,
'enable_cache': True,
......@@ -1778,6 +1977,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
self.assertNodeInformationWithPop(parameter_dict)
expected_parameter_dict = {
'monitor-base-url': 'https://[%s]:8401' % self._ipv6_address,
......@@ -1809,6 +2009,40 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
os.path.join(
partition_path, 'etc', 'httpd-cors.cfg'), 'r').read().strip())
def test_node_information_json(self):
node_information_file_path = glob.glob(os.path.join(
self.instance_path, '*', '.frontend-node-information.json'))[0]
with open(node_information_file_path, 'r') as fh:
current_node_information = json.load(fh)
modified_node_information = current_node_information.copy()
modified_node_information['version-hash-history'] = {'testhash': 'testurl'}
def writeNodeInformation(node_information, path):
with open(path, 'w') as fh:
json.dump(node_information, fh, sort_keys=True)
self.waitForInstance()
self.waitForInstance()
self.waitForInstance()
self.addCleanup(
writeNodeInformation, current_node_information,
node_information_file_path)
# simulate that upgrade happened
writeNodeInformation(
modified_node_information,
node_information_file_path)
parameter_dict = self.parseConnectionParameterDict()
expected_node_information = {
'node-id': current_node_information['node-id'],
'version-hash-history': current_node_information['version-hash-history']
}
expected_node_information['version-hash-history']['testhash'] = 'testurl'
self.assertEqual(
json.loads(parameter_dict['caddy-frontend-1-node-information-json']),
expected_node_information
)
def test_slave_partition_state(self):
partition_path = self.getSlavePartitionPath()
self.assertTrue(
......@@ -1895,7 +2129,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
def assertBackendHeaders(
self, backend_header_dict, domain, source_ip=SOURCE_IP, port=HTTPS_PORT,
proto='https', ignore_header_list=None):
proto='https', ignore_header_list=None, cached=False):
if ignore_header_list is None:
ignore_header_list = []
if 'Host' not in ignore_header_list:
......@@ -1914,6 +2148,28 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
backend_header_dict['x-forwarded-proto'],
proto
)
via_id = '%s-%s' % (
self.node_information_dict['node-id'],
self.node_information_dict['version-hash-history'].keys()[0])
if cached:
self.assertEqual(
[
'http/1.1 clientvia',
'HTTP/1.1 rapid-cdn-frontend-%(via_id)s, '
'http/1.1 rapid-cdn-cache-%(via_id)s' % dict(via_id=via_id),
'HTTP/1.1 rapid-cdn-backend-%(via_id)s' % dict(via_id=via_id)
],
backend_header_dict['via']
)
else:
self.assertEqual(
[
'http/1.1 clientvia',
'HTTP/1.1 rapid-cdn-frontend-%(via_id)s' % dict(via_id=via_id),
'HTTP/1.1 rapid-cdn-backend-%(via_id)s' % dict(via_id=via_id)
],
backend_header_dict['via']
)
def test_telemetry_disabled(self):
# here we trust that telemetry not present in error log means it was
......@@ -1925,26 +2181,14 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertNotIn('Sending telemetry', fh.read(), 'Telemetry enabled')
def test_url(self):
reference = 'Url'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = reference.translate(None, '_-').lower()
self.assertEqual(
parameter_dict = self.assertSlaveBase(
'Url',
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'warning-list': [
"slave url ' %s/?a=b&c= ' has been converted to '%s/?a=b&c='" % (
self.backend_url, self.backend_url)],
},
parameter_dict
}
)
result = fakeHTTPSResult(
parameter_dict['domain'],
'test-path/deep/.././deeper',
......@@ -1959,7 +2203,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.certificate_pem,
der2pem(result.peercert))
self.assertNotIn('Strict-Transport-Security', result.headers)
headers = self.assertResponseHeaders(result)
self.assertNotIn('Strict-Transport-Security', headers)
self.assertEqualResultJson(result, 'Path', '?a=b&c=/test-path/deeper')
try:
......@@ -1968,12 +2213,12 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
raise ValueError('JSON decode problem in:\n%s' % (result.text,))
self.assertEqual(j['Incoming Headers']['timeout'], '10')
self.assertFalse('Content-Encoding' in result.headers)
self.assertFalse('Content-Encoding' in headers)
self.assertBackendHeaders(j['Incoming Headers'], parameter_dict['domain'])
self.assertEqual(
'secured=value;secure, nonsecured=value',
result.headers['Set-Cookie']
headers['Set-Cookie']
)
self.assertLastLogLineRegexp(
......@@ -2004,9 +2249,11 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
result_http.status_code
)
headers = self.assertResponseHeaders(
result_http, via=False, backend_reached=False)
self.assertEqual(
'https://url.example.com:%s/test-path/deeper' % (HTTP_PORT,),
result_http.headers['Location']
headers['Location']
)
# check that timeouts are correctly set in the haproxy configuration
......@@ -2172,24 +2419,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_compressed_result(self):
reference = 'Url'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = reference.translate(None, '_-').lower()
self.assertEqual(
parameter_dict = self.assertSlaveBase(
'Url',
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'warning-list': [
"slave url ' %s/?a=b&c= ' has been converted to '%s/?a=b&c='" % (
self.backend_url, self.backend_url)],
},
parameter_dict
}
)
result_compressed = fakeHTTPSResult(
......@@ -2224,24 +2460,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertFalse('Content-Encoding' in result_not_compressed.headers)
def test_no_content_type_alter(self):
reference = 'Url'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = reference.translate(None, '_-').lower()
self.assertEqual(
parameter_dict = self.assertSlaveBase(
'Url',
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'warning-list': [
"slave url ' %s/?a=b&c= ' has been converted to '%s/?a=b&c='" % (
self.backend_url, self.backend_url)],
},
parameter_dict
}
)
result = fakeHTTPSResult(
parameter_dict['domain'],
......@@ -2410,20 +2635,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_server_alias_wildcard(self):
parameter_dict = self.parseSlaveParameterDict('server-alias-wildcard')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'serveraliaswildcard.example.com',
'replication_number': '1',
'url': 'http://serveraliaswildcard.example.com',
'site_url': 'http://serveraliaswildcard.example.com',
'secure_access': 'https://serveraliaswildcard.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('server-alias-wildcard')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -2450,20 +2662,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_server_alias_duplicated(self):
parameter_dict = self.parseSlaveParameterDict('server-alias-duplicated')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'serveraliasduplicated.example.com',
'replication_number': '1',
'url': 'http://serveraliasduplicated.example.com',
'site_url': 'http://serveraliasduplicated.example.com',
'secure_access': 'https://serveraliasduplicated.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('server-alias-duplicated')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -2484,22 +2683,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_server_alias_custom_domain_duplicated(self):
parameter_dict = self.parseSlaveParameterDict(
'server-alias_custom_domain-duplicated')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'alias4.example.com',
'replication_number': '1',
'url': 'http://alias4.example.com',
'site_url': 'http://alias4.example.com',
'secure_access': 'https://alias4.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'server-alias_custom_domain-duplicated', hostname='alias4')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -2515,27 +2700,13 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
raise NotImplementedError(self.id())
def test_ssl_ca_crt(self):
parameter_dict = self.parseSlaveParameterDict(
parameter_dict = self.assertSlaveBase(
'custom_domain_ssl_crt_ssl_key_ssl_ca_crt')
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'customdomainsslcrtsslkeysslcacrt.example.com',
'replication_number': '1',
'url': 'http://customdomainsslcrtsslkeysslcacrt.example.com',
'site_url': 'http://customdomainsslcrtsslkeysslcacrt.example.com',
'secure_access':
'https://customdomainsslcrtsslkeysslcacrt.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = self.customdomain_ca_certificate_pem + \
......@@ -2543,9 +2714,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.ca.certificate_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -2567,58 +2738,30 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqual(data, out.read())
def test_ssl_ca_crt_only(self):
parameter_dict = self.parseSlaveParameterDict('ssl_ca_crt_only')
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslcacrtonly.example.com',
'replication_number': '1',
'url': 'http://sslcacrtonly.example.com',
'site_url': 'http://sslcacrtonly.example.com',
'secure_access':
'https://sslcacrtonly.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
self.assertSlaveBase('ssl_ca_crt_only')
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = self.ca.certificate_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.UNPROCESSABLE_ENTITY, upload.status_code)
self.assertEqual('Key incorrect', upload.text)
def test_ssl_ca_crt_garbage(self):
parameter_dict = self.parseSlaveParameterDict('ssl_ca_crt_garbage')
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslcacrtgarbage.example.com',
'replication_number': '1',
'url': 'http://sslcacrtgarbage.example.com',
'site_url': 'http://sslcacrtgarbage.example.com',
'secure_access':
'https://sslcacrtgarbage.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('ssl_ca_crt_garbage')
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
_, ca_key_pem, csr, _ = createCSR(
......@@ -2627,9 +2770,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
data = ca_certificate_pem + ca_key_pem + 'some garbage'
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -2653,33 +2796,19 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqual(data, out.read())
def test_ssl_ca_crt_does_not_match(self):
parameter_dict = self.parseSlaveParameterDict('ssl_ca_crt_does_not_match')
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslcacrtdoesnotmatch.example.com',
'replication_number': '1',
'url': 'http://sslcacrtdoesnotmatch.example.com',
'site_url': 'http://sslcacrtdoesnotmatch.example.com',
'secure_access':
'https://sslcacrtdoesnotmatch.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('ssl_ca_crt_does_not_match')
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = self.certificate_pem + self.key_pem + self.ca.certificate_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -2721,22 +2850,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result_http, 'Path', '/test-path/deeper')
def test_custom_domain(self):
reference = 'custom_domain'
hostname = 'mycustomdomain'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'custom_domain', hostname='mycustomdomain')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -2748,22 +2863,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_custom_domain_server_alias(self):
reference = 'custom_domain_server_alias'
hostname = 'mycustomdomainserveralias'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'custom_domain_server_alias', hostname='mycustomdomainserveralias')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -2785,20 +2886,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
def test_custom_domain_wildcard(self):
parameter_dict = self.parseSlaveParameterDict('custom_domain_wildcard')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': '*.customdomain.example.com',
'replication_number': '1',
'url': 'http://*.customdomain.example.com',
'site_url': 'http://*.customdomain.example.com',
'secure_access': 'https://*.customdomain.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
self.assertSlaveBase(
'custom_domain_wildcard', hostname='*.customdomain')
result = fakeHTTPSResult(
'wild.customdomain.example.com',
......@@ -2811,35 +2900,19 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_custom_domain_ssl_crt_ssl_key(self):
reference = 'custom_domain_ssl_crt_ssl_key'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('custom_domain_ssl_crt_ssl_key')
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = self.customdomain_certificate_pem + \
self.customdomain_key_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -3084,22 +3157,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_type_notebook(self):
reference = 'type-notebook'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('type-notebook')
result = fakeHTTPSResult(
parameter_dict['domain'],
......@@ -3358,22 +3416,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_type_redirect_custom_domain(self):
reference = 'type-redirect-custom_domain'
hostname = 'customdomaintyperedirect'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'type-redirect-custom_domain', hostname='customdomaintyperedirect')
result = fakeHTTPSResult(
parameter_dict['domain'],
......@@ -3394,25 +3438,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_ssl_proxy_verify_ssl_proxy_ca_crt_unverified(self):
parameter_dict = self.parseSlaveParameterDict(
parameter_dict = self.assertSlaveBase(
'ssl-proxy-verify_ssl_proxy_ca_crt-unverified')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslproxyverifysslproxycacrtunverified.example.com',
'replication_number': '1',
'url': 'http://sslproxyverifysslproxycacrtunverified.example.com',
'site_url':
'http://sslproxyverifysslproxycacrtunverified.example.com',
'secure_access':
'https://sslproxyverifysslproxycacrtunverified.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -3603,22 +3631,9 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
def test_enable_cache_custom_domain(self):
reference = 'enable_cache_custom_domain'
hostname = 'customdomainenablecache'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'enable_cache_custom_domain',
hostname='customdomainenablecache')
result = fakeHTTPSResult(
parameter_dict['domain'],
......@@ -3628,7 +3643,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, True)
self.assertKeyWithPop('Age', headers)
self.assertEqual(
......@@ -3642,13 +3657,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
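# assertBackendHeaders(..., cached=True) subsumes the inline 'via' checks
# removed just above. A self-contained sketch of that check, with the
# pattern passed in because VIA_STRING is defined elsewhere in this module;
# the helper's real body may differ:
import re

def check_cached_via_header(backend_header_dict, via_pattern):
  # the caching proxy adds a Via header; pop it so comparisons of the
  # remaining headers stay unaffected, then match it against the pattern
  via = backend_header_dict.pop('via', None)
  assert via is not None, 'cache did not add a Via header'
  assert re.match(via_pattern, via) is not None, via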
def test_enable_cache_server_alias(self):
parameter_dict = self.assertSlaveBase('enable_cache_server_alias')
......@@ -3661,7 +3671,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, cached=True)
self.assertKeyWithPop('Age', headers)
self.assertEqual(
......@@ -3675,13 +3685,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
result = fakeHTTPResult(
'enablecacheserveralias1.example.com',
......@@ -3699,8 +3704,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
result.headers['Location']
)
def test_enable_cache_https_only(self):
parameter_dict = self.assertSlaveBase('enable_cache-https-only')
def test_enable_cache_https_only_false(self):
parameter_dict = self.assertSlaveBase('enable_cache-https-only-false')
result = fakeHTTPSResult(
parameter_dict['domain'],
......@@ -3710,7 +3715,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, cached=True)
self.assertKeyWithPop('Age', headers)
self.assertEqual(
{
......@@ -3731,17 +3736,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqual(httplib.OK, result.status_code)
self.assertEqualResultJson(result, 'Path', '/HTTPS/test')
self.assertResponseHeaders(result)
result = fakeHTTPSResult(
parameter_dict['domain'],
'HTTP/test', headers={
'X-Reply-Header-Cache-Control': 'max-age=1, stale-while-'
'revalidate=3600, stale-if-error=3600'})
self.assertEqual(httplib.OK, result.status_code)
self.assertEqualResultJson(result, 'Path', '/HTTP/test')
self.assertResponseHeaders(result)
self.assertResponseHeaders(result, cached=True)
def test_enable_cache(self):
parameter_dict = self.assertSlaveBase('enable_cache')
......@@ -3758,7 +3753,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path/deeper')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, cached=True)
self.assertKeyWithPop('Age', headers)
......@@ -3773,13 +3768,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
# BEGIN: Check that squid.log is correctly filled in
ats_log_file_list = glob.glob(
......@@ -3967,13 +3957,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
# check that stale-if-error support is really respected if it is not present
# in the request
......@@ -4089,7 +4074,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, cached=True)
self.assertKeyWithPop('Age', headers)
......@@ -4102,13 +4087,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
try:
j = result.json()
......@@ -4128,7 +4108,7 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqualResultJson(result, 'Path', '/test-path')
headers = self.assertResponseHeaders(result)
headers = self.assertResponseHeaders(result, via=False)
self.assertKeyWithPop('Age', headers)
......@@ -4141,13 +4121,8 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
)
backend_headers = result.json()['Incoming Headers']
self.assertBackendHeaders(backend_headers, parameter_dict['domain'])
via = backend_headers.pop('via', None)
self.assertNotEqual(via, None)
self.assertRegexpMatches(
via,
VIA_STRING
)
self.assertBackendHeaders(
backend_headers, parameter_dict['domain'], cached=True)
def test_enable_http2_false(self):
parameter_dict = self.assertSlaveBase('enable-http2-false')
......@@ -4518,6 +4493,24 @@ class TestReplicateSlave(SlaveHttpFrontendTestCase, TestDataMixin):
parameter_dict = self.parseSlaveParameterDict('replicate')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
key_list = [
'caddy-frontend-1-node-information-json',
'caddy-frontend-2-node-information-json'
]
node_information_json_dict = {}
for k in parameter_dict.keys():
if k.startswith('caddy-frontend') and k.endswith(
'node-information-json'):
node_information_json_dict[k] = parameter_dict.pop(k)
self.assertEqual(
key_list,
sorted(node_information_json_dict.keys())
)
node_information_dict = json.loads(node_information_json_dict[key_list[0]])
self.assertIn("node-id", node_information_dict)
self.assertIn("version-hash-history", node_information_dict)
self.node_information_dict = node_information_dict
self.assertEqual(
{
'domain': 'replicate.example.com',
......@@ -4646,61 +4639,19 @@ class TestEnableHttp2ByDefaultFalseSlave(SlaveHttpFrontendTestCase,
}
def test_enable_http2_default(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2default.example.com',
'replication_number': '1',
'url': 'http://enablehttp2default.example.com',
'site_url': 'http://enablehttp2default.example.com',
'secure_access':
'https://enablehttp2default.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-default')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
def test_enable_http2_false(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-false')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2false.example.com',
'replication_number': '1',
'url': 'http://enablehttp2false.example.com',
'site_url': 'http://enablehttp2false.example.com',
'secure_access':
'https://enablehttp2false.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-false')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
def test_enable_http2_true(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-true')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2true.example.com',
'replication_number': '1',
'url': 'http://enablehttp2true.example.com',
'site_url': 'http://enablehttp2true.example.com',
'secure_access':
'https://enablehttp2true.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-true')
self.assertTrue(
isHTTP2(parameter_dict['domain']))
......@@ -4736,61 +4687,19 @@ class TestEnableHttp2ByDefaultDefaultSlave(SlaveHttpFrontendTestCase,
}
def test_enable_http2_default(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2default.example.com',
'replication_number': '1',
'url': 'http://enablehttp2default.example.com',
'site_url': 'http://enablehttp2default.example.com',
'secure_access':
'https://enablehttp2default.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-default')
self.assertTrue(
isHTTP2(parameter_dict['domain']))
def test_enable_http2_false(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-false')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2false.example.com',
'replication_number': '1',
'url': 'http://enablehttp2false.example.com',
'site_url': 'http://enablehttp2false.example.com',
'secure_access':
'https://enablehttp2false.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-false')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
def test_enable_http2_true(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-true')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2true.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'replication_number': '1',
'url': 'http://enablehttp2true.example.com',
'site_url': 'http://enablehttp2true.example.com',
'secure_access':
'https://enablehttp2true.example.com',
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-true')
self.assertTrue(
isHTTP2(parameter_dict['domain']))
......@@ -4801,6 +4710,7 @@ class TestRe6stVerificationUrlDefaultSlave(SlaveHttpFrontendTestCase,
@classmethod
def getInstanceParameterDict(cls):
return {
'domain': 'example.com',
'port': HTTPS_PORT,
'plain_http_port': HTTP_PORT,
'kedifa_port': KEDIFA_PORT,
......@@ -4822,20 +4732,7 @@ class TestRe6stVerificationUrlDefaultSlave(SlaveHttpFrontendTestCase,
return True
def test_default(self):
parameter_dict = self.parseSlaveParameterDict('default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'default.None',
'replication_number': '1',
'url': 'http://default.None',
'site_url': 'http://default.None',
'secure_access': 'https://default.None',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
self.assertSlaveBase('default')
re6st_connectivity_promise_list = glob.glob(
os.path.join(
......@@ -4888,20 +4785,7 @@ class TestRe6stVerificationUrlSlave(SlaveHttpFrontendTestCase,
except Exception:
pass
parameter_dict = self.parseSlaveParameterDict('default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'domain': 'default.example.com',
'replication_number': '1',
'url': 'http://default.example.com',
'site_url': 'http://default.example.com',
'secure_access': 'https://default.example.com',
},
parameter_dict
)
self.assertSlaveBase('default')
re6st_connectivity_promise_list = glob.glob(
os.path.join(
......@@ -4928,21 +4812,7 @@ class TestSlaveGlobalDisableHttp2(TestSlave):
return instance_parameter_dict
def test_enable_http2_default(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2default.example.com',
'replication_number': '1',
'url': 'http://enablehttp2default.example.com',
'site_url': 'http://enablehttp2default.example.com',
'secure_access':
'https://enablehttp2default.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-default')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -4977,21 +4847,7 @@ class TestEnableHttp2ByDefaultFalseSlaveGlobalDisableHttp2(
return instance_parameter_dict
def test_enable_http2_true(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-true')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2true.example.com',
'replication_number': '1',
'url': 'http://enablehttp2true.example.com',
'site_url': 'http://enablehttp2true.example.com',
'secure_access':
'https://enablehttp2true.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-true')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
......@@ -5008,41 +4864,13 @@ class TestEnableHttp2ByDefaultDefaultSlaveGlobalDisableHttp2(
return instance_parameter_dict
def test_enable_http2_true(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-true')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2true.example.com',
'replication_number': '1',
'url': 'http://enablehttp2true.example.com',
'site_url': 'http://enablehttp2true.example.com',
'secure_access':
'https://enablehttp2true.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-true')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
def test_enable_http2_default(self):
parameter_dict = self.parseSlaveParameterDict('enable-http2-default')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'enablehttp2default.example.com',
'replication_number': '1',
'url': 'http://enablehttp2default.example.com',
'site_url': 'http://enablehttp2default.example.com',
'secure_access':
'https://enablehttp2default.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('enable-http2-default')
self.assertFalse(
isHTTP2(parameter_dict['domain']))
......@@ -5057,11 +4885,7 @@ class TestSlaveSlapOSMasterCertificateCompatibilityOverrideMaster(
cls.untilNotReadyYetNotInMasterKeyGenerateAuthUrl)
parameter_dict = cls.requestDefaultInstance().getConnectionParameterDict()
ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.ca_certificate_file = os.path.join(cls.working_directory, 'ca.crt.pem')
open(cls.ca_certificate_file, 'w').write(ca_certificate.text)
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
# Do not upload certificates for the master partition
@classmethod
......@@ -5087,22 +4911,8 @@ class TestSlaveSlapOSMasterCertificateCompatibilityOverrideMaster(
}
def test_ssl_from_master_kedifa_overrides_master_certificate(self):
reference = 'ssl_from_master_kedifa_overrides_master_certificate'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'ssl_from_master_kedifa_overrides_master_certificate')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5120,11 +4930,11 @@ class TestSlaveSlapOSMasterCertificateCompatibilityOverrideMaster(
self.requestDefaultInstance().getConnectionParameterDict()
auth = requests.get(
master_parameter_dict['master-key-generate-auth-url'],
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
requests.put(
master_parameter_dict['master-key-upload-url'] + auth.text,
data=key_pem + certificate_pem,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.runKedifaUpdater()
result = fakeHTTPSResult(
......@@ -5147,11 +4957,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
cls.untilNotReadyYetNotInMasterKeyGenerateAuthUrl)
parameter_dict = cls.requestDefaultInstance().getConnectionParameterDict()
ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.ca_certificate_file = os.path.join(cls.working_directory, 'ca.crt.pem')
open(cls.ca_certificate_file, 'w').write(ca_certificate.text)
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
# Do not upload certificates for the master partition
@classmethod
......@@ -5288,6 +5094,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertKeyWithPop('monitor-setup-url', parameter_dict)
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertNodeInformationWithPop(parameter_dict)
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
......@@ -5355,21 +5162,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
)
def test_ssl_from_master(self):
parameter_dict = self.parseSlaveParameterDict('ssl_from_master')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = 'ssl_from_master'.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('ssl_from_master')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5381,22 +5174,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_ssl_from_master_kedifa_overrides(self):
reference = 'ssl_from_master_kedifa_overrides'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('ssl_from_master_kedifa_overrides')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5412,16 +5190,16 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = certificate_pem + key_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -5435,27 +5213,14 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_ssl_from_slave(self):
reference = 'ssl_from_slave'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'ssl_from_slave',
expected_parameter_dict={
'warning-list': [
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url',
]
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5467,27 +5232,12 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_ssl_from_slave_kedifa_overrides(self):
reference = 'ssl_from_slave_kedifa_overrides'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'warning-list': [
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url',
]
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'ssl_from_slave_kedifa_overrides',
expected_parameter_dict={
'warning-list': ['ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url']
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5503,16 +5253,16 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = certificate_pem + key_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -5527,22 +5277,7 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_type_notebook_ssl_from_master(self):
reference = 'type-notebook-ssl_from_master'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('type-notebook-ssl_from_master')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path',
......@@ -5555,22 +5290,8 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_type_notebook_ssl_from_master_kedifa_overrides(self):
reference = 'type-notebook-ssl_from_master_kedifa_overrides'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'type-notebook-ssl_from_master_kedifa_overrides')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path',
......@@ -5587,16 +5308,16 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = certificate_pem + key_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -5612,26 +5333,14 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_type_notebook_ssl_from_slave(self):
reference = 'type-notebook-ssl_from_slave'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'type-notebook-ssl_from_slave',
expected_parameter_dict={
'warning-list': [
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url',
]
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path',
......@@ -5644,26 +5353,12 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_type_notebook_ssl_from_slave_kedifa_overrides(self):
reference = 'type-notebook-ssl_from_slave_kedifa_overrides'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
'warning-list': [
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url',
]
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'type-notebook-ssl_from_slave_kedifa_overrides',
expected_parameter_dict={
'warning-list': ['ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url']
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path',
......@@ -5680,16 +5375,16 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
# now that the place to put the key is known, put the key there
auth = requests.get(
generate_auth,
verify=self.ca_certificate_file)
self.current_generate_auth,
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, auth.status_code)
data = certificate_pem + key_pem
upload = requests.put(
upload_url + auth.text,
self.current_upload_url + auth.text,
data=data,
verify=self.ca_certificate_file)
verify=self.kedifa_caucase_ca_certificate_file)
self.assertEqual(httplib.CREATED, upload.status_code)
self.runKedifaUpdater()
......@@ -5706,25 +5401,12 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
@skip('Not implemented in new test system')
def test_custom_domain_ssl_crt_ssl_key(self):
reference = 'custom_domain_ssl_crt_ssl_key'
parameter_dict = self.parseSlaveParameterDict(reference)
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
hostname = reference.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'custom_domain_ssl_crt_ssl_key',
expected_parameter_dict={
'warning-list': ['ssl_key is obsolete, please use key-upload-url',
'ssl_crt is obsolete, please use key-upload-url']
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5736,27 +5418,15 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_ssl_ca_crt(self):
parameter_dict = self.parseSlaveParameterDict(
'custom_domain_ssl_crt_ssl_key_ssl_ca_crt')
self.assertLogAccessUrlWithPop(parameter_dict)
generate_auth, upload_url = self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'customdomainsslcrtsslkeysslcacrt.example.com',
'replication_number': '1',
'url': 'http://customdomainsslcrtsslkeysslcacrt.example.com',
'site_url': 'http://customdomainsslcrtsslkeysslcacrt.example.com',
'secure_access':
'https://customdomainsslcrtsslkeysslcacrt.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'custom_domain_ssl_crt_ssl_key_ssl_ca_crt',
expected_parameter_dict={
'warning-list': [
'ssl_ca_crt is obsolete, please use key-upload-url',
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url'
]
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5825,25 +5495,14 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
)
def test_ssl_ca_crt_garbage(self):
parameter_dict = self.parseSlaveParameterDict('ssl_ca_crt_garbage')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslcacrtgarbage.example.com',
'replication_number': '1',
'url': 'http://sslcacrtgarbage.example.com',
'site_url': 'http://sslcacrtgarbage.example.com',
'secure_access':
'https://sslcacrtgarbage.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'ssl_ca_crt_garbage',
expected_parameter_dict={
'warning-list': [
'ssl_ca_crt is obsolete, please use key-upload-url',
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url']
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5856,26 +5515,15 @@ class TestSlaveSlapOSMasterCertificateCompatibility(
self.assertEqualResultJson(result, 'Path', '/test-path')
def test_ssl_ca_crt_does_not_match(self):
parameter_dict = self.parseSlaveParameterDict('ssl_ca_crt_does_not_match')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'sslcacrtdoesnotmatch.example.com',
'replication_number': '1',
'url': 'http://sslcacrtdoesnotmatch.example.com',
'site_url': 'http://sslcacrtdoesnotmatch.example.com',
'secure_access':
'https://sslcacrtdoesnotmatch.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
parameter_dict = self.assertSlaveBase(
'ssl_ca_crt_does_not_match',
expected_parameter_dict={
'warning-list': [
'ssl_ca_crt is obsolete, please use key-upload-url',
'ssl_crt is obsolete, please use key-upload-url',
'ssl_key is obsolete, please use key-upload-url'
]
},
parameter_dict
)
})
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -5907,11 +5555,7 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
cls.untilNotReadyYetNotInMasterKeyGenerateAuthUrl)
parameter_dict = cls.requestDefaultInstance().getConnectionParameterDict()
ca_certificate = requests.get(
parameter_dict['kedifa-caucase-url'] + '/cas/crt/ca.crt.pem')
assert ca_certificate.status_code == httplib.OK
cls.ca_certificate_file = os.path.join(cls.working_directory, 'ca.crt.pem')
open(cls.ca_certificate_file, 'w').write(ca_certificate.text)
cls._fetchKedifaCaucaseCaCertificateFile(parameter_dict)
# Do not upload certificates for the master partition
instance_parameter_dict = {
......@@ -5946,6 +5590,7 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
self.assertKeyWithPop('monitor-setup-url', parameter_dict)
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertNodeInformationWithPop(parameter_dict)
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
......@@ -5968,21 +5613,7 @@ class TestSlaveSlapOSMasterCertificateCompatibilityUpdate(
)
def test_apache_key_apache_certificate_update(self):
parameter_dict = self.parseSlaveParameterDict('ssl_from_master')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, '')
hostname = 'ssl_from_master'.translate(None, '_-')
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('ssl_from_master')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -6051,6 +5682,7 @@ class TestSlaveCiphers(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertKeyWithPop('monitor-setup-url', parameter_dict)
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertNodeInformationWithPop(parameter_dict)
self.assertRejectedSlavePromiseEmptyWithPop(parameter_dict)
expected_parameter_dict = {
......@@ -6319,6 +5951,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
self.assertKeyWithPop('monitor-setup-url', parameter_dict)
self.assertBackendHaproxyStatisticUrl(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertNodeInformationWithPop(parameter_dict)
self.assertRejectedSlavePromiseWithPop(parameter_dict)
expected_parameter_dict = {
......@@ -6408,6 +6041,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_url(self):
parameter_dict = self.parseSlaveParameterDict('URL')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6418,6 +6052,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_https_url(self):
parameter_dict = self.parseSlaveParameterDict('HTTPS-URL')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6429,6 +6064,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_ssl_proxy_verify_ssl_proxy_ca_crt_damaged(self):
parameter_dict = self.parseSlaveParameterDict(
'SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_DAMAGED')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{'request-error-list': ["ssl_proxy_ca_crt is invalid"]},
parameter_dict
......@@ -6437,6 +6073,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_ssl_proxy_verify_ssl_proxy_ca_crt_empty(self):
parameter_dict = self.parseSlaveParameterDict(
'SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_EMPTY')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{'request-error-list': ["ssl_proxy_ca_crt is invalid"]},
parameter_dict
......@@ -6445,6 +6082,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_health_check_failover_ssl_proxy_ca_crt_damaged(self):
parameter_dict = self.parseSlaveParameterDict(
'health-check-failover-SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_DAMAGED')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6456,6 +6094,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_health_check_failover_ssl_proxy_ca_crt_empty(self):
parameter_dict = self.parseSlaveParameterDict(
'health-check-failover-SSL-PROXY-VERIFY_SSL_PROXY_CA_CRT_EMPTY')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6465,20 +6104,8 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
)
def test_server_alias_same(self):
parameter_dict = self.parseSlaveParameterDict('SERVER-ALIAS-SAME')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'serveraliassame.example.com',
'replication_number': '1',
'url': 'http://serveraliassame.example.com',
'site_url': 'http://serveraliassame.example.com',
'secure_access': 'https://serveraliassame.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase(
'SERVER-ALIAS-SAME')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -6491,6 +6118,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_custom_domain_unsafe(self):
parameter_dict = self.parseSlaveParameterDict('CUSTOM_DOMAIN-UNSAFE')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6502,6 +6130,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_server_alias_unsafe(self):
parameter_dict = self.parseSlaveParameterDict('SERVER-ALIAS-UNSAFE')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6513,6 +6142,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_bad_ciphers(self):
parameter_dict = self.parseSlaveParameterDict('BAD-CIPHERS')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6526,6 +6156,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_virtualhostroot_http_port_unsafe(self):
parameter_dict = self.parseSlaveParameterDict(
'VIRTUALHOSTROOT-HTTP-PORT-UNSAFE')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6538,6 +6169,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_virtualhostroot_https_port_unsafe(self):
parameter_dict = self.parseSlaveParameterDict(
'VIRTUALHOSTROOT-HTTPS-PORT-UNSAFE')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6551,6 +6183,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
parameter_dict = self.parseSlaveParameterDict('DEFAULT-PATH-UNSAFE')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict, 'master-')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'defaultpathunsafe.example.com',
......@@ -6582,20 +6215,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
)
def test_monitor_ipv4_test_unsafe(self):
parameter_dict = self.parseSlaveParameterDict('MONITOR-IPV4-TEST-UNSAFE')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'monitoripv4testunsafe.example.com',
'replication_number': '1',
'url': 'http://monitoripv4testunsafe.example.com',
'site_url': 'http://monitoripv4testunsafe.example.com',
'secure_access': 'https://monitoripv4testunsafe.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('MONITOR-IPV4-TEST-UNSAFE')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -6626,20 +6246,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
)
def test_monitor_ipv6_test_unsafe(self):
parameter_dict = self.parseSlaveParameterDict('MONITOR-IPV6-TEST-UNSAFE')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'monitoripv6testunsafe.example.com',
'replication_number': '1',
'url': 'http://monitoripv6testunsafe.example.com',
'site_url': 'http://monitoripv6testunsafe.example.com',
'secure_access': 'https://monitoripv6testunsafe.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
parameter_dict = self.assertSlaveBase('MONITOR-IPV6-TEST-UNSAFE')
result = fakeHTTPSResult(
parameter_dict['domain'], 'test-path')
......@@ -6668,23 +6275,11 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
)
def test_site_1(self):
parameter_dict = self.parseSlaveParameterDict('SITE_1')
self.assertLogAccessUrlWithPop(parameter_dict)
self.assertKedifaKeysWithPop(parameter_dict)
self.assertEqual(
{
'domain': 'duplicate.example.com',
'replication_number': '1',
'url': 'http://duplicate.example.com',
'site_url': 'http://duplicate.example.com',
'secure_access': 'https://duplicate.example.com',
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict
)
self.assertSlaveBase('SITE_1', hostname='duplicate')
def test_site_2(self):
parameter_dict = self.parseSlaveParameterDict('SITE_2')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': ["custom_domain 'duplicate.example.com' clashes"]
......@@ -6694,6 +6289,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_site_3(self):
parameter_dict = self.parseSlaveParameterDict('SITE_3')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': ["server-alias 'duplicate.example.com' clashes"]
......@@ -6703,6 +6299,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_site_4(self):
parameter_dict = self.parseSlaveParameterDict('SITE_4')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': ["custom_domain 'duplicate.example.com' clashes"]
......@@ -6712,7 +6309,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_ssl_ca_crt_only(self):
parameter_dict = self.parseSlaveParameterDict('SSL_CA_CRT_ONLY')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
parameter_dict,
{
......@@ -6726,6 +6323,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_ssl_key_ssl_crt_unsafe(self):
parameter_dict = self.parseSlaveParameterDict('SSL_KEY-SSL_CRT-UNSAFE')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': ["slave ssl_key and ssl_crt does not match"],
......@@ -6738,6 +6336,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_bad_backend(self):
parameter_dict = self.parseSlaveParameterDict('BAD-BACKEND')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6749,6 +6348,7 @@ class TestSlaveRejectReportUnsafeDamaged(SlaveHttpFrontendTestCase):
def test_empty_backend(self):
parameter_dict = self.parseSlaveParameterDict('EMPTY-BACKEND')
self.assertNodeInformationWithPop(parameter_dict)
self.assertEqual(
{
'request-error-list': [
......@@ -6791,36 +6391,10 @@ class TestSlaveHostHaproxyClash(SlaveHttpFrontendTestCase, TestDataMixin):
}
def test(self):
parameter_dict_wildcard = self.parseSlaveParameterDict('wildcard')
self.assertLogAccessUrlWithPop(parameter_dict_wildcard)
self.assertKedifaKeysWithPop(parameter_dict_wildcard, '')
hostname = '*.alias1'
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict_wildcard
)
parameter_dict_specific = self.parseSlaveParameterDict('zspecific')
self.assertLogAccessUrlWithPop(parameter_dict_specific)
self.assertKedifaKeysWithPop(parameter_dict_specific, '')
hostname = 'zspecific.alias1'
self.assertEqual(
{
'domain': '%s.example.com' % (hostname,),
'replication_number': '1',
'url': 'http://%s.example.com' % (hostname, ),
'site_url': 'http://%s.example.com' % (hostname, ),
'secure_access': 'https://%s.example.com' % (hostname, ),
'backend-client-caucase-url': 'http://[%s]:8990' % self._ipv6_address,
},
parameter_dict_specific
)
self.assertSlaveBase(
'wildcard', hostname='*.alias1')
self.assertSlaveBase(
'zspecific', hostname='zspecific.alias1')
result_wildcard = fakeHTTPSResult(
'other.alias1.example.com',
......@@ -7100,6 +6674,9 @@ class TestPassedRequestParameter(HttpFrontendTestCase):
're6st-verification-url': 're6st-verification-url',
'request-timeout': '100',
'root_instance_title': 'testing partition 0',
'slap_computer_id': 'local',
'slap_computer_partition_id': 'T-0',
'slap_software_release_url': base_software_url,
'slap_software_type': 'RootSoftwareInstance',
'slave_instance_list': []
}
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_dummy-cached_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_dummy-cached_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_dummy-cached_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_dummy-cached_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/etc/cron.d/logrotate
T-0/etc/cron.d/monitor-configurator
T-0/etc/cron.d/monitor-globalstate
T-0/etc/cron.d/monitor_collect
T-1/etc/cron.d/logrotate
T-1/etc/cron.d/monitor-configurator
T-1/etc/cron.d/monitor-globalstate
T-1/etc/cron.d/monitor_collect
T-2/etc/cron.d/logrotate
T-2/etc/cron.d/monitor-configurator
T-2/etc/cron.d/monitor-globalstate
T-2/etc/cron.d/monitor_collect
T-2/etc/cron.d/trafficserver-logrotate
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/monitor-httpd-access.log
T-2/var/log/monitor-httpd-error.log
T-2/var/log/slave-introspection-access.log
T-2/var/log/slave-introspection-error.log
T-2/var/log/trafficserver/manager.log
T-0/etc/plugin/__init__.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
T-0/etc/plugin/check-backend-haproxy-statistic-url-caddy-frontend-1.py
T-0/etc/plugin/check-free-disk-space.py
T-0/etc/plugin/monitor-bootstrap-status.py
T-0/etc/plugin/monitor-http-frontend.py
T-0/etc/plugin/monitor-httpd-listening-on-tcp.py
T-0/etc/plugin/rejected-slave-publish-ip-port-listening.py
T-0/etc/plugin/rejected-slave.py
T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
T-1/etc/plugin/monitor-httpd-listening-on-tcp.py
T-1/etc/plugin/promise-logrotate-setup.py
T-2/etc/plugin/__init__.py
T-2/etc/plugin/backend-client-caucase-updater.py
T-2/etc/plugin/backend-haproxy-configuration.py
T-2/etc/plugin/backend-haproxy-statistic-frontend.py
T-2/etc/plugin/backend_haproxy_http.py
T-2/etc/plugin/backend_haproxy_https.py
T-2/etc/plugin/buildout-T-2-status.py
T-2/etc/plugin/caddy_frontend_ipv4_http.py
T-2/etc/plugin/caddy_frontend_ipv4_https.py
T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
T-2/etc/plugin/monitor-httpd-listening-on-tcp.py
T-2/etc/plugin/promise-logrotate-setup.py
T-2/etc/plugin/re6st-connectivity.py
T-2/etc/plugin/slave-introspection-configuration.py
T-2/etc/plugin/slave_introspection_https.py
T-2/etc/plugin/trafficserver-cache-availability.py
T-2/etc/plugin/trafficserver-port-listening.py
T-0/var/run/monitor-httpd.pid
T-1/var/run/kedifa.pid
T-1/var/run/monitor-httpd.pid
T-2/var/run/backend-haproxy-rsyslogd.pid
T-2/var/run/backend-haproxy.pid
T-2/var/run/backend_haproxy_configuration_last_state
T-2/var/run/backend_haproxy_graceful_configuration_state_signature
T-2/var/run/bhlog.sck
T-2/var/run/graceful_configuration_state_signature
T-2/var/run/httpd.pid
T-2/var/run/monitor-httpd.pid
T-2/var/run/slave-introspection.pid
T-2/var/run/slave_introspection_configuration_last_state
T-2/var/run/slave_introspection_graceful_configuration_state_signature
T-0:bootstrap-monitor EXITED
T-0:caucased-backend-client-{hash-generic}-on-watch RUNNING
T-0:certificate_authority-{hash-generic}-on-watch RUNNING
T-0:crond-{hash-generic}-on-watch RUNNING
T-0:monitor-httpd-{hash-generic}-on-watch RUNNING
T-0:monitor-httpd-graceful EXITED
T-0:rejected-slave-publish-{hash-rejected-slave-publish}-on-watch RUNNING
T-1:bootstrap-monitor EXITED
T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
T-1:monitor-httpd-graceful EXITED
T-2:6tunnel-11080-{hash-generic}-on-watch RUNNING
T-2:6tunnel-11443-{hash-generic}-on-watch RUNNING
T-2:backend-client-login-certificate-caucase-updater-on-watch RUNNING
T-2:backend-haproxy-{hash-generic}-on-watch RUNNING
T-2:backend-haproxy-rsyslogd-{hash-generic}-on-watch RUNNING
T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
T-2:kedifa-updater-{hash-generic}-on-watch RUNNING
T-2:monitor-httpd-{hash-generic}-on-watch RUNNING
T-2:monitor-httpd-graceful EXITED
T-2:slave-instrospection-nginx-{hash-generic}-on-watch RUNNING
T-2:slave-introspection-safe-graceful EXITED
T-2:trafficserver-{hash-generic}-on-watch RUNNING
T-2:trafficserver-reload EXITED
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/monitor-httpd-access.log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/monitor-httpd-access.log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_default_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_default_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_replicate_access_log
......@@ -18,7 +18,7 @@ T-2/var/log/slave-introspection-access.log
T-2/var/log/slave-introspection-error.log
T-2/var/log/trafficserver/manager.log
T-3/var/log/backend-haproxy.log
T-3/var/log/expose-csr_id.log
T-3/var/log/expose-csr.log
T-3/var/log/frontend-access.log
T-3/var/log/frontend-error.log
T-3/var/log/httpd/_replicate_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -15,7 +17,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -34,7 +36,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......@@ -58,7 +60,7 @@ T-3/etc/plugin/caddy_frontend_ipv6_http.py
T-3/etc/plugin/caddy_frontend_ipv6_https.py
T-3/etc/plugin/caucase-updater.py
T-3/etc/plugin/check-free-disk-space.py
T-3/etc/plugin/expose-csr_id-ip-port-listening.py
T-3/etc/plugin/expose-csr-ip-port-listening.py
T-3/etc/plugin/frontend-caddy-configuration-promise.py
T-3/etc/plugin/monitor-bootstrap-status.py
T-3/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......@@ -46,7 +46,7 @@ T-3:backend-haproxy-safe-graceful EXITED
T-3:bootstrap-monitor EXITED
T-3:certificate_authority-{hash-generic}-on-watch STOPPED
T-3:crond-{hash-generic}-on-watch STOPPED
T-3:expose-csr_id-{hash-generic}-on-watch STOPPED
T-3:expose-csr-{hash-generic}-on-watch STOPPED
T-3:frontend-caddy-safe-graceful EXITED
T-3:frontend_caddy-{hash-caddy-T-3}-on-watch STOPPED
T-3:kedifa-login-certificate-caucase-updater-on-watch STOPPED
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_Url_access_log
......@@ -58,9 +58,9 @@ T-2/var/log/httpd/_enable_cache-disable-no-cache-request_error_log
T-2/var/log/httpd/_enable_cache-disable-via-header_access_log
T-2/var/log/httpd/_enable_cache-disable-via-header_backend_log
T-2/var/log/httpd/_enable_cache-disable-via-header_error_log
T-2/var/log/httpd/_enable_cache-https-only_access_log
T-2/var/log/httpd/_enable_cache-https-only_backend_log
T-2/var/log/httpd/_enable_cache-https-only_error_log
T-2/var/log/httpd/_enable_cache-https-only-false_access_log
T-2/var/log/httpd/_enable_cache-https-only-false_backend_log
T-2/var/log/httpd/_enable_cache-https-only-false_error_log
T-2/var/log/httpd/_enable_cache_access_log
T-2/var/log/httpd/_enable_cache_backend_log
T-2/var/log/httpd/_enable_cache_custom_domain_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -35,7 +37,7 @@ T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-_monitor-ipv4-test-ipv4-packet-list-test.py
T-2/etc/plugin/check-_monitor-ipv6-test-ipv6-packet-list-test.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_default_ciphers_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_Url_access_log
......@@ -58,9 +58,9 @@ T-2/var/log/httpd/_enable_cache-disable-no-cache-request_error_log
T-2/var/log/httpd/_enable_cache-disable-via-header_access_log
T-2/var/log/httpd/_enable_cache-disable-via-header_backend_log
T-2/var/log/httpd/_enable_cache-disable-via-header_error_log
T-2/var/log/httpd/_enable_cache-https-only_access_log
T-2/var/log/httpd/_enable_cache-https-only_backend_log
T-2/var/log/httpd/_enable_cache-https-only_error_log
T-2/var/log/httpd/_enable_cache-https-only-false_access_log
T-2/var/log/httpd/_enable_cache-https-only-false_backend_log
T-2/var/log/httpd/_enable_cache-https-only-false_error_log
T-2/var/log/httpd/_enable_cache_access_log
T-2/var/log/httpd/_enable_cache_backend_log
T-2/var/log/httpd/_enable_cache_custom_domain_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -35,7 +37,7 @@ T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-_monitor-ipv4-test-ipv4-packet-list-test.py
T-2/etc/plugin/check-_monitor-ipv6-test-ipv6-packet-list-test.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_health-check-connect_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_wildcard_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_custom_domain_ssl_crt_ssl_key_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_ssl_from_master_kedifa_overrides_master_certificate_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
T-0/var/log/monitor-httpd-access.log
T-0/var/log/monitor-httpd-error.log
T-0/var/log/slapgrid-T-0-error.log
T-1/var/log/expose-csr_id.log
T-1/var/log/expose-csr.log
T-1/var/log/kedifa.log
T-1/var/log/monitor-httpd-access.log
T-1/var/log/monitor-httpd-error.log
T-2/var/log/backend-haproxy.log
T-2/var/log/expose-csr_id.log
T-2/var/log/expose-csr.log
T-2/var/log/frontend-access.log
T-2/var/log/frontend-error.log
T-2/var/log/httpd/_ssl_from_master_access_log
......
T-0/etc/plugin/__init__.py
T-0/etc/plugin/aibcc-sign-promise.py
T-0/etc/plugin/aibcc-user-caucase-updater.py
T-0/etc/plugin/aikc-sign-promise.py
T-0/etc/plugin/aikc-user-caucase-updater.py
T-0/etc/plugin/buildout-T-0-status.py
T-0/etc/plugin/caucased-backend-client.py
......@@ -14,7 +16,7 @@ T-1/etc/plugin/__init__.py
T-1/etc/plugin/buildout-T-1-status.py
T-1/etc/plugin/caucased.py
T-1/etc/plugin/check-free-disk-space.py
T-1/etc/plugin/expose-csr_id-ip-port-listening.py
T-1/etc/plugin/expose-csr-ip-port-listening.py
T-1/etc/plugin/kedifa-http-reply.py
T-1/etc/plugin/monitor-bootstrap-status.py
T-1/etc/plugin/monitor-http-frontend.py
......@@ -33,7 +35,7 @@ T-2/etc/plugin/caddy_frontend_ipv6_http.py
T-2/etc/plugin/caddy_frontend_ipv6_https.py
T-2/etc/plugin/caucase-updater.py
T-2/etc/plugin/check-free-disk-space.py
T-2/etc/plugin/expose-csr_id-ip-port-listening.py
T-2/etc/plugin/expose-csr-ip-port-listening.py
T-2/etc/plugin/frontend-caddy-configuration-promise.py
T-2/etc/plugin/monitor-bootstrap-status.py
T-2/etc/plugin/monitor-http-frontend.py
......
......@@ -12,7 +12,7 @@ T-1:caucase-updater-on-watch RUNNING
T-1:caucased-{hash-generic}-on-watch RUNNING
T-1:certificate_authority-{hash-generic}-on-watch RUNNING
T-1:crond-{hash-generic}-on-watch RUNNING
T-1:expose-csr_id-{hash-generic}-on-watch RUNNING
T-1:expose-csr-{hash-generic}-on-watch RUNNING
T-1:kedifa-{hash-generic}-on-watch RUNNING
T-1:kedifa-reloader EXITED
T-1:monitor-httpd-{hash-generic}-on-watch RUNNING
......@@ -26,7 +26,7 @@ T-2:backend-haproxy-safe-graceful EXITED
T-2:bootstrap-monitor EXITED
T-2:certificate_authority-{hash-generic}-on-watch RUNNING
T-2:crond-{hash-generic}-on-watch RUNNING
T-2:expose-csr_id-{hash-generic}-on-watch RUNNING
T-2:expose-csr-{hash-generic}-on-watch RUNNING
T-2:frontend-caddy-safe-graceful EXITED
T-2:frontend_caddy-{hash-caddy-T-2}-on-watch RUNNING
T-2:kedifa-login-certificate-caucase-updater-on-watch RUNNING
......
[buildout]
extends =
../../stack/slapos.cfg
../../component/macros/virtual-env.cfg
parts =
instance
slapos-cookbook
[python]
part = python3
[django-env]
<= virtual-env-base
location = ${buildout:directory}/activate
eggs = Django
[instance]
recipe = slapos.recipe.template
output = ${buildout:directory}/instance.cfg
inline =
[buildout]
parts = publish
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
[publish]
recipe = slapos.cookbook:publish
activate-script = ${django-env:location}
[versions]
Django = 3.2.12
sqlparse = 0.4.2
pytz = 2021.3
asgiref = 3.3.2
typing-extensions = 4.1.1:whl
......@@ -111,6 +111,16 @@
"default": false,
"type": "boolean"
},
"character-set-server": {
"description": "The server default character set",
"default": "utf8mb4",
"type": "string"
},
"collation-server": {
"description": "The server default collation",
"default": "utf8mb4_general_ci",
"type": "string"
},
"ssl": {
"description": "Enable and define SSL support for network connections",
"default": {},
......
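The two new keys extend the MariaDB instance-parameter schema shown above; a minimal sketch of request parameters overriding them (values are only examples, any character set and collation accepted by the server can be passed):

import json

# Hypothetical instance parameters; the key names come from the schema hunk above.
parameter_dict = {
    "character-set-server": "utf8mb4",
    "collation-server": "utf8mb4_unicode_ci",
}
print(json.dumps(parameter_dict, indent=2))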
......@@ -25,7 +25,7 @@ eggs =
<= go-git-package
go.importpath = lab.nexedi.com/nexedi/galene
repository = https://lab.nexedi.com/nexedi/galene.git
revision = galene-0.4
revision = galene-0.5
[gowork]
install =
......
......@@ -27,9 +27,6 @@ part = python3
[nodejs]
<= nodejs-14.16.0
[yarn]
<= yarn-1.22.10
[go_github.com_grafana_grafana]
<= go-git-package
......
......@@ -71,7 +71,7 @@ stop-on-error = true
command = set -e
cd ${jio-repository.git:location}
PATH=${git:location}/bin:${nodejs:location}/bin:$PATH
${nodejs:location}/bin/npm install jslint@0.9.2 jison@0.4.16 git://github.com/qunitjs/node-qunit.git#v0.9.3 sinon@1.7.3
${nodejs:location}/bin/npm install jslint@0.9.2 jison@0.4.16 https://github.com/qunitjs/node-qunit.git#v0.9.3 sinon@1.7.3
make
update-command = ${:command}
......
......@@ -19,7 +19,7 @@ md5sum = 087f226ba90928dcc5a722d7008c867a
[template-kvm]
filename = instance-kvm.cfg.jinja2
md5sum = d949e6100fd4e01819cc2867a65c6e65
md5sum = 2ff55931eab48f7992e8e1cb16b44b95
[template-kvm-cluster]
filename = instance-kvm-cluster.cfg.jinja2.in
......@@ -55,7 +55,7 @@ md5sum = a8cf453d20f01c707f02c4b4014580d8
[template-kvm-run]
filename = template/template-kvm-run.in
md5sum = 875261817970d0f83335824373288b9d
md5sum = 395ee373ccda3382d257fde1ff4222b0
[template-kvm-controller]
filename = template/kvm-controller-run.in
......@@ -79,11 +79,11 @@ md5sum = d57764bb7135037b4d21543b2f56ce1d
[image-download-controller]
filename = template/image-download-controller.py
md5sum = 9c67058edcc4edae0b57956c0932a9fc
md5sum = 4d48b3da5bc611fc6533335b5953c840
[image-download-config-creator]
filename = template/image-download-config-creator.py
md5sum = 54261e418ab9860efe73efd514c4d47f
md5sum = 8fbe05c4175a7f31b6bffced9ad4e91d
[whitelist-firewall-download-controller]
filename = template/whitelist-firewall-download-controller.py
......
......@@ -162,7 +162,8 @@ config-filename = ${boot-image-url-select-json-config:error-state-file}
# wrapper to execute boot-image-url-select-download on each run
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:scripts}/boot-image-url-select-updater
command-line = {{ python_executable }} {{ image_download_controller }} ${boot-image-url-select-json-config:rendered} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${boot-image-url-select-processed-config:processed-md5sum}
command-line = {{ python_executable }} {{ image_download_controller }} ${:config} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${boot-image-url-select-processed-config:processed-md5sum}
config = ${boot-image-url-select-json-config:rendered}
md5sum-state-filename = boot-image-url-select-download-controller-md5sum-fail.json
md5sum-state-file = ${directory:boot-image-url-select-expose}/${:md5sum-state-filename}
error-state-filename = boot-image-url-select-download-controller-error.text
......@@ -258,7 +259,8 @@ config-filename = ${boot-image-url-list-json-config:error-state-file}
# wrapper to execute boot-image-url-list-download on each run
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:scripts}/boot-image-url-list-updater
command-line = {{ python_executable }} {{ image_download_controller }} ${boot-image-url-list-json-config:rendered} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${boot-image-url-list-processed-config:processed-md5sum}
command-line = {{ python_executable }} {{ image_download_controller }} ${:config} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${boot-image-url-list-processed-config:processed-md5sum}
config = ${boot-image-url-list-json-config:rendered}
md5sum-state-filename = boot-image-url-list-download-controller-md5sum-fail.json
md5sum-state-file = ${directory:boot-image-url-list-expose}/${:md5sum-state-filename}
error-state-filename = boot-image-url-list-download-controller-error.text
......@@ -355,7 +357,8 @@ config-filename = ${virtual-hard-drive-url-json-config:error-state-file}
# wrapper to execute virtual-hard-drive-url-download on each run
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:scripts}/virtual-hard-drive-url-updater
command-line = {{ python_executable }} {{ image_download_controller }} ${virtual-hard-drive-url-json-config:rendered} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${virtual-hard-drive-url-processed-config:processed-md5sum}
command-line = {{ python_executable }} {{ image_download_controller }} ${:config} {{ curl_executable_location }} ${:md5sum-state-file} ${:error-state-file} ${virtual-hard-drive-url-processed-config:processed-md5sum}
config = ${virtual-hard-drive-url-json-config:rendered}
md5sum-state-filename = virtual-hard-drive-url-download-controller-md5sum-fail.json
md5sum-state-file = ${directory:virtual-hard-drive-url-expose}/${:md5sum-state-filename}
error-state-filename = virtual-hard-drive-url-download-controller-error.text
......@@ -547,13 +550,13 @@ command = [ ! -f {{ '${' + key + '}' }} ] && touch {{ '${' + key + '}' }}
{%- endmacro %}
{#- Create dependency sections; state files appear late, so it's better to have an empty file, which will impact the hash anyway #}
{%- if boot_image_url_list_enabled %}
{{ generate_depend_section('boot-image-url-list-depend', 'boot-image-url-list-download-wrapper:md5sum-state-file') }}
{{ generate_depend_section('boot-image-url-list-depend', 'boot-image-url-list-download-wrapper:config') }}
{%- endif %}
{%- if boot_image_url_select_enabled %}
{{ generate_depend_section('boot-image-url-select-depend', 'boot-image-url-select-download-wrapper:md5sum-state-file') }}
{{ generate_depend_section('boot-image-url-select-depend', 'boot-image-url-select-download-wrapper:config') }}
{%- endif %}
{%- if virtual_hard_drive_url_enabled %}
{{ generate_depend_section('virtual-hard-drive-url-depend', 'virtual-hard-drive-url-download-wrapper:md5sum-state-file') }}
{{ generate_depend_section('virtual-hard-drive-url-depend', 'virtual-hard-drive-url-download-wrapper:config') }}
{%- endif %}
[kvm-instance]
......@@ -879,6 +882,7 @@ ipv6-network-info =
{% if use_tap and slap_configuration.get('tap-ipv4-addr') -%}
{% do part_list.append('network-config-ipv4') %}
[network-config-ipv4]
recipe = plone.recipe.command
filename = netconfig.sh
......
......@@ -51,7 +51,7 @@ if __name__ == "__main__":
'url': url,
'destination': md5sum,
'destination-tmp': md5sum + '_tmp',
'link': 'image_%03i' % (image_number,),
'image-number': '%03i' % (image_number,),
})
else:
print('INF: checksum %s repeated, used url %s' % (md5sum, url))
......
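With this change the config creator records a zero-padded image-number instead of a pre-computed link name; a rough sketch of one generated entry, using only the fields visible in the hunk above (the URL and content are placeholders, any additional fields kept by the real script are omitted):

import hashlib
import json

url = 'https://example.com/image.iso'  # placeholder download URL
image_number = 1
md5sum = hashlib.md5(b'fake_image_content').hexdigest()

entry = {
    'url': url,
    'destination': md5sum,
    'destination-tmp': md5sum + '_tmp',
    'image-number': '%03i' % (image_number,),  # '001', '002', ... keeps ordering stable
}
print(json.dumps(entry, indent=2))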
......@@ -41,11 +41,13 @@ if __name__ == "__main__":
print('ERR: There are problems with configuration')
print('INF: Storing errors in %s' % (error_state_file,))
# switch to error state during image download
with open(error_state_file, 'w') as fh:
fh.write('\n'.join(['INF Download in progress']))
# clean the destination directory
file_to_keep_list = []
for image in config['image-list']:
file_to_keep_list.append(image['destination'])
file_to_keep_list.append(image['link'])
for fname in os.listdir(config['destination-directory']):
if fname not in file_to_keep_list:
print('INF: Removing obsolete %s' % (fname,))
......@@ -118,20 +120,6 @@ if __name__ == "__main__":
os.rename(destination_tmp, destination)
print('INF: %s : Stored with checksum %s' % (
image['url'], image['md5sum']))
for image in config['image-list']:
destination = os.path.join(
config['destination-directory'], image['destination'])
link = os.path.join(config['destination-directory'], image['link'])
if os.path.exists(destination):
if os.path.lexists(link):
if not os.path.islink(link):
os.remove(link)
if os.path.islink(link) and os.readlink(link) != destination:
os.remove(link)
if not os.path.lexists(link):
print('INF: %s : Symlinking %s -> %s' % (
image['url'], link, destination))
os.symlink(destination, link)
with open(md5sum_fail_file, 'w') as fh:
if new_md5sum_state_dict != {}:
json.dump(new_md5sum_state_dict, fh, indent=2)
......
......@@ -329,6 +329,22 @@ if cpu_model:
if rgx.match(cpu_model):
kvm_argument_list.extend(['-cpu', cpu_model])
def handle_image(config, name):
with open(config) as fh:
image_config = json.load(fh)
if image_config['error-amount'] == 0:
for image in sorted(image_config['image-list'], key=lambda k: k['image-number']):
destination = os.path.join(image_config['destination-directory'], image['destination'])
if os.path.exists(destination):
kvm_argument_list.extend([
'-drive',
'file=%s,media=cdrom' % (destination,)
])
else:
raise ValueError('%s not ready yet' % (name,))
else:
raise ValueError('%s not ready yet' % (name,))
# Try to connect to NBD server (and second nbd if defined).
# If not available, don't even specify it in qemu command line parameters.
# Reason: if qemu starts with unavailable NBD drive, it will just crash.
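handle_image reads the JSON state produced by the download controller and, when error-amount is 0, appends one cdrom drive per already-downloaded image, ordered by the new image-number key; a minimal sketch of the expected input and the resulting qemu arguments (directory, checksum and file content are placeholders):

import json
import os
import tempfile

destination_directory = tempfile.mkdtemp()
checksum = 'c89f17758be13adeb06886ef935d5ff1'  # placeholder, stands for the image md5sum

# Hypothetical controller state: no download errors, one image already on disk.
state = {
    'error-amount': 0,
    'destination-directory': destination_directory,
    'image-list': [
        {'image-number': '001', 'destination': checksum},
    ],
}
open(os.path.join(destination_directory, checksum), 'w').close()
config_path = os.path.join(destination_directory, 'state.json')
with open(config_path, 'w') as fh:
    json.dump(state, fh)

# handle_image(config_path, 'boot-image-url-list') would then extend kvm_argument_list with
#   ['-drive', 'file=%s/%s,media=cdrom' % (destination_directory, checksum)]
# and would raise ValueError('boot-image-url-list not ready yet') if the file were missing
# or error-amount were non-zero.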
......@@ -350,33 +366,10 @@ else:
# Debian installation CDs, rendering it uninstallable
if boot_image_url_select_json_config:
# Support boot-image-url-select
with open(boot_image_url_select_json_config) as fh:
image_config = json.load(fh)
if image_config['error-amount'] == 0:
for image in sorted(image_config['image-list'], key=lambda k: k['link']):
link = os.path.join(image_config['destination-directory'], image['link'])
if os.path.exists(link) and os.path.islink(link):
kvm_argument_list.extend([
'-drive',
'file=%s,media=cdrom' % (link,)
])
else:
raise ValueError('boot-image-url-select not ready yet')
handle_image(boot_image_url_select_json_config, 'boot-image-url-select')
if boot_image_url_list_json_config:
# Support boot-image-url-list
with open(boot_image_url_list_json_config) as fh:
image_config = json.load(fh)
if image_config['error-amount'] == 0:
for image in sorted(image_config['image-list'], key=lambda k: k['link']):
link = os.path.join(image_config['destination-directory'], image['link'])
if os.path.exists(link) and os.path.islink(link):
kvm_argument_list.extend([
'-drive',
'file=%s,media=cdrom' % (link,)
])
else:
raise ValueError('boot-image-url-list not ready yet')
handle_image(boot_image_url_list_json_config, 'boot-image-url-list')
# Always add by default the default image
kvm_argument_list.extend([
'-drive', 'file=%s,media=cdrom' % default_cdrom_iso
......
......@@ -36,7 +36,6 @@ import requests
import six
import slapos.util
import sqlite3
import stat
from six.moves.urllib.parse import parse_qs, urlparse
import unittest
import subprocess
......@@ -443,6 +442,12 @@ class TestAccessDefaultBootstrap(MonitorAccessMixin, InstanceTestCase):
result.status_code
)
self.assertIn('<title>noVNC</title>', result.text)
# check that expected files to configure the VM are exposed by the instance
self.assertEqual(
['delDefaultIface', 'netconfig.sh'],
sorted(os.listdir(os.path.join(
self.computer_partition_root_path, 'srv', 'public')))
)
@skipUnlessKvm
......@@ -746,49 +751,61 @@ class FakeImageHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
class FakeImageServerMixin(KvmMixin):
def startImageHttpServer(self):
self.image_source_directory = tempfile.mkdtemp()
@classmethod
def startImageHttpServer(cls):
cls.image_source_directory = tempfile.mkdtemp()
server = SocketServer.TCPServer(
(self._ipv4_address, findFreeTCPPort(self._ipv4_address)),
(cls._ipv4_address, findFreeTCPPort(cls._ipv4_address)),
FakeImageHandler)
# c89f17758be13adeb06886ef935d5ff1
fake_image_content = b'fake_image_content'
self.fake_image_md5sum = hashlib.md5(fake_image_content).hexdigest()
cls.fake_image_md5sum = hashlib.md5(fake_image_content).hexdigest()
with open(os.path.join(
self.image_source_directory, self.fake_image_md5sum), 'wb') as fh:
cls.image_source_directory, cls.fake_image_md5sum), 'wb') as fh:
fh.write(fake_image_content)
# bc81d2aee81e030c6cee210c802339c2
fake_image2_content = b'fake_image2_content'
self.fake_image2_md5sum = hashlib.md5(fake_image2_content).hexdigest()
cls.fake_image2_md5sum = hashlib.md5(fake_image2_content).hexdigest()
with open(os.path.join(
self.image_source_directory, self.fake_image2_md5sum), 'wb') as fh:
cls.image_source_directory, cls.fake_image2_md5sum), 'wb') as fh:
fh.write(fake_image2_content)
self.fake_image_wrong_md5sum = self.fake_image2_md5sum
cls.fake_image_wrong_md5sum = cls.fake_image2_md5sum
# c5ef5d70ad5a0dbfd890a734f588e344
fake_image3_content = b'fake_image3_content'
cls.fake_image3_md5sum = hashlib.md5(fake_image3_content).hexdigest()
with open(os.path.join(
cls.image_source_directory, cls.fake_image3_md5sum), 'wb') as fh:
fh.write(fake_image3_content)
url = 'http://%s:%s' % server.server_address
self.fake_image = '/'.join([url, self.fake_image_md5sum])
self.fake_image2 = '/'.join([url, self.fake_image2_md5sum])
cls.fake_image = '/'.join([url, cls.fake_image_md5sum])
cls.fake_image2 = '/'.join([url, cls.fake_image2_md5sum])
cls.fake_image3 = '/'.join([url, cls.fake_image3_md5sum])
old_dir = os.path.realpath(os.curdir)
os.chdir(self.image_source_directory)
os.chdir(cls.image_source_directory)
try:
self.server_process = multiprocessing.Process(
cls.server_process = multiprocessing.Process(
target=server.serve_forever, name='FakeImageHttpServer')
self.server_process.start()
cls.server_process.start()
finally:
os.chdir(old_dir)
def stopImageHttpServer(self):
self.logger.debug('Stopping process %s' % (self.server_process,))
self.server_process.join(10)
self.server_process.terminate()
@classmethod
def stopImageHttpServer(cls):
cls.logger.debug('Stopping process %s' % (cls.server_process,))
cls.server_process.join(10)
cls.server_process.terminate()
time.sleep(0.1)
if self.server_process.is_alive():
self.logger.warning(
'Process %s still alive' % (self.server_process, ))
if cls.server_process.is_alive():
cls.logger.warning(
'Process %s still alive' % (cls.server_process, ))
shutil.rmtree(self.image_source_directory)
shutil.rmtree(cls.image_source_directory)
@skipUnlessKvm
......@@ -799,6 +816,7 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
# variations
key = 'boot-image-url-list'
test_input = "%s#%s\n%s#%s"
empty_input = ""
image_directory = 'boot-image-url-list-repository'
config_state_promise = 'boot-image-url-list-config-state-promise.py'
download_md5sum_promise = 'boot-image-url-list-download-md5sum-promise.py'
......@@ -824,12 +842,21 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
@classmethod
def getInstanceParameterDict(cls):
# start with empty, but working configuration
return {}
return {
cls.key: cls.test_input % (
cls.fake_image, cls.fake_image_md5sum, cls.fake_image2,
cls.fake_image2_md5sum)
}
def setUp(self):
super(InstanceTestCase, self).setUp()
self.startImageHttpServer()
@classmethod
def setUpClass(cls):
cls.startImageHttpServer()
super(InstanceTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
super(InstanceTestCase, cls).tearDownClass()
cls.stopImageHttpServer()
def tearDown(self):
# clean up the instance for other tests
......@@ -839,7 +866,6 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
# 2nd ...move instance to "default" state
self.rerequestInstance({})
self.slap.waitForInstance(max_retry=10)
self.stopImageHttpServer()
super(InstanceTestCase, self).tearDown()
def getRunningImageList(self, kvm_instance_partition,
......@@ -856,57 +882,56 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
m = _match_cdrom(entry)
if m:
path = m.group(1)
st = os.stat(path)
if stat.S_ISREG(st.st_mode) and st.st_size:
image_list.append(
_sub_iso(r'\1-${ver}\3',
sub_shared(r'${shared}/',
path.replace(kvm_instance_partition, '${inst}')
)))
image_list.append(
_sub_iso(r'\1-${ver}\3',
sub_shared(r'${shared}/',
path.replace(kvm_instance_partition, '${inst}')
)))
return image_list
def test(self):
partition_parameter_kw = {
self.key: self.test_input % (
self.fake_image, self.fake_image_md5sum, self.fake_image2,
self.fake_image2_md5sum)
}
self.rerequestInstance(partition_parameter_kw)
self.slap.waitForInstance(max_retry=10)
# check that image is correctly downloaded and linked
# check that image is correctly downloaded
kvm_instance_partition = os.path.join(
self.slap.instance_directory, self.kvm_instance_partition_reference)
image_repository = os.path.join(
kvm_instance_partition, 'srv', self.image_directory)
image = os.path.join(image_repository, self.fake_image_md5sum)
image_link = os.path.join(image_repository, 'image_001')
self.assertTrue(os.path.exists(image))
with open(image, 'rb') as fh:
image_md5sum = hashlib.md5(fh.read()).hexdigest()
self.assertEqual(image_md5sum, self.fake_image_md5sum)
self.assertTrue(os.path.islink(image_link))
self.assertEqual(os.readlink(image_link), image)
image2 = os.path.join(image_repository, self.fake_image2_md5sum)
image2_link = os.path.join(image_repository, 'image_002')
self.assertTrue(os.path.exists(image2))
with open(image2, 'rb') as fh:
image2_md5sum = hashlib.md5(fh.read()).hexdigest()
self.assertEqual(image2_md5sum, self.fake_image2_md5sum)
self.assertTrue(os.path.islink(image2_link))
self.assertEqual(os.readlink(image2_link), image2)
# mimic the requirement: restart the instance by requesting it stopped and
# then started, like the user has to do it
self.rerequestInstance(partition_parameter_kw, state='stopped')
self.slap.waitForInstance(max_retry=1)
self.rerequestInstance(partition_parameter_kw, state='started')
self.slap.waitForInstance(max_retry=3)
self.assertEqual(
[
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image_md5sum),
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum),
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
)
# Switch image
self.rerequestInstance({
self.key: self.test_input % (
self.fake_image3, self.fake_image3_md5sum,
self.fake_image2, self.fake_image2_md5sum)
})
self.slap.waitForInstance(max_retry=10)
self.assertTrue(os.path.exists(os.path.join(
image_repository, self.fake_image3_md5sum)))
self.assertTrue(os.path.exists(os.path.join(
image_repository, self.fake_image2_md5sum)))
self.assertEqual(
[
'${inst}/srv/%s/image_001' % self.image_directory,
'${inst}/srv/%s/image_002' % self.image_directory,
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image3_md5sum),
'${inst}/srv/%s/%s' % (self.image_directory, self.fake_image2_md5sum),
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
......@@ -914,21 +939,16 @@ class TestBootImageUrlList(InstanceTestCase, FakeImageServerMixin):
# cleanup of images works, also asserts that configuration changes are
# reflected
partition_parameter_kw[self.key] = ''
self.rerequestInstance(partition_parameter_kw)
self.slap.waitForInstance(max_retry=2)
# Note: the key is kept and empty_input is provided, as otherwise the part
# which generates the images is simply removed, which can lead to
# leftovers
self.rerequestInstance({self.key: self.empty_input})
self.slap.waitForInstance(max_retry=10)
self.assertEqual(
os.listdir(image_repository),
[]
)
# mimic the requirement: restart the instance by requesting it stopped and
# then started, like the user has to do it
self.rerequestInstance(partition_parameter_kw, state='stopped')
self.slap.waitForInstance(max_retry=1)
self.rerequestInstance(partition_parameter_kw, state='started')
self.slap.waitForInstance(max_retry=3)
# again only the default image is available in the running process
self.assertEqual(
['${shared}/debian-${ver}-amd64-netinst.iso'],
......@@ -1013,6 +1033,7 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
# variations
key = 'boot-image-url-select'
test_input = '["%s#%s", "%s#%s"]'
empty_input = '[]'
image_directory = 'boot-image-url-select-repository'
config_state_promise = 'boot-image-url-select-config-state-promise.py'
download_md5sum_promise = 'boot-image-url-select-download-md5sum-promise.py'
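As a side note, the two test_input format strings show how the parameters differ: boot-image-url-list takes newline-separated url#md5sum entries, while boot-image-url-select takes a JSON list of url#md5sum strings. A small hand-written sketch of filling them in (URL and checksum are placeholders, not values from the test server):

# Illustrative only: filling the test_input templates defined above.
fake_image = 'http://10.0.0.1:8080/c89f17758be13adeb06886ef935d5ff1'
fake_md5sum = 'c89f17758be13adeb06886ef935d5ff1'
# boot-image-url-list: newline-separated "url#md5sum" entries
boot_image_url_list = "%s#%s" % (fake_image, fake_md5sum)
# boot-image-url-select: JSON list of "url#md5sum" strings
boot_image_url_select = '["%s#%s"]' % (fake_image, fake_md5sum)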
......@@ -1048,35 +1069,27 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
}
self.rerequestInstance(partition_parameter_kw)
self.slap.waitForInstance(max_retry=10)
# check that image is correctly downloaded and linked
# check that image is correctly downloaded
for image_directory in [
'boot-image-url-list-repository', 'boot-image-url-select-repository']:
image_repository = os.path.join(
self.slap.instance_directory, self.kvm_instance_partition_reference,
'srv', image_directory)
image = os.path.join(image_repository, self.fake_image_md5sum)
image_link = os.path.join(image_repository, 'image_001')
self.assertTrue(os.path.exists(image))
with open(image, 'rb') as fh:
image_md5sum = hashlib.md5(fh.read()).hexdigest()
self.assertEqual(image_md5sum, self.fake_image_md5sum)
self.assertTrue(os.path.islink(image_link))
self.assertEqual(os.readlink(image_link), image)
kvm_instance_partition = os.path.join(
self.slap.instance_directory, self.kvm_instance_partition_reference)
# mimic the requirement: restart the instance by requesting it stopped and
# then started, like the user has to do it
self.rerequestInstance(partition_parameter_kw, state='stopped')
self.slap.waitForInstance(max_retry=1)
self.rerequestInstance(partition_parameter_kw, state='started')
self.slap.waitForInstance(max_retry=3)
self.assertEqual(
[
'${inst}/srv/boot-image-url-select-repository/image_001',
'${inst}/srv/boot-image-url-list-repository/image_001',
'${inst}/srv/boot-image-url-select-repository/%s' % (
self.fake_image_md5sum,),
'${inst}/srv/boot-image-url-list-repository/%s' % (
self.fake_image_md5sum,),
'${shared}/debian-${ver}-amd64-netinst.iso',
],
self.getRunningImageList(kvm_instance_partition)
......@@ -1107,13 +1120,6 @@ class TestBootImageUrlSelect(TestBootImageUrlList):
[]
)
# mimic the requirement: restart the instance by requesting it stopped and
# then started, like the user has to do it
self.rerequestInstance(partition_parameter_kw, state='stopped')
self.slap.waitForInstance(max_retry=1)
self.rerequestInstance(partition_parameter_kw, state='started')
self.slap.waitForInstance(max_retry=3)
# again only the default image is available in the running process
self.assertEqual(
['${shared}/debian-${ver}-amd64-netinst.iso'],
......@@ -1480,7 +1486,7 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
'destination-tmp': 'tmp',
'url': self.fake_image,
'destination': 'destination',
'link': 'image_001',
'image-number': '001',
'gzipped': False,
'md5sum': self.fake_image_md5sum,
}
......@@ -1495,12 +1501,10 @@ class TestImageDownloadController(InstanceTestCase, FakeImageServerMixin):
INF: Storing errors in %(error_state_file)s
INF: %(fake_image)s : Downloading
INF: %(fake_image)s : Stored with checksum %(checksum)s
INF: %(fake_image)s : Symlinking %(symlink)s -> %(destination)s
""".strip() % {
'fake_image': self.fake_image,
'checksum': self.fake_image_md5sum,
'error_state_file': self.error_state_file,
'symlink': os.path.join(self.destination_directory, 'image_001'),
'destination': os.path.join(self.destination_directory, 'destination'),
})
)
......@@ -1527,7 +1531,6 @@ INF: %(fake_image)s : already downloaded
'fake_image': self.fake_image,
'checksum': self.fake_image_md5sum,
'error_state_file': self.error_state_file,
'symlink': os.path.join(self.destination_directory, 'image_001'),
'destination': os.path.join(self.destination_directory, 'destination'),
})
)
......@@ -1542,7 +1545,7 @@ INF: %(fake_image)s : already downloaded
'destination-tmp': 'tmp',
'url': self.fake_image,
'destination': 'destination',
'link': 'image_001',
'image-number': '001',
'gzipped': False,
'md5sum': self.fake_image_wrong_md5sum,
}
......@@ -1560,7 +1563,6 @@ INF: %(fake_image)s : Downloading
""". strip() % {
'fake_image': self.fake_image,
'error_state_file': self.error_state_file,
'symlink': os.path.join(self.destination_directory, 'image_001'),
'destination': os.path.join(
self.destination_directory, 'destination'),
})
......@@ -1596,7 +1598,6 @@ INF: Storing errors in %(error_state_file)s
""". strip() % {
'fake_image': self.fake_image,
'error_state_file': self.error_state_file,
'symlink': os.path.join(self.destination_directory, 'image_001'),
'destination': os.path.join(
self.destination_directory, 'destination'),
})
......
<VirtualHost *:{{ parameter_dict['port'] }}>
ServerAdmin admin@example.com
DocumentRoot {{ parameter_dict['document-root'] }}/matomo
SetEnvIf Origin "^http(s)?://(.+\.)?(app\.officejs\.com)$" ORIGIN_DOMAIN=$0
Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Credentials "true" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Methods "PROPFIND, PROPPATCH, COPY, MOVE, DELETE, MKCOL, LOCK, UNLOCK, PUT, GETLIB, VERSION-CONTROL, CHECKIN, CHECKOUT, UNCHECKOUT, REPORT, UPDATE, CANCELUPLOAD, HEAD, OPTIONS, GET, POST" env=ORIGIN_DOMAIN
Header always set Access-Control-Allow-Headers "Overwrite, Destination, Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Authorization" env=ORIGIN_DOMAIN
Header always set Strict-Transport-Security "max-age=15552000; includeSubDomains"
<Directory {{ parameter_dict['document-root'] }}>
Options +FollowSymlinks
AllowOverride All
Require all granted
SetEnv HOME {{ parameter_dict['document-root'] }}
SetEnv HTTP_HOME {{ parameter_dict['document-root'] }}
Dav off
</Directory>
ErrorLog "{{ parameter_dict['log-dir'] }}/matomo-error.log"
CustomLog "{{ parameter_dict['log-dir'] }}/matomo-access.log" combined
</VirtualHost>
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginings, copied verbatim
# - lines containing an "=" sign which must fit in the following categorie.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
[template-apache-httpd]
filename = apache-httpd.conf.in
md5sum = 9940e05d5e624a7884f4e6e062355798
[template-matomo-instance]
filename = matomo-instance.cfg.in
md5sum = 34925db56b5f97e5a29080fb83fbefa8
[template-matomo-backup.sh]
filename = matomo-backup.sh.in
md5sum = fb29ad59813ef62c3f5934d4a0d90e14
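The md5sum values recorded above are refreshed by the re-generation script mentioned in the header comment; as a rough sketch (a hypothetical helper, not the actual tool used in this repository), such a checksum can be recomputed like this:

import hashlib

def file_md5sum(path, chunk_size=65536):
  # Stream the file so large templates do not have to fit in memory.
  digest = hashlib.md5()
  with open(path, 'rb') as fh:
    for chunk in iter(lambda: fh.read(chunk_size), b''):
      digest.update(chunk)
  return digest.hexdigest()

# e.g. file_md5sum('apache-httpd.conf.in') should match the value recorded
# next to 'filename = apache-httpd.conf.in' above.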
#!/bin/sh
set -e
set -x
#check if the matomo directory and its resources exist
if [ ! -d {{ parameter_dict['document-root'] }}/matomo/config ]; then
exit 1;
fi
if [ ! -f {{ parameter_dict['document-root'] }}/matomo/config/config.ini.php ]; then
exit 0;
fi
trap 'rm -rf "$TMPFILE"' EXIT TERM INT
#keep backups for one month only: remove backup folders older than 30 days
{{ parameter_dict['find-bin'] }}/bin/find {{ parameter_dict['dir-backup'] }} -mtime +30 -type d |{{ parameter_dict['find-bin'] }}/bin/xargs rm -rf
#backup
TMPFILE=$(mktemp -d -p "{{ parameter_dict['dir-backup'] }}")
{{ php_bin }} {{ parameter_dict['document-root'] }}/matomo/console plugin:list > $TMPFILE/plugins_list
cp -rf {{ parameter_dict['document-root'] }}/matomo/config $TMPFILE/config
cp -rf {{ parameter_dict['document-root'] }}/matomo/plugins $TMPFILE/plugins
#check that the copy finished well
if test ! -z "$({{ parameter_dict['diff-bin'] }}/bin/diff -r $TMPFILE/config {{ parameter_dict['document-root'] }}/matomo/config)"; then
exit 1
fi
if test ! -z "$({{ parameter_dict['diff-bin'] }}/bin/diff -r $TMPFILE/plugins {{ parameter_dict['document-root'] }}/matomo/plugins)"; then
exit 1
fi
#check if the file plugins_list has been created
if [ ! -f $TMPFILE/plugins_list ]; then
exit 1
fi
#if everything went well, rename the temporary folder to the current date
if [ -d $TMPFILE ]; then
mv -f $TMPFILE {{ parameter_dict['dir-backup'] }}/`date +%m-%d-%Y-%T`
fi
exit 0
# parameters required by the configuration instance
[instance-parameter]
matomo = ${:document-root}
dir-backup = ${directory:backup}
find-bin = {{ findutils_location }}
diff-bin = {{ diffutils_location }}
#php.ini parameters
php.memory_limit = 512M
php.date.timezone = Europe/Paris
php.upload_max_filesize = 10240M
php.post_max_size = 10240M
php.session.cookie_secure = True
php.max_execution_time = 1800
php.max_input_time = 3600
php.output_buffering = 'Off'
php.max_file_uploads = 100
[php-bin]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:bin}/php
command-line = ${instance-parameter:php-bin} -c ${php.ini-conf:rendered}
[matomo-backup-cron]
recipe = slapos.cookbook:cron.d
cron-entries = ${cron:cron-entries}
name = matomo-backup
frequency = 0 0 * * *
command = ${matomo-backup.sh:rendered}
[matomo-apache-httpd]
recipe = slapos.recipe.template:jinja2
template = {{ matomo_apache_httpd }}
rendered = ${directory:apache.d}/matomo.conf
context =
section parameter_dict apache-php-configuration
[matomo-backup.sh]
recipe = slapos.recipe.template:jinja2
template = {{ matomo_backup_sh }}
rendered = ${directory:scripts}/matomo-backup
context =
section parameter_dict instance-parameter
key php_bin php-bin:wrapper-path
[slap-parameter]
instance.cli-url = ${apache-php-configuration:url}
[buildout]
extends =
buildout.hash.cfg
# LAMP stands for Linux, Apache, MySQL, PHP
../../stack/lamp/buildout.cfg
# "slapos" stack describes basic things needed for 99.9% of SlapOS Software
../../stack/slapos.cfg
../../component/diffutils/buildout.cfg
parts =
# Call installation of slapos.cookbook egg defined in stack/slapos.cfg (needed
# in 99.9% of SlapOS Software Releases)
slapos-cookbook
# to create the instance-matomo.cfg file in the apache-php instance
template-matomo-instance
# to create the instance.cfg file for all instances
instance
# to make sure we use python3
[python]
part = python3
# download base
# inherited by modules that need to download files
[matomo-download]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:filename}
# download matomo
# The specific process of downloading and decompressing is defined in the lamp stack
[application]
url = https://builds.matomo.org/matomo-4.7.1.zip
md5sum = 8d592676bc2c0d51363ad7b2caf171fe
# give the location of the instance-matomo.cfg file
# Without it, the instance-matomo.cfg file will not be executed
[custom-application-deployment]
path = ${template-matomo-instance:rendered}
part-list = matomo-backup.sh matomo-backup-cron matomo-apache-httpd
[template-matomo-instance]
recipe = slapos.recipe.template:jinja2
template = ${:_profile_base_location_}/${:filename}
rendered = ${buildout:directory}/instance-matomo.cfg
extensions = jinja2.ext.do
context =
key findutils_location findutils:location
key diffutils_location diffutils:location
key gzip_location gzip:location
key python3_location python3:location
key php_location apache-php:location
key matomo_apache_httpd template-apache-httpd:target
key matomo_backup_sh template-matomo-backup.sh:target
# download apache-httpd.conf.in
[template-apache-httpd]
<= matomo-download
# download matomo-backup.sh.in
[template-matomo-backup.sh]
<= matomo-download
Tests for the matomo software release
##############################################################################
#
# Copyright (c) 2018 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from setuptools import setup, find_packages
version = '0.0.1.dev0'
name = 'slapos.test.matomo'
with open("README.md") as f:
long_description = f.read()
setup(
name=name,
version=version,
description="Test for SlapOS' matomo",
long_description=long_description,
long_description_content_type='text/markdown',
maintainer="Nexedi",
maintainer_email="info@nexedi.com",
url="https://lab.nexedi.com/nexedi/slapos",
packages=find_packages(),
install_requires=[
'slapos.core',
'slapos.libnetworkcache',
'erp5.util',
'requests',
],
zip_safe=True,
test_suite='test',
)
##############################################################################
# coding: utf-8
#
# Copyright (c) 2022 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import requests
import glob
from slapos.testing.testcase import makeModuleSetUpAndTestCaseClass
setUpModule, SlapOSInstanceTestCase = makeModuleSetUpAndTestCaseClass(
os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'software.cfg')))
class MatomoTestCase(SlapOSInstanceTestCase):
#check where matomo is installed
def setUp(self):
partition_path_list = glob.glob(os.path.join(self.slap.instance_directory, '*'))
for partition_path in partition_path_list:
path = os.path.join(partition_path, 'srv/www')
if os.path.exists(path):
self.matomo_path = path
break
self.assertTrue(self.matomo_path,"matomo path not found in %r" % (partition_path_list,))
self.connection_parameters = self.computer_partition.getConnectionParameterDict()
#Check that the matomo root path is a directory, not a regular file
def test_matomo_dir(self):
self.assertEqual(os.path.isfile(self.matomo_path),False)
#Check that the matomo deployment works
def test_matomo_url_get(self):
resp = requests.get(self.connection_parameters['backend-url'], verify=False)
self.assertEqual(requests.codes.ok, resp.status_code)
#Check that the monitor deployment works
def test_monitor_url_get(self):
resp = requests.get(self.connection_parameters['monitor-setup-url'], verify=False)
self.assertEqual(requests.codes.ok, resp.status_code)
......@@ -28,11 +28,11 @@ md5sum = e4c224da723ad56091f27ed5c0b0bbca
[template-lte-gnb-epc]
_update_hash_filename_ = instance-gnb-epc.jinja2.cfg
md5sum = b9a58fa4037d32fc1dc4f5ef89e6211a
md5sum = b15e678779dee0a26746487990fedc01
[template-lte-gnb]
_update_hash_filename_ = instance-gnb.jinja2.cfg
md5sum = 0b74993990a0dfa3c6429dc4ac716826
md5sum = 9c275dde5c485c05f92a9be053f10593
[template-lte-epc]
_update_hash_filename_ = instance-epc.jinja2.cfg
......@@ -48,7 +48,7 @@ md5sum = 8cac0de54f54236e750ee85b98de8a31
[gnb.jinja2.cfg]
filename = config/gnb.jinja2.cfg
md5sum = 28cc9fc7b1fa7cccb16315a732d9a15f
md5sum = 655186dae112b1baf561ae320ed86eef
[ltelogs.jinja2.sh]
filename = ltelogs.jinja2.sh
......
......@@ -17,7 +17,7 @@
#define USE_SRS 0
{
log_options: "all.level=debug,all.max_size=32",
log_options: "all.level=info,all.max_size=32,file.rotate=1G,file.path={{ directory['tmp'] }}",
log_filename: "{{ directory['log'] }}/gnb.log",
/* Enable remote API and Web interface */
......@@ -42,7 +42,7 @@
],
/* GTP bind address (=address of the ethernet interface connected to
the AMF). Must be modified if the AMF runs on a different host. */
{% if slapparameter_dict.get('mme_addr', '') %}
{% if slapparameter_dict.get('amf_addr', '') %}
gtp_addr: "{{ gtp_addr }}",
{% else %}
gtp_addr: "127.0.1.1",
......
......@@ -79,11 +79,11 @@ config-nr_band = {{ dumps(slapparameter_dict["nr_band"]) }}
{% if slapparameter_dict.get("nr_bandwidth", None) %}
config-nr_bandwidth = {{ dumps(slapparameter_dict["nr_bandwidth"]) }}
{% endif %}
{% if slapparameter_dict.get("mme_addr", None) %}
config-mme_addr = {{ dumps(slapparameter_dict["mme_addr"]) }}
{% if slapparameter_dict.get("amf_addr", None) %}
config-amf_addr = {{ dumps(slapparameter_dict["amf_addr"]) }}
{% endif %}
{% if slapparameter_dict.get("enb_id", None) %}
config-enb_id = {{ dumps(slapparameter_dict["enb_id"]) }}
{% if slapparameter_dict.get("gnb_id", None) %}
config-gnb_id = {{ dumps(slapparameter_dict["gnb_id"]) }}
{% endif %}
{% if slapparameter_dict.get("gnb_config_link", None) %}
config-gnb_config_link = {{ dumps(slapparameter_dict["gnb_config_link"]) }}
......
......@@ -5,6 +5,7 @@ parts =
lte-gnb-config
lte-enb-service
sdr-busy-promise
remove-tmp
monitor-base
publish-connection-information
......@@ -14,6 +15,11 @@ eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[remove-tmp]
# Remove old logs stored in tmp directory to prevent disk from becoming full
recipe = plone.recipe.command
command = rm -rf ${directory:tmp}/*
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration
computer = {{ slap_connection['computer-id'] }}
......@@ -45,6 +51,7 @@ etc = ${:home}/etc
var = ${:home}/var
etc = ${:home}/etc
bin = ${:home}/bin
tmp = ${:home}/tmp
run = ${:var}/run
script = ${:etc}/run
service = ${:etc}/service
......
......@@ -18,7 +18,7 @@ md5sum = 71531ed9c9b79fa769ab367e7ea2d2a5
[template-re6stnet]
filename = instance-re6stnet.cfg.in
md5sum = 98f86d2a10d909215ae88ba6a602da27
md5sum = a9cd303d17c4f07ad11fc8099afeed24
[template-apache-conf]
filename = apache.conf.in
......@@ -26,4 +26,4 @@ md5sum = 3d55f7c9c4fc7279f06bfe6313a78a4b
[template-re6st-registry-conf]
filename = re6st-registry.conf.in
md5sum = feb4b3318f37414d1bf3d16a03aec93d
md5sum = 28ffb7a742e293139814f1324ff003ff
......@@ -84,6 +84,11 @@
"title": "Git backup repository",
"description": "URL of Git repository where backups are pushed. Nothing is pushed if empty.",
"type": "string"
},
"community-conf": {
"title": "Community configuration",
"textarea": true,
"type": "string"
}
}
}
......@@ -83,6 +83,15 @@ hello = {{ slapparameter_dict.get('hello', 15) }}
min-protocol = {{ slapparameter_dict.get('min-protocol', -1) }}
encrypt = {{ slapparameter_dict.get('encrypt', 'False') }}
same-country = {{ slapparameter_dict.get('same-country', '') }}
{%- set community = slapparameter_dict.get('community-conf') %}
{%- if community %}
community-path = ${re6st-community-conf:output}
[re6st-community-conf]
recipe = slapos.recipe.template
inline = {{ dumps(community) }}
output = ${re6stnet-dirs:conf}/community.conf
{%- endif %}
[re6st-registry-conf]
recipe = slapos.recipe.template:jinja2
......@@ -90,20 +99,6 @@ template = {{ parameter_dict['template-re6st-registry-conf'] }}
rendered = ${re6stnet-dirs:conf}/registry.conf
context = section parameter_dict re6st-registry-conf-dict
depends = ${re6st-compat:recipe}
[re6st-compat]
recipe = slapos.recipe.build
update =
import errno, os
res6stnet = self.buildout['directory']['srv'] + '/res6stnet'
try:
os.rename(res6stnet + '/registry', self.buildout['re6stnet-dirs']['data'])
except OSError as e:
if e.errno != errno.ENOENT:
raise
else:
os.rmdir(res6stnet)
[re6st-registry-wrapper]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:services}/re6st-registry
......
......@@ -30,3 +30,6 @@ max-clients {{ parameter_dict['max-clients'] }}
{% if parameter_dict.get('same-country') -%}
same-country {{ parameter_dict['same-country'] }}
{% endif -%}
{% if parameter_dict.get('community-path') -%}
community {{ parameter_dict['community-path'] }}
{% endif -%}
......@@ -6,6 +6,7 @@
"default": {
"title": "Default",
"description": "Re6st registry",
"serialisation": "json-in-xml",
"request": "instance-re6stnet-input-schema.json",
"response": "instance-re6stnet-output-schema.json",
"index": 0
......
......@@ -26,3 +26,4 @@ extra =
restic-rest-server ${slapos.test.restic_rest_server-setup:setup}
headless-chromium ${slapos.test.headless-chromium-setup:setup}
hugo ${slapos.test.hugo-setup:setup}
matomo ${slapos.test.matomo-setup:setup}
......@@ -145,6 +145,12 @@ setup = ${slapos-repository:location}/software/html5as-base/test/
egg = slapos.test.hugo
setup = ${slapos-repository:location}/software/hugo/test/
[slapos.test.matomo-setup]
<= setup-develop-egg
egg = slapos.test.matomo
setup = ${slapos-repository:location}/software/matomo/test/
[slapos.test.jupyter-setup]
<= setup-develop-egg
egg = slapos.test.jupyter
......@@ -282,6 +288,7 @@ extra-eggs =
${slapos.test.headless-chromium-setup:egg}
${slapos.test.erp5testnode-setup:egg}
${slapos.test.hugo-setup:egg}
${slapos.test.matomo-setup:egg}
# We don't name this interpreter `python`, so that when we run slapos node
# software, installation scripts running `python` use a python without any
......
......@@ -15,7 +15,7 @@
[instance-theia]
_update_hash_filename_ = instance-theia.cfg.jinja.in
md5sum = 4df9f0d76a134a8abec9060a0c1be50b
md5sum = f8dde54b49aa62669c588913d21c6b15
[instance]
_update_hash_filename_ = instance.cfg.in
......@@ -31,7 +31,7 @@ md5sum = b3f1dd83033d6a45def0bd26e70d5a9c
[instance-resilient]
_update_hash_filename_ = instance-resilient.cfg.jinja
md5sum = b1e338973bc9cfe1bb4e16d46b3c6da9
md5sum = ad9499e7355ded4975ad313442cecb7a
[theia-common]
_update_hash_filename_ = theia_common.py
......@@ -47,7 +47,7 @@ md5sum = 1a668d6203d42b4d46d56e24c7606cb2
[python-language-server-requirements.txt]
_update_hash_filename_ = python-language-server-requirements.txt
md5sum = e16f1414f2657fa5ffc949839207f41b
md5sum = febc3e1e18e8e831ac5561e29c3b23d7
[slapos.css.in]
_update_hash_filename_ = slapos.css.in
......
......@@ -17,6 +17,10 @@
"backend-url": {
"description": "Theia Backend URL",
"type": "string"
},
"ipv6": {
"description": "A Theia ipv6 address",
"type": "string"
}
},
"type": "object"
......
......@@ -33,7 +33,7 @@ parts +=
# Ask for the connection parameters of the main theia
[request-theia]
return += url username password backend-url
return += url username password backend-url ipv6
# Publish connection parameters of the main theia and resiliency parameters
......@@ -43,6 +43,7 @@ url = ${request-theia:connection-url}
username = ${request-theia:connection-username}
password = ${request-theia:connection-password}
backend-url = ${request-theia:connection-backend-url}
ipv6 = ${request-theia:connection-ipv6}
monitor-base-url = ${monitor-publish:monitor-base-url}
monitor-setup-url = ${monitor-publish:monitor-setup-url}
<= publish-connection-information
......@@ -37,7 +37,7 @@ additional-url = $${remote-additional-frontend:connection-secure_access}
username = $${frontend-instance-password:username}
password = $${frontend-instance-password:passwd}
backend-url = $${frontend-instance:url}
ipv6 = {{ ipv6_random }}
[directory]
recipe = slapos.cookbook:mkdirectory
......@@ -172,7 +172,7 @@ slave = true
config-url = $${frontend-instance:url}
config-https-only = true
config-type = websocket
config-websocket-path-list = /services /file-upload
config-websocket-path-list = /services /socket.io
return = domain secure_access
[remote-frontend]
......@@ -252,7 +252,7 @@ template =
proxy /services $${theia-instance:base-url} {
websocket
}
proxy /file-upload $${theia-instance:base-url} {
proxy /socket.io $${theia-instance:base-url} {
websocket
}
basicauth $${frontend-instance-password:username} $${frontend-instance-password:passwd} {
......@@ -353,7 +353,7 @@ template =
#!/bin/sh
export HOME=$${directory:home}
export PATH=${python-language-server:location}/bin:${java-jdk:location}/bin:${cli-utilities:PATH}:$HOME/.cargo/bin:$PATH
export IPV6_SLAPRUNNER={{ ipv6_random }}
# Theia Backend
# -------------
......@@ -728,7 +728,8 @@ template =
"$${directory:runner}/**":true,
"$${directory:project}/runner/**":true
},
"git.terminalAuthentication": false
"git.terminalAuthentication": false,
"security.workspace.trust.startupPrompt": "once"
}
[runner-link]
......
appdirs==1.4.4
astroid==2.6.2
astroid==2.11.2
attrs==21.2.0
black==20.8b1
cachetools==4.2.4
certifi==2021.5.30
chardet==4.0.0
click==7.1.2
dill==0.3.4
future==0.18.2
idna==2.10
importlib-metadata==3.10.0
iniconfig==1.1.1
isort==5.9.1
isort==5.10.1
jedi==0.18.0
lazy-object-proxy==1.6.0
mccabe==0.6.1
lazy-object-proxy==1.7.1
mccabe==0.7.0
mypy-extensions==0.4.3
mypy==0.930
mypy==0.942
packaging==21.0
parso==0.8.2
pathspec==0.8.1
platformdirs==2.5.1
pluggy==0.13.1
pydantic==1.8.2
pydocstyle==6.0.0
pyflags==0.1
pyflakes==2.1.0
pygls==0.11.1
pylint==2.9.3
pylint==2.13.1
pyparsing==2.4.7
regex==2021.4.4
requests==2.25.1
rope==0.11.0
six==1.16.0
snowballstemmer==1.2.1
tomli==1.2.3
tomli==2.0.1
typed-ast==1.4.1
typeguard==2.12.1
typing-extensions==4.0.1
urllib3==1.26.6
wrapt==1.12.1
types-requests==2.25.0
types-toml==0.1.3
typing-extensions==4.1.1
urllib3==1.26.9
wrapt==1.14.0
yapf==0.31.0
zc.buildout.languageserver==0.6.2
zc.buildout.languageserver==0.7.0
zc.buildout==2.13.4
zipp==3.4.1
\ No newline at end of file
......@@ -44,10 +44,7 @@ shared-part-list =
max_version = 0
[nodejs]
<= nodejs-14.16.0
[yarn]
<= yarn-1.17.3
<= nodejs-16.13.2
[gowork]
install +=
......
......@@ -125,6 +125,9 @@ class TestTheia(TheiaTestCase):
resp = self.get(urljoin(authenticated_url, url))
self.assertTrue(resp.raw)
def test_ipv6_parameter_published(self):
self.assertIn('ipv6', self.connection_parameters)
def test_theia_slapos(self):
# Make sure we can use the shell and the integrated slapos command
process = pexpect.spawnu(
......
......@@ -15,7 +15,7 @@
[instance-cfg]
filename = instance.cfg.in
md5sum = b43d5e8d1fc2d0eeb54f91cefe6a5bae
md5sum = 403fe1e79e20ab87589092ce7429e73b
[template-turnserver]
filename = instance-turnserver.cfg.jinja2.in
......
......@@ -43,8 +43,8 @@ default-parameters =
"server-name" : "turn.example.com",
"ssl-key": "",
"ssl-crt": "",
"port": "3478",
"tls-port": "5349",
"port": 3478,
"tls-port": 5349,
"external-ip": ""
# listening-ip parameter is mandatory
#"listening-ip": null,
......
......@@ -26,7 +26,7 @@ md5sum = d10b8e35b02b5391cf46bf0c7dbb1196
[template-mariadb]
filename = instance-mariadb.cfg.in
md5sum = 2daa3aaa1263705c4628864230049c67
md5sum = b664a2221077db5db498422b6c90f015
[template-kumofs]
filename = instance-kumofs.cfg.in
......@@ -34,7 +34,7 @@ md5sum = cfe4696a67bf4886a5d8252a5274a941
[template-zope-conf]
filename = zope.conf.in
md5sum = 153fe68aa92452c3e2076d2ed26ff524
md5sum = b524c9ef4d7deadfd4bd84f2e880f9ed
[site-zcml]
filename = site.zcml
......@@ -42,7 +42,7 @@ md5sum = 43556e5bca8336dd543ae8068512aa6d
[template-my-cnf]
filename = my.cnf.in
md5sum = 1de449e8c0c4a85c5ce2b447785b7654
md5sum = c0bde08ec6bd6d333315a15026266b65
[template-mariadb-initial-setup]
filename = mariadb_initial_setup.sql.in
......@@ -78,7 +78,7 @@ md5sum = f5a1661449c9681b3de7d4af645124ba
[template-zeo]
filename = instance-zeo.cfg.in
md5sum = 3c59315a8f102a970dc54ded85df735a
md5sum = 84a77b40c5562a59a60d2146894890cf
[template-zodb-base]
filename = instance-zodb-base.cfg.in
......
......@@ -142,6 +142,8 @@ innodb-log-file-size = {{ dumps(slapparameter_dict.get('innodb-log-file-size', 0
innodb-file-per-table = {{ dumps(slapparameter_dict.get('innodb-file-per-table', 0)) }}
innodb-log-buffer-size = {{ dumps(slapparameter_dict.get('innodb-log-buffer-size', 0)) }}
relaxed-writes = {{ dumps(slapparameter_dict.get('relaxed-writes', False)) }}
character-set-server = {{ dumps(slapparameter_dict.get('character-set-server', 'utf8mb4')) }}
collation-server = {{ dumps(slapparameter_dict.get('collation-server', 'utf8mb4_general_ci')) }}
{% if incremental_backup_retention_days > -1 -%}
binlog-path = ${directory:mariadb-backup-incremental}/binlog
# XXX: binlog rotation happens along with other log's rotation
......
......@@ -28,7 +28,7 @@ ip = {{ ipv4 }}
{% set current_port = next(ports) -%}
{% set known_tid_storage_identifier_host = (ipv4, current_port), -%}
{% for name, zodb in zodb -%}
{% do storage_dict.__setitem__(name, {'server': ipv4 ~ ':' ~ current_port, 'storage': name}) %}
{% do storage_dict.__setitem__(name, {'server': ipv4 ~ ':' ~ current_port, 'storage': name, 'server-sync': 'true'}) %}
{% set path = zodb.get('path', '%(zodb)s/%(name)s.fs') % {'zodb': default_zodb_path, 'name': name} -%}
{% do storage_list.append((name, path)) -%}
{% set backup_directory = zodb.get('backup', '%(backup)s/%(name)s') % {'backup': default_backup_path, 'name': name} -%}
......
......@@ -76,9 +76,8 @@ relay-log = mariadb-relay-bin
{{x}}innodb_doublewrite = 0
{{x}}sync_frm = 0
# Force utf8 usage
character_set_server = utf8mb4
collation_server = utf8mb4_general_ci
character_set_server = {{ parameter_dict['character-set-server'] }}
collation_server = {{ parameter_dict['collation-server'] }}
skip_character_set_client_handshake
{% if 'ssl-key' in parameter_dict -%}
......
......@@ -129,9 +129,12 @@ trusted-proxy 0.0.0.0
{% for m in parameter_dict['import-list'] -%}
%import {{ m }}
{% endfor -%}
{% set type_dict = {'neo': 'NEOStorage', 'zeo': 'zeoclient'} %}
{% set type_dict = {'neo': 'NEOStorage', 'zeo': 'clientstorage'} %}
{% for name, zodb_dict in six.iteritems(parameter_dict['zodb-dict']) %}
<zodb_db {{ name }}>
{% if zodb_dict['type'] == 'zeo' %}
%import ZEO
{% endif %}
{%- set storage_type = type_dict[zodb_dict.pop('type')] %}
{%- set storage_dict = zodb_dict.pop('storage-dict') %}
{%- do root_common.apply_overrides(zodb_dict, node_id) %}
......
......@@ -132,7 +132,7 @@ eggs =
[versions]
setuptools = 44.1.1
# Use SlapOS patched zc.buildout
zc.buildout = 2.7.1+slapos015
zc.buildout = 2.7.1+slapos018
# Use SlapOS patched zc.recipe.egg (zc.recipe.egg 2.x is for Buildout 2)
zc.recipe.egg = 2.0.3+slapos003
......@@ -189,13 +189,13 @@ setproctitle = 1.1.10
setuptools-dso = 1.7
rubygemsrecipe = 0.4.3
six = 1.12.0
slapos.cookbook = 1.0.226
slapos.core = 1.7.2
slapos.cookbook = 1.0.237
slapos.core = 1.7.5
slapos.extension.strip = 0.4
slapos.extension.shared = 1.0
slapos.libnetworkcache = 0.22
slapos.libnetworkcache = 0.25
slapos.rebootstrap = 4.5
slapos.recipe.build = 0.53
slapos.recipe.build = 0.54
slapos.recipe.cmmi = 0.19
slapos.recipe.template = 5.0
slapos.toolbox = 0.126
......@@ -249,10 +249,12 @@ certifi = 2020.6.20
chardet = 3.0.4
urllib3 = 1.25.9
pkgconfig = 1.5.1
distro = 1.7.0
[versions:python2]
Werkzeug = 1.0.1
distro = 1.6.0
[networkcache]
......