Commit 413b416a authored by Cédric de Saint Martin

Merge branch 'master' into slapos

parents 34a5f2d1 ab932c2e
include CHANGES.txt
include slapos/recipe/erp5/template/site.zcml
include slapos/recipe/generic_zope/template/site.zcml
recursive-include slapos/recipe *.in
recursive-include slapos/recipe *.bin
recursive-include slapos/recipe README.*.txt
[buildout]
parts = boa
[boa-patch-ENOSYS]
recipe = hexagonit.recipe.download
url = http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/www-servers/boa/files/boa-0.94.14_rc21-ENOSYS.patch?revision=1.1
filename = boa-0.94.14_rc21-ENOSYS.patch
md5sum = 7206b342195961501ed1eae38486e5db
download-only = true
[boa]
recipe = slapos.recipe.build
url = http://www.boa.org/boa-0.94.14rc21.tar.gz
md5sum = e24b570bd767a124fcfb40a34d148ba9
patches =
${boa-patch-ENOSYS:location}/${boa-patch-ENOSYS:filename}
slapos_promise =
directory:bin/
file:bin/boa
file:bin/boa_indexer
script =
import shutil
import os
url = self.download(self.options['url'], self.options['md5sum'])
extract_dir = self.extract(url)
workdir = guessworkdir(extract_dir)
self.applyPatchList(self.options.get('patches'), '-p1', cwd=workdir)
call(['./configure'], cwd=workdir)
call(['make'], cwd=workdir)
# Install boa manually (no make install)
os.makedirs('%(location)s/bin/')
shutil.copyfile(workdir + '/src/boa', '%(location)s/bin/boa')
os.chmod('%(location)s/bin/boa', 0755)
shutil.copyfile(workdir + '/src/boa_indexer', '%(location)s/bin/boa_indexer')
os.chmod('%(location)s/bin/boa_indexer', 0755)
[buildout]
parts = busybox
[busybox]
recipe = slapos.recipe.build
url = http://busybox.net/downloads/busybox-1.19.3.tar.bz2
md5sum = c3938e1ac59602387009bbf1dd1af7f6
script =
extract_dir = self.extract(self.download(%(url)r, %(md5sum)r))
workdir = guessworkdir(extract_dir)
self.logger.info("Creating default configuration")
call(['make', 'defconfig'], cwd=workdir, env=env)
self.logger.info("Building")
call(['make'], cwd=workdir, env=env)
self.logger.info("Installing")
call(['make', 'CONFIG_PREFIX=%(location)s', 'install'], cwd=workdir, env=env)
self.logger.info("Installation finished")
# CA certificates
[buildout]
parts =
ca-certificates
[ca-certificates-sbin-dir.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
md5sum = 0b4e7d82ce768823c01954ee41ef177b
filename = ${:_buildout_section_name_}
download-only = true
[ca-certificates]
recipe = hexagonit.recipe.cmmi
version = 20111211
url = ftp://ftp.free.fr/mirrors/ftp.debian.org/pool/main/c/ca-certificates/ca-certificates_${:version}.tar.gz
patches =
${ca-certificates-sbin-dir.patch:location}/${ca-certificates-sbin-dir.patch:filename}
patch-options = -p0
configure-command = true
make-targets = install DESTDIR=${buildout:parts-directory}/${:_buildout_section_name_} CERTSDIR=certs SBINDIR=sbin
--- Makefile.orig 2011-12-11 20:54:02.000000000 +0100
+++ Makefile 2012-01-09 17:36:55.059392824 +0100
@@ -17,7 +17,7 @@
install:
for dir in $(SUBDIRS); do \
- mkdir $(DESTDIR)/$(CERTSDIR)/$$dir; \
+ mkdir -p $(DESTDIR)/$(CERTSDIR)/$$dir; \
$(MAKE) -C $$dir install CERTSDIR=$(DESTDIR)/$(CERTSDIR)/$$dir; \
done
for dir in sbin; do \
--- sbin/Makefile.orig 2011-12-11 20:54:02.000000000 +0100
+++ sbin/Makefile 2012-01-09 17:31:45.207387898 +0100
@@ -3,9 +3,12 @@
#
#
+SBINDIR=/usr/sbin
+
all:
clean:
install:
- install -m755 update-ca-certificates $(DESTDIR)/usr/sbin/
+ mkdir -p $(DESTDIR)/$(SBINDIR)
+ install -m755 update-ca-certificates $(DESTDIR)/$(SBINDIR)
......@@ -11,8 +11,8 @@ parts =
[curl]
recipe = hexagonit.recipe.cmmi
url = http://curl.haxx.se/download/curl-7.21.7.tar.bz2
md5sum = 5f6d50c4d4ee38c57fe37e3cff75adbd
url = http://curl.haxx.se/download/curl-7.24.0.tar.bz2
#md5sum = 5f6d50c4d4ee38c57fe37e3cff75adbd
configure-options =
--disable-static
--disable-ldap
......
--- cyrus-sasl-2.1.23/include/sasl.h 2010-11-25 18:15:05.000000000 +0100
+++ cyrus-sasl-2.1.23/include/sasl.h 2010-11-25 18:15:34.000000000 +0100
@@ -346,7 +346,7 @@
* Mechanisms must ignore callbacks with id's they don't recognize.
*/
unsigned long id;
- int (*proc)(); /* Callback function. Types of arguments vary by 'id' */
+ int (*proc); /* Callback function. Types of arguments vary by 'id' */
void *context;
} sasl_callback_t;
[buildout]
parts = dash
[dash]
recipe = hexagonit.recipe.cmmi
url = http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.7.tar.gz
md5sum = f6cedb10ae7258adb5ab17a10ae80d51
configure-options =
--disable-static
--disable-fnmatch
--disable-glob
......@@ -13,7 +13,7 @@ parts =
[dropbear-userspace-patch]
recipe = hexagonit.recipe.download
md5sum = 89f575b9a9586b04ef9073c9c3af13ae
md5sum = 3d934c2c90e8c57536a4fa2cf8ad216d
url = ${:_profile_base_location_}/${:filename}
filename = userspace.patch
download-only = true
......@@ -25,6 +25,13 @@ url = ${:_profile_base_location_}/${:filename}
filename = ipv6-support.patch
download-only = true
[dropbear-no-shell-check-patch]
recipe = hexagonit.recipe.download
md5sum = bb2ac410bd4cb2b07c23bfcc712e45f7
url = ${:_profile_base_location_}/${:filename}
filename = no-shell-checking.patch
download-only = true
[dropbear]
recipe = hexagonit.recipe.cmmi
md5sum = 0284ea239083f04c8b874e08e1aca243
......@@ -35,7 +42,7 @@ url = http://matt.ucc.asn.au/dropbear/releases/dropbear-0.53.1.tar.bz2
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-zlib=${zlib:location}
CFLAGS="-DENABLE_SINGLEUSER "
CFLAGS="-DENABLE_SINGLEUSER -D__DIRTY_NO_SHELL_CHECKING"
environment =
CPPFLAGS =-I${zlib:location}/include
......@@ -44,6 +51,7 @@ environment =
patches=
${dropbear-userspace-patch:location}/${dropbear-userspace-patch:filename}
${dropbear-ipv6-patch:location}/${dropbear-ipv6-patch:filename}
${dropbear-no-shell-check-patch:location}/${dropbear-no-shell-check-patch:filename}
patch-options=
-p1
diff --git a/svr-auth.c b/svr-auth.c
index 87e3c5e..8b93a22 100644
--- a/svr-auth.c
+++ b/svr-auth.c
@@ -267,6 +267,7 @@ static int checkusername(unsigned char *username, unsigned int userlen) {
usershell = "/bin/sh";
}
+#ifndef __DIRTY_NO_SHELL_CHECKING
/* check the shell is valid. If /etc/shells doesn't exist, getusershell()
* should return some standard shells like "/bin/sh" and "/bin/csh" (this
* is platform-specific) */
@@ -288,6 +289,7 @@ static int checkusername(unsigned char *username, unsigned int userlen) {
goodshell:
endusershell();
+#endif
TRACE(("matching shell"))
TRACE(("uid = %d", ses.authstate.pw_uid))
......@@ -48,7 +48,7 @@ index 87e3c5e..adb2e8b 100644
+ if (svr_opts.singleuser) {
+ m_free(username);
+ /* Get the current login of the user running dropbear */
+ username = m_strdup(getlogin());
+ username = m_strdup(getpwuid(getuid())->pw_name);
+ }
+#endif /* ifdef ENABLE_SINGLEUSER */
servicename = buf_getstring(ses.payload, &servicelen);
......
# File - Determines file type using "magic" numbers
# http://www.darwinsys.com/file/
[buildout]
parts = file
extends =
../zlib/buildout.cfg
[file]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.astron.com/pub/file/file-5.07.tar.gz
md5sum = b8d1f9a8a644067bd0a703cebf3f4858
url = ftp://ftp.astron.com/pub/file/file-5.10.tar.gz
md5sum = 4cea34b087b060772511e066e2038196
configure-options =
--disable-static
environment =
......
......@@ -63,6 +63,7 @@ md5sum = d7cd6a27c8801e66cbaa964a039ecfdb
filename = ecj.jar
[gcc-download]
hack-revision = ${gcc-interconnection-workaround:hack-revision}
recipe = hexagonit.recipe.download
url = http://www.mirrorservice.org/sites/sourceware.org/pub/gcc/releases/gcc-4.5.3/gcc-4.5.3.tar.bz2
md5sum = 8e0b5c12212e185f3e4383106bfa9cc6
......@@ -70,6 +71,7 @@ strip-top-level-dir = True
destination = ${gcc-source:location}
[gcc-java-download]
hack-revision = ${gcc-interconnection-workaround:hack-revision}
recipe = hexagonit.recipe.download
url = http://www.mirrorservice.org/sites/sourceware.org/pub/gcc/releases/gcc-4.5.3/gcc-java-4.5.3.tar.bz2
md5sum = 08e045fdbdc22ac9af3aec3b8d16dbab
......@@ -78,6 +80,7 @@ destination = ${gcc-source:location}
ignore-existing = true
[gcc-source]
hack-revision = ${gcc-interconnection-workaround:hack-revision}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[gcc-multiarch.patch]
......@@ -87,7 +90,15 @@ url = ${:_profile_base_location_}/${:filename}
filename = ${:_buildout_section_name_}
download-only = true
[gcc-java-pre-4.4.patch]
recipe = hexagonit.recipe.download
md5sum = 9a563576126d9fcf234ef29c2fc7df76
url = ${:_profile_base_location_}/${:filename}
filename = ${:_buildout_section_name_}
download-only = true
[gcc-java-minimal]
hack-revision = ${gcc-interconnection-workaround:hack-revision}
depends =
${gcc-download:location}
${gcc-java-download:location}
......@@ -96,6 +107,7 @@ path = ${gcc-source:location}
md5sum = bb3265edf0fa7543e50cedb93e04e427
patches =
${gcc-multiarch.patch:location}/${gcc-multiarch.patch:filename}
${gcc-java-pre-4.4.patch:location}/${gcc-java-pre-4.4.patch:filename}
patch-options = -p2
configure-command = make clean \\; make distclean \\; ./configure
# GMP does not correctly detect the architecture, so it has to be given
......@@ -121,6 +133,7 @@ environment =
make-targets = install -j1
[gcc]
hack-revision = ${gcc-interconnection-workaround:hack-revision}
depends =
${gcc-download:location}
${gcc-java-download:location}
......@@ -151,3 +164,10 @@ environment =
PATH=${zip:location}/bin:%(PATH)s
# make install does not work when several cores are used
make-targets = install -j1
[gcc-interconnection-workaround]
# gcc parts are interconnected, so buildout is not capable of cleaning them up.
# Until gcc is simplified by using a more robust build recipe (like
# slapos.recipe.build), the hack-revision has to be increased each time any of
# the parts which reuse this one gets updated.
hack-revision = 1
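A minimal sketch (not from this repository) of why bumping hack-revision forces a rebuild: zc.buildout records the resolved options of every part in .installed.cfg and reinstalls a part whenever those options change, and since the dependent gcc parts reference ${gcc-interconnection-workaround:hack-revision}, increasing the number changes their options too. The option names and values below are hypothetical.

# Illustrative only: roughly how zc.buildout decides to reinstall a part.
old_options = {'url': 'http://.../gcc-4.5.3.tar.bz2', 'hack-revision': '1'}
new_options = {'url': 'http://.../gcc-4.5.3.tar.bz2', 'hack-revision': '2'}

def needs_reinstall(installed_options, current_options):
    # buildout compares the previously installed options with the freshly
    # resolved ones; any difference invalidates the installed part
    return installed_options != current_options

assert needs_reinstall(old_options, new_options)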
Patch for http://gcc.gnu.org/bugzilla/show_bug.cgi?id=50888
--- a/src/libjava/libjava/prims.cc.orig 2012-01-20 11:30:18.586157610 +0100
+++ b/src/libjava/libjava/prims.cc 2012-01-20 11:30:58.192770947 +0100
@@ -38,6 +38,14 @@
#endif
#ifndef DISABLE_GETENV_PROPERTIES
+#ifdef __GLIBC__
+/* glibc 2.15+ provides even for C++ inline optimized ::isspace etc.
+ Unfortunately those inlines are throw (), and call a function pointer
+ (which is throw () too, but with -fnon-call-exceptions this results
+ in a __cxa_call_unexpected call. This macro disables the optimized
+ version. */
+#define __NO_CTYPE 1
+#endif
#include <ctype.h>
#include <java-props.h>
#define PROCESS_GCJ_PROPERTIES process_gcj_properties()
--- a/src/libjava/prims.cc.orig 2012-01-20 11:30:23.042818341 +0100
+++ b/src/libjava/prims.cc 2012-01-20 11:31:01.389433254 +0100
@@ -38,6 +38,14 @@
#endif
#ifndef DISABLE_GETENV_PROPERTIES
+#ifdef __GLIBC__
+/* glibc 2.15+ provides even for C++ inline optimized ::isspace etc.
+ Unfortunately those inlines are throw (), and call a function pointer
+ (which is throw () too, but with -fnon-call-exceptions this results
+ in a __cxa_call_unexpected call. This macro disables the optimized
+ version. */
+#define __NO_CTYPE 1
+#endif
#include <ctype.h>
#include <java-props.h>
#define PROCESS_GCJ_PROPERTIES process_gcj_properties()
......@@ -4,9 +4,9 @@ parts =
[gdbm]
recipe = hexagonit.recipe.cmmi
version = 1.9.1
version = 1.10
url = ftp://ftp.gnu.org/gnu/gdbm/gdbm-${:version}.tar.gz
md5sum = 59f6e4c4193cb875964ffbe8aa384b58
md5sum = 88770493c2559dc80b561293e39d3570
configure-options =
--disable-static
# install as parts/gdbm/include/gdbm/*.h etc. because some software
......
--- Makefile.in 2002-10-08 16:09:12.000000000 +0000
+++ Makefile.in.nochange 2010-11-03 21:17:38.579435530 +0000
@@ -14,10 +14,6 @@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_DATA = @INSTALL_DATA@
-# File ownership and group
-BINOWN = bin
-BINGRP = bin
-
MAKEINFO = makeinfo
TEXI2DVI = texi2dvi
@@ -131,11 +127,11 @@
$(INSTALL_ROOT)$(includedir) $(INSTALL_ROOT)$(man3dir) \
$(INSTALL_ROOT)$(infodir)
$(LIBTOOL) $(INSTALL) -c libgdbm.la $(INSTALL_ROOT)$(libdir)/libgdbm.la
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) gdbm.h \
+ $(INSTALL_DATA) gdbm.h \
$(INSTALL_ROOT)$(includedir)/gdbm.h
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/gdbm.3 \
+ $(INSTALL_DATA) $(srcdir)/gdbm.3 \
$(INSTALL_ROOT)$(man3dir)/gdbm.3
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/gdbm.info \
+ $(INSTALL_DATA) $(srcdir)/gdbm.info \
$(INSTALL_ROOT)$(infodir)/gdbm.info
install-compat:
@@ -143,9 +139,9 @@
$(INSTALL_ROOT)$(includedir)
$(LIBTOOL) $(INSTALL) -c libgdbm_compat.la \
$(INSTALL_ROOT)$(libdir)/libgdbm_compat.la
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/dbm.h \
+ $(INSTALL_DATA) $(srcdir)/dbm.h \
$(INSTALL_ROOT)$(includedir)/dbm.h
- $(INSTALL_DATA) -o $(BINOWN) -g $(BINGRP) $(srcdir)/ndbm.h \
+ $(INSTALL_DATA) $(srcdir)/ndbm.h \
$(INSTALL_ROOT)$(includedir)/ndbm.h
#libgdbm.a: $(OBJS) gdbm.h
......@@ -13,10 +13,8 @@ parts =
[git]
recipe = hexagonit.recipe.cmmi
# url = http://kernel.org/pub/software/scm/git/git-1.7.4.5.tar.bz2
# Circumvent kernel.org downtime
url = http://ftp.free.fr/mirrors/ftp.kernel.org/software/scm/git/git-1.7.4.5.tar.bz2
md5sum = 2fa6c4c847ed87523cf55de54af457eb
url = http://git-core.googlecode.com/files/git-1.7.8.3.tar.gz
md5sum = 7a4bc5160166537d4da5eb48a7670dff
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
......
......@@ -8,17 +8,15 @@ extends =
../fontconfig/buildout.cfg
../freetype/buildout.cfg
../libpng/buildout.cfg
../libtool/buildout.cfg
../pkgconfig/buildout.cfg
../zlib/buildout.cfg
[graphviz]
recipe = hexagonit.recipe.cmmi
url = http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.26.3.tar.gz
md5sum = 6f45946fa622770c45609778c0a982ee
url = http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.28.0.tar.gz
md5sum = 8d26c1171f30ca3b1dc1b429f7937e58
configure-options =
--with-ltdl-include=${libtool:location}/include
--with-ltdl-lib=${libtool:location}/lib
--with-included-ltdl
--with-pngincludedir=${libpng:location}/include
--with-pnglibdir=${libpng:location}/lib
--with-zincludedir=${zlib:location}/include
......
# mroonga - a MySQL storage engine using full-text search engine groonga
# http://mroonga.github.com/
# groonga - an open-source fulltext search engine and column store
# http://groonga.org/
[buildout]
......@@ -8,9 +7,10 @@ parts =
[groonga]
recipe = hexagonit.recipe.cmmi
url = http://packages.groonga.org/source/groonga/groonga-1.2.8.tar.gz
md5sum = a319b1f3a55cbf250ef5255f5c51ff46
url = http://packages.groonga.org/source/groonga/groonga-1.2.9.tar.gz
md5sum = 47117baa401a3db08362e00f94fced12
configure-options =
--disable-static
--disable-glibtest
--disable-benchmark
--without-mecab
diff -ur groonga-storage-engine-0.4.orig/configure groonga-storage-engine-0.4/configure
--- groonga-storage-engine-0.4.orig/configure 2010-11-24 06:23:50.000000000 +0100
+++ groonga-storage-engine-0.4/configure 2011-01-01 16:01:07.000000000 +0100
@@ -13925,8 +13925,8 @@
as_fn_error "failed to run \"$ac_mysql_config\": $plugindir" "$LINENO" 5
fi
MYSQL_INC="$MYSQL_INC $($ac_mysql_config --include)"
- ac_mysql_major_version="`$ac_mysql_config --version | cut -b 1-3`"
- if test "$ac_mysql_major_version" = "5.1"; then
+ ac_mysql_major_version="`$ac_mysql_config --version | cut -b 1,3`"
+ if test $ac_mysql_major_version -lt 55; then
MYSQL51="-DMYSQL51"
fi
......@@ -3,8 +3,8 @@ parts = haproxy
[haproxy]
recipe = hexagonit.recipe.cmmi
url = http://haproxy.1wt.eu/download/1.4/src/haproxy-1.4.18.tar.gz
md5sum = 4ac88bb1a76c4b84ed4f6131183bedbe
url = http://haproxy.1wt.eu/download/1.4/src/haproxy-1.4.19.tar.gz
md5sum = 41392d738460dbf99295fd928031c6a4
configure-command = true
# If the system is running on Linux 2.6, we use "linux26" as the TARGET,
# otherwise use "generic".
......
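As a side note on the Linux 2.6 comment in the haproxy section above, here is a hypothetical Python sketch of that kind of TARGET selection; the actual profile performs the detection in its own elided configure/make options.

# Hypothetical sketch only: pick the haproxy make TARGET from the running kernel.
import platform

def haproxy_target():
    release = platform.release()                  # e.g. '2.6.32-5-amd64'
    major_minor = '.'.join(release.split('.')[:2])
    return 'linux26' if major_minor == '2.6' else 'generic'

print('make TARGET=%s' % haproxy_target())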
......@@ -25,18 +25,10 @@ md5sum = 3f28ecd9f6722cf2c3238ce6ec3d7a68
download-only = true
filename = imagemagick-6.6.6-1-no-gsx-gsc-probe.patch
[imagemagick-6.6.7-4-without-lzma.patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
path = ${:filename}
md5sum = 04e1b934a58f4b3a83e4df148795b338
download-only = true
filename = imagemagick-6.6.7-4-without-lzma.patch
[imagemagick]
recipe = hexagonit.recipe.cmmi
url = ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-6.7.3-1.tar.bz2
md5sum = 89d378733d89bc61c04bc0fdc140a3a7
url = ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-6.7.3-10.tar.bz2
md5sum = 3c1d1a04b1ed2998217e16001b58069f
configure-options =
--disable-static
--without-x
......@@ -55,6 +47,7 @@ configure-options =
--without-lqr
--without-lzma
--without-openexr
--without-pango
--without-rsvg
--without-wmf
--without-xml
......@@ -66,7 +59,6 @@ configure-options =
patch-options = -p1
patches =
${imagemagick-6.6.6-1-no-gsx-gsc-probe.patch:location}/${imagemagick-6.6.6-1-no-gsx-gsc-probe.patch:filename}
${imagemagick-6.6.7-4-without-lzma.patch:location}/${imagemagick-6.6.7-4-without-lzma.patch:filename}
environment =
PATH=${freetype:location}/bin:${ghostscript:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${fontconfig:location}/lib/pkgconfig
......
--- ImageMagick-6.6.7-4/configure~ 2011-02-10 11:50:21.704561096 +0100
+++ ImageMagick-6.6.7-4/configure 2011-02-10 12:23:45.612561097 +0100
@@ -28251,7 +28251,7 @@
#
have_lzma='no'
LZMA_LIBS=''
-if test "$with_lzma" != 'no' || test "$with_tiff" != 'no'; then
+if test "$with_lzma" != 'no'; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: -------------------------------------------------------------" >&5
$as_echo "-------------------------------------------------------------" >&6; }
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LZMA" >&5
--- ImageMagick-6.6.7-4/configure.ac~ 2011-02-10 11:50:21.693561096 +0100
+++ ImageMagick-6.6.7-4/configure.ac 2011-02-10 12:23:40.013561098 +0100
@@ -2290,7 +2290,7 @@
#
have_lzma='no'
LZMA_LIBS=''
-if test "$with_lzma" != 'no' || test "$with_tiff" != 'no'; then
+if test "$with_lzma" != 'no'; then
AC_MSG_RESULT([-------------------------------------------------------------])
AC_MSG_CHECKING(for LZMA)
AC_MSG_RESULT()
......@@ -12,11 +12,12 @@ find-links =
[libreoffice-bin]
recipe = slapos.recipe.build
# here, two %s are used: the first one is for the directory name (e.g. x86_64), and the second one is for the filename (e.g. x86-64).
url = http://download.documentfoundation.org/libreoffice/stable/3.4.4/rpm/%s/LibO_3.4.4_Linux_%s_install-rpm_en-US.tar.gz
version = 3.4.5
url = http://download.documentfoundation.org/libreoffice/stable/${:version}/rpm/%s/LibO_${:version}_Linux_%s_install-rpm_en-US.tar.gz
# md5sums for the supported architectures
md5sum_x86 = 529c60e161d0c23405723f4a3cd1e046
md5sum_x86-64 = fc6cb85312d6e11a7ab6ddb1bc4e79cc
md5sum_x86 = 34786e6aa570782abac551ab092f3fb3
md5sum_x86-64 = 2159a50daab707c02b669a83f635ff0c
# where the office code can be found
officedir = libreoffice3.4
......
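To illustrate the double %s substitution described in the libreoffice-bin comment above, here is a hypothetical sketch; the x86-64 pair follows the comment, while the x86 pair is an assumption.

# Hypothetical illustration of the two %s placeholders: the first is the
# directory name, the second the architecture string used in the file name.
version = '3.4.5'
url_template = ('http://download.documentfoundation.org/libreoffice/stable/'
                '%s/rpm/%%s/LibO_%s_Linux_%%s_install-rpm_en-US.tar.gz'
                % (version, version))

# (directory name, file name architecture); the x86 pair is assumed here
platform_map = {'x86': ('x86', 'x86'), 'x86-64': ('x86_64', 'x86-64')}
directory, file_arch = platform_map['x86-64']
print(url_template % (directory, file_arch))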
......@@ -46,6 +46,7 @@ configure-options =
--with-plugins=max-no-ndb
--with-aria-tmp-tables
--without-plugin-innodb_plugin
--without-plugin-oqgraph
--without-readline
--with-ssl
--with-libevent=${libevent:location}
......@@ -60,8 +61,8 @@ environment =
[mroonga-mariadb]
recipe = hexagonit.recipe.cmmi
url = https://github.com/downloads/mroonga/mroonga/mroonga-1.10.tar.gz
md5sum = 6a712b2b20eddc65d918dabd8fba590f
url = https://github.com/downloads/mroonga/mroonga/mroonga-1.11.tar.gz
md5sum = 69e56246226e0b9969ee7f99e08aa7da
configure-options =
--with-mysql-source=${mariadb:location}__compile__/mariadb-${mariadb:version}
--with-mysql-config=${mariadb:location}/bin/mysql_config
......
--- groonga-storage-engine-1.0.1/ha_mroonga.cc 2011-10-28 07:19:15.506715507 +0200
+++ groonga-storage-engine-1.0.1/ha_mroonga.cc 2011-11-02 11:37:03.095096227 +0100
@@ -77,6 +77,9 @@
extern "C" {
#endif
+/* groonga's internal functions */
+const char *grn_obj_get_value_(grn_ctx *ctx, grn_obj *obj, grn_id id, uint32 *size);
+
/* global variables */
pthread_mutex_t mrn_db_mutex;
pthread_mutex_t mrn_log_mutex;
@@ -109,7 +112,6 @@
static bool mrn_logfile_opened = false;
grn_log_level mrn_log_level_default = GRN_LOG_DEFAULT_LEVEL;
ulong mrn_log_level = (ulong) mrn_log_level_default;
-char mrn_default_parser_name[MRN_MAX_KEY_SIZE];
char *mrn_default_parser;
static void mrn_logger_func(int level, const char *time, const char *title,
@@ -228,13 +230,12 @@
"default parser changed from '%s' to '%s'",
old_value, new_value);
grn_ctx_fin(&ctx);
- strcpy(mrn_default_parser_name, new_value);
- mrn_default_parser = mrn_default_parser_name;
+ strncpy(mrn_default_parser, new_value, MRN_MAX_KEY_SIZE - 1);
DBUG_VOID_RETURN;
}
static MYSQL_SYSVAR_STR(default_parser, mrn_default_parser,
- PLUGIN_VAR_RQCMDARG,
+ PLUGIN_VAR_RQCMDARG | PLUGIN_VAR_MEMALLOC,
"default fulltext parser",
NULL,
mrn_default_parser_update,
@@ -908,6 +909,15 @@
field->charset());
break;
}
+ case MYSQL_TYPE_BLOB:
+ {
+ GRN_VOID_INIT(&buf);
+ uint32 len;
+ const char *val = grn_obj_get_value_(ctx, col, id, &len);
+ Field_blob *blob = (Field_blob *)field;
+ blob->set_ptr((uchar *)&len, (uchar *)val);
+ break;
+ }
default: //strings etc..
{
GRN_TEXT_INIT(&buf,0);
@@ -1010,6 +1020,9 @@
goto error_allocated_open_tables_hash_init;
}
+ mrn_default_parser = (char *)my_malloc(MRN_MAX_KEY_SIZE, MYF(MY_WME));
+ strncpy(mrn_default_parser, MRN_PARSER_DEFAULT, MRN_MAX_KEY_SIZE - 1);
+
return 0;
error_allocated_open_tables_hash_init:
@@ -4422,7 +4435,7 @@
DBUG_RETURN(error);
}
-int ha_mroonga::wrapper_index_read_map(uchar * buf, const uchar * key,
+int ha_mroonga::wrapper_index_read_map(uchar *buf, const uchar *key,
key_part_map keypart_map,
enum ha_rkey_function find_flag)
{
@@ -4442,7 +4455,11 @@
MRN_SET_WRAP_TABLE_KEY(this, table);
if (fulltext_searching)
set_pk_bitmap();
+#ifdef MRN_HANDLER_HAVE_HA_INDEX_READ_MAP
+ error = wrap_handler->ha_index_read_map(buf, key, keypart_map, find_flag);
+#else
error = wrap_handler->index_read_map(buf, key, keypart_map, find_flag);
+#endif
MRN_SET_BASE_SHARE_KEY(share, table->s);
MRN_SET_BASE_TABLE_KEY(this, table);
}
@@ -4557,7 +4574,7 @@
DBUG_RETURN(error);
}
-int ha_mroonga::index_read_map(uchar * buf, const uchar * key,
+int ha_mroonga::index_read_map(uchar *buf, const uchar *key,
key_part_map keypart_map,
enum ha_rkey_function find_flag)
{
@@ -4572,6 +4589,7 @@
DBUG_RETURN(error);
}
+#ifdef MRN_HANDLER_HAVE_INDEX_READ_LAST_MAP
int ha_mroonga::wrapper_index_read_last_map(uchar *buf, const uchar *key,
key_part_map keypart_map)
{
@@ -4658,6 +4676,7 @@
}
DBUG_RETURN(error);
}
+#endif
int ha_mroonga::wrapper_index_next(uchar *buf)
{
@@ -6226,7 +6245,11 @@
}
ha_rows ha_mroonga::wrapper_multi_range_read_info(uint keyno, uint n_ranges,
- uint keys, uint *bufsz,
+ uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
+ uint *bufsz,
uint *flags, COST_VECT *cost)
{
MRN_DBUG_ENTER_METHOD();
@@ -6236,6 +6259,9 @@
if (fulltext_searching)
set_pk_bitmap();
rows = wrap_handler->multi_range_read_info(keyno, n_ranges, keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ key_parts,
+#endif
bufsz, flags, cost);
MRN_SET_BASE_SHARE_KEY(share, table->s);
MRN_SET_BASE_TABLE_KEY(this, table);
@@ -6243,16 +6269,26 @@
}
ha_rows ha_mroonga::storage_multi_range_read_info(uint keyno, uint n_ranges,
- uint keys, uint *bufsz,
+ uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
+ uint *bufsz,
uint *flags, COST_VECT *cost)
{
MRN_DBUG_ENTER_METHOD();
ha_rows rows = handler::multi_range_read_info(keyno, n_ranges, keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ key_parts,
+#endif
bufsz, flags, cost);
DBUG_RETURN(rows);
}
ha_rows ha_mroonga::multi_range_read_info(uint keyno, uint n_ranges, uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
uint *bufsz, uint *flags,
COST_VECT *cost)
{
@@ -6261,9 +6297,15 @@
if (share->wrapper_mode)
{
rows = wrapper_multi_range_read_info(keyno, n_ranges, keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ key_parts,
+#endif
bufsz, flags, cost);
} else {
rows = storage_multi_range_read_info(keyno, n_ranges, keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ key_parts,
+#endif
bufsz, flags, cost);
}
DBUG_RETURN(rows);
@@ -6315,7 +6357,7 @@
DBUG_RETURN(error);
}
-int ha_mroonga::wrapper_multi_range_read_next(char **range_info)
+int ha_mroonga::wrapper_multi_range_read_next(range_id_t *range_info)
{
MRN_DBUG_ENTER_METHOD();
int error = 0;
@@ -6329,14 +6371,14 @@
DBUG_RETURN(error);
}
-int ha_mroonga::storage_multi_range_read_next(char **range_info)
+int ha_mroonga::storage_multi_range_read_next(range_id_t *range_info)
{
MRN_DBUG_ENTER_METHOD();
int error = handler::multi_range_read_next(range_info);
DBUG_RETURN(error);
}
-int ha_mroonga::multi_range_read_next(char **range_info)
+int ha_mroonga::multi_range_read_next(range_id_t *range_info)
{
MRN_DBUG_ENTER_METHOD();
int error = 0;
--- groonga-storage-engine-1.0.1/ha_mroonga.h 2011-10-27 12:31:36.859277054 +0200
+++ groonga-storage-engine-1.0.1/ha_mroonga.h 2011-11-02 11:37:03.095096227 +0100
@@ -47,18 +47,22 @@
# define MRN_HANDLER_HAVE_ADD_INDEX 1
#endif
-#if (MYSQL_VERSION_ID >= 50600) || \
- (defined(MRN_MARIADB_P) && MYSQL_VERSION_ID >= 50302)
-# define MRN_HANDLER_HAVE_HA_CLOSE 1
+#if (MYSQL_VERSION_ID >= 50603) || \
+ (defined(MRN_MARIADB_P) && MYSQL_VERSION_ID >= 50209)
# define MRN_HANDLER_HAVE_HA_RND_NEXT 1
# define MRN_HANDLER_HAVE_HA_RND_POS 1
+# define MRN_HANDLER_HAVE_HA_INDEX_READ_MAP 1
# define MRN_HANDLER_HAVE_HA_INDEX_READ_IDX_MAP 1
# define MRN_HANDLER_HAVE_HA_INDEX_NEXT 1
# define MRN_HANDLER_HAVE_HA_INDEX_PREV 1
# define MRN_HANDLER_HAVE_HA_INDEX_FIRST 1
# define MRN_HANDLER_HAVE_HA_INDEX_LAST 1
# define MRN_HANDLER_HAVE_HA_INDEX_NEXT_SAME 1
+#endif
+#if (MYSQL_VERSION_ID >= 50603) || \
+ (defined(MRN_MARIADB_P) && MYSQL_VERSION_ID >= 50302)
+# define MRN_HANDLER_HAVE_HA_CLOSE 1
# define MRN_HANDLER_HAVE_MULTI_RANGE_READ 1
#endif
@@ -66,6 +70,14 @@
# define MRN_HANDLER_HAVE_HA_INPLACE_INDEX_CHANGE
#endif
+#ifndef MRN_MARIADB_P
+# define MRN_HANDLER_HAVE_INDEX_READ_LAST_MAP
+#endif
+
+#if (defined(MRN_MARIADB_P) && MYSQL_VERSION_ID >= 50302)
+# define MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+#endif
+
#if MYSQL_VERSION_ID < 50600
typedef Item COND;
#endif
@@ -74,6 +86,10 @@
typedef MYSQL_ERROR Sql_condition;
#endif
+#ifndef MRN_MARIADB_P
+ typedef char *range_id_t;
+#endif
+
class ha_mroonga;
/* structs */
@@ -213,11 +229,15 @@
ha_rows records_in_range(uint inx, key_range *min_key, key_range *max_key);
int index_init(uint idx, bool sorted);
int index_end();
+#ifndef MRN_HANDLER_HAVE_HA_INDEX_READ_MAP
int index_read_map(uchar * buf, const uchar * key,
key_part_map keypart_map,
enum ha_rkey_function find_flag);
+#endif
+#ifdef MRN_HANDLER_HAVE_INDEX_READ_LAST_MAP
int index_read_last_map(uchar *buf, const uchar *key,
key_part_map keypart_map);
+#endif
#ifndef MRN_HANDLER_HAVE_HA_INDEX_NEXT
int index_next(uchar *buf);
#endif
@@ -261,11 +281,14 @@
uint n_ranges, uint *bufsz,
uint *flags, COST_VECT *cost);
ha_rows multi_range_read_info(uint keyno, uint n_ranges, uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
uint *bufsz, uint *flags, COST_VECT *cost);
int multi_range_read_init(RANGE_SEQ_IF *seq, void *seq_init_param,
uint n_ranges, uint mode,
HANDLER_BUFFER *buf);
- int multi_range_read_next(char **range_info);
+ int multi_range_read_next(range_id_t *range_info);
#else // MRN_HANDLER_HAVE_MULTI_RANGE_READ
int read_multi_range_first(KEY_MULTI_RANGE **found_range_p,
KEY_MULTI_RANGE *ranges,
@@ -321,6 +344,11 @@
#ifdef MRN_HANDLER_HAVE_HA_RND_POS
int rnd_pos(uchar *buf, uchar *pos);
#endif
+#ifdef MRN_HANDLER_HAVE_HA_INDEX_READ_MAP
+ int index_read_map(uchar *buf, const uchar *key,
+ key_part_map keypart_map,
+ enum ha_rkey_function find_flag);
+#endif
#ifdef MRN_HANDLER_HAVE_HA_INDEX_NEXT
int index_next(uchar *buf);
#endif
@@ -469,10 +497,12 @@
int storage_index_read_map(uchar *buf, const uchar *key,
key_part_map keypart_map,
enum ha_rkey_function find_flag);
+#ifdef MRN_HANDLER_HAVE_INDEX_READ_LAST_MAP
int wrapper_index_read_last_map(uchar *buf, const uchar *key,
key_part_map keypart_map);
int storage_index_read_last_map(uchar *buf, const uchar *key,
key_part_map keypart_map);
+#endif
int wrapper_index_next(uchar *buf);
int storage_index_next(uchar *buf);
int wrapper_index_prev(uchar *buf);
@@ -533,9 +563,15 @@
uint *flags,
COST_VECT *cost);
ha_rows wrapper_multi_range_read_info(uint keyno, uint n_ranges, uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
uint *bufsz, uint *flags,
COST_VECT *cost);
ha_rows storage_multi_range_read_info(uint keyno, uint n_ranges, uint keys,
+#ifdef MRN_HANDLER_HAVE_MULTI_RANGE_READ_INFO_KEY_PARTS
+ uint key_parts,
+#endif
uint *bufsz, uint *flags,
COST_VECT *cost);
int wrapper_multi_range_read_init(RANGE_SEQ_IF *seq, void *seq_init_param,
@@ -544,8 +580,8 @@
int storage_multi_range_read_init(RANGE_SEQ_IF *seq, void *seq_init_param,
uint n_ranges, uint mode,
HANDLER_BUFFER *buf);
- int wrapper_multi_range_read_next(char **range_info);
- int storage_multi_range_read_next(char **range_info);
+ int wrapper_multi_range_read_next(range_id_t *range_info);
+ int storage_multi_range_read_next(range_id_t *range_info);
#else // MRN_HANDLER_HAVE_MULTI_RANGE_READ
int wrapper_read_multi_range_first(KEY_MULTI_RANGE **found_range_p,
KEY_MULTI_RANGE *ranges,
--- groonga-storage-engine-1.0.1/test/run-sql-test.sh 2011-09-27 10:43:29.093290682 +0200
+++ groonga-storage-engine-1.0.1/test/run-sql-test.sh 2011-11-02 11:37:03.099096256 +0100
@@ -24,12 +24,20 @@
source_test_suites_dir="${source_mysql_test_dir}/suite"
build_test_suites_dir="${build_mysql_test_dir}/suite"
case "${MYSQL_VERSION}" in
- 5.1)
+ 5.1.*)
plugins_dir="${MYSQL_BUILD}/lib/mysql/plugin"
if ! test -d "${build_test_suites_dir}"; then
mkdir -p "${build_test_suites_dir}"
fi
;;
+ *-MariaDB*)
+ if ! test -d "${build_test_suites_dir}"; then
+ ln -s "${source_test_suites_dir}" "${build_test_suites_dir}"
+ fi
+ if ! test -d "${MYSQL_BUILD}/plugin/mroonga"; then
+ ln -s "${top_dir}" "${MYSQL_BUILD}/plugin/mroonga"
+ fi
+ ;;
*)
if ! test -d "${build_test_suites_dir}"; then
ln -s "${source_test_suites_dir}" "${build_test_suites_dir}"
@@ -47,10 +55,14 @@
fi
done
-make -C ${top_dir} \
- install-pluginLTLIBRARIES \
- plugindir=${plugins_dir} > /dev/null || \
- exit 1
+if test -n "${plugins_dir}"; then
+ make -C ${top_dir} \
+ install-pluginLTLIBRARIES \
+ plugindir=${plugins_dir} > /dev/null || \
+ exit 1
+else
+ make -C ${top_dir} > /dev/null || exit 1
+fi
(cd "$build_mysql_test_dir" && \
./mysql-test-run.pl \
[buildout]
extends =
../cmake/buildout.cfg
../glib/buildout.cfg
../pcre/buildout.cfg
../mariadb/buildout.cfg
parts = mydumper
# XXX-Antoine:
# This is really dirty, but it's the only way to install
# mydumper that works
[mydumper]
recipe = slapos.recipe.build
url = http://launchpad.net/mydumper/0.2/0.2.3/+download/mydumper-0.2.3.tar.gz
md5sum = 36e6a1c97a9634a6882ddaac5e2697d5
buildout-bin-dir = ${buildout:bin-directory}
cmake-command = ${cmake:location}/bin/cmake
mysql-config = ${mariadb:location}/bin/mysql_config
doc-dependency = ${mydumper-doc:eggs}
mysqllib = ${mariadb:location}/lib
pkg-config-path = ${glib:location}/lib/pkgconfig/:${pcre:location}/lib/pkgconfig/
libraries = ${zlib:location}/lib/:${glib:location}/lib/:${pcre:location}/lib/:${mariadb:location}/lib/mysql/
includes = ${zlib:location}/include/:${glib:location}/include/:${pcre:location}/include/:${mariadb:location}/include/mysql/
cflags = -I${zlib:location}/include/ -I${glib:location}/include/ -I${pcre:location}/include/ -I${mariadb:location}/include/mysql/
mydumper-patches =
${:_profile_base_location_}/mydumper-remove-warnings-errors.patch 917fea16b5ddea195cfa33fbd9827f57 -p1
slapos_promise =
directory:bin
file:bin/mydumper
file:bin/myloader
script =
import os
url = self.download(self.options['url'], self.options.get('md5sum'))
extract_dir = self.extract(url)
workdir = guessworkdir(extract_dir)
self.applyPatchList(self.options['mydumper-patches'], cwd=workdir)
env['PATH'] = self.options['buildout-bin-dir'] + ':' + env.get('PATH', '')
env['PKG_CONFIG_PATH'] = self.options['pkg-config-path'] + ':' + \
env.get('PKG_CONFIG_PATH', '')
env['CMAKE_INCLUDE_PATH'] = self.options['includes']
env['CMAKE_LIBRARY_PATH'] = self.options['libraries']
env['CFLAGS'] = self.options['cflags']
command_line = [self.options['cmake-command'],
'-DCMAKE_INSTALL_PREFIX=%%s' %% self.options['location'],
'-DMYSQL_CONFIG=%%s' %% self.options['mysql-config'],
'-DCMAKE_C_FLAGS=%%s' %% self.options['cflags'],
'-DCMAKE_INSTALL_RPATH=%%s' %% self.options['libraries'],
'.']
call(command_line, cwd=workdir, env=env)
call(['make'], cwd=workdir, env=env)
call(['make', 'install'], cwd=workdir, env=env)
[mydumper-doc]
recipe = zc.recipe.egg
eggs =
Sphinx
dependent-scripts = true
# XXX-Antoine: here's what I did using hexagonit.recipe.cmmi.
# and it wasn't working!
#[mydumper]
#recipe = hexagonit.recipe.cmmi
#url = http://launchpad.net/mydumper/0.2/0.2.3/+download/mydumper-0.2.3.tar.gz
#md5sum = 36e6a1c97a9634a6882ddaac5e2697d5
#strip-top-level-dir = true
#location = ${buildout:parts-directory}/${:_buildout_section_name_}
#configure-command =
# ${cmake:location}/bin/cmake \
# -DCMAKE_INSTALL_PREFIX=${:location} \
# -DMYSQL_CONFIG=${mariadb:location}/bin/mysql_config \
# -DCMAKE_INCLUDE_PATH=${zlib:location}/include \
# -DCMAKE_LIBRARY_PATH=${zlib:location}/lib \
# .
#environment=
# PATH=$PATH:${buildout:bin-directory}
# PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig/:${pcre:location}/lib/pkgconfig/
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -12,7 +12,7 @@
add_subdirectory(docs)
-set(CMAKE_C_FLAGS "-Wall -Wunused -Wwrite-strings -Wno-strict-aliasing -Wextra -Wshadow -Werror -O3 -g ${MYSQL_CFLAGS}")
+set(CMAKE_C_FLAGS "-Wall -Wunused -Wwrite-strings -Wno-strict-aliasing -Wextra -Wshadow -O3 -g ${MYSQL_CFLAGS}")
include_directories(${MYDUMPER_SOURCE_DIR} ${MYSQL_INCLUDE_DIR} ${GLIB2_INCLUDE_DIR} ${PCRE_INCLUDE_DIR} ${ZLIB_INCLUDE_DIRS})
......@@ -76,7 +76,6 @@ configure-options =
--enable-assembler
--without-readline
--with-sphinx-storage-engine
--with-named-curses-libs=${ncurses:location}/lib/libncurses.so
--with-zlib-dir=${zlib:location}
make-options =
......@@ -89,5 +88,5 @@ patches =
${mysql-5.0-sphinx-patch:location}/${mysql-5.0-sphinx-patch:filename}
environment =
PATH=${senna:location}/bin:${autoconf:location}/bin:${automake-1.11:location}/bin:${libtool:location}/bin:${bison:location}/bin:${flex:location}/bin:%(PATH)s
CPPFLAGS=-I${senna:location}/include/senna -I${ncurses:location}/include -I${readline:location}/include
LDFLAGS=-L${senna:location}/lib -L${readline:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${readline:location}/lib
CPPFLAGS=-I${senna:location}/include/senna -I${ncurses:location}/include -I${readline5:location}/include
LDFLAGS=-L${senna:location}/lib -L${readline5:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${readline5:location}/lib
......@@ -5,6 +5,7 @@
[buildout]
extends =
../ca-certificates/buildout.cfg
../zlib/buildout.cfg
parts =
......@@ -20,8 +21,8 @@ download-only = true
[openssl]
recipe = hexagonit.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.0f.tar.gz
md5sum = e358705fb4a8827b5e9224a73f442025
url = https://www.openssl.org/source/openssl-1.0.0g.tar.gz
md5sum = 07ecbe4324f140d157478637d6beccf1
patches =
${openssl-nodoc.patch:location}/${openssl-nodoc.patch:filename}
patch-options = -p0
......@@ -38,5 +39,7 @@ configure-options =
# it seems that parallel build sometimes fails for openssl.
make-options =
-j1
make-targets =
install && rm -f ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/* && for i in ${ca-certificates:location}/certs/*/*.crt; do ln -sv $i ${buildout:parts-directory}/${:_buildout_section_name_}/etc/ssl/certs/`${buildout:parts-directory}/${:_buildout_section_name_}/bin/openssl x509 -hash -noout -in $i`.0; done; true
LDFLAGS="-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib"
SHARED_LDFLAGS="-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${buildout:parts-directory}/${:_buildout_section_name_}/lib"
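The make-targets line above symlinks every CA certificate under its subject-hash name, which is what c_rehash normally does. A hypothetical Python equivalent follows; the paths are placeholders, not the real part locations.

# Hypothetical sketch of the certificate rehashing done by make-targets above.
import glob
import os
import subprocess

openssl_bin = '/path/to/parts/openssl/bin/openssl'     # placeholder path
certs_dir = '/path/to/parts/openssl/etc/ssl/certs'     # placeholder path
ca_certs_dir = '/path/to/parts/ca-certificates/certs'  # placeholder path

for crt in glob.glob(os.path.join(ca_certs_dir, '*', '*.crt')):
    # `openssl x509 -hash -noout` prints the subject hash used as the link name
    subject_hash = subprocess.check_output(
        [openssl_bin, 'x509', '-hash', '-noout', '-in', crt]).decode().strip()
    os.symlink(crt, os.path.join(certs_dir, subject_hash + '.0'))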
......@@ -10,7 +10,7 @@ parts =
recipe = hexagonit.recipe.cmmi
depends =
${perl:version}
url = http://www.percona.com/redir/downloads/percona-toolkit/percona-toolkit-1.0.1.tar.gz
md5sum = 1d843b1b3ebd2eacfa3bf95ef2a00557
url = http://www.percona.com/redir/downloads/percona-toolkit/percona-toolkit-2.0.1.tar.gz
md5sum = 3a78c78672cb7c634bda35dfb2f817bf
configure-command =
${perl:location}/bin/perl Makefile.PL
......@@ -11,8 +11,8 @@ extends =
[pkgconfig]
recipe = hexagonit.recipe.cmmi
url = http://pkgconfig.freedesktop.org/releases/pkg-config-0.25.tar.gz
md5sum = a3270bab3f4b69b7dc6dbdacbcae9745
url = http://pkgconfig.freedesktop.org/releases/pkg-config-0.26.tar.gz
md5sum = 47525c26a9ba7ba14bf85e01509a7234
location = ${buildout:parts-directory}/${:_buildout_section_name_}
# build pkg-config twice so that second configure can use pkg-config
# to compute GLIB_CFLAGS and GLIB_LIBS.
......
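A hypothetical sketch of the two-pass build mentioned in the pkgconfig comment above; the real work is done by the (elided) hexagonit.recipe.cmmi options, and the paths below are placeholders.

# Hypothetical sketch: build pkg-config once, then rebuild with the freshly
# built binary on PATH so the second configure can compute GLIB_CFLAGS/GLIB_LIBS.
import os
import subprocess

source_dir = '/path/to/pkg-config-0.26'    # placeholder
stage1_prefix = '/tmp/pkg-config-stage1'   # placeholder
final_prefix = '/path/to/parts/pkgconfig'  # placeholder

def build(prefix, env):
    subprocess.check_call(['./configure', '--prefix=' + prefix],
                          cwd=source_dir, env=env)
    subprocess.check_call(['make', 'install'], cwd=source_dir, env=env)

env = os.environ.copy()
build(stage1_prefix, env)   # first pass: no pkg-config available yet
env['PATH'] = os.path.join(stage1_prefix, 'bin') + ':' + env['PATH']
build(final_prefix, env)    # second pass: configure can now find pkg-config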
[buildout]
parts =
pwgen
[pwgen]
recipe = hexagonit.recipe.cmmi
url = http://downloads.sourceforge.net/project/pwgen/pwgen/2.06/pwgen-2.06.tar.gz
md5sum = 935aebcbe610fbc9de8125e7b7d71297
......@@ -2,8 +2,15 @@
parts =
readline5
readline
extends =
../ncurses/buildout.cfg
# readline-5.x is still used for GPL2-only software.
[readline5]
recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/readline/readline-5.2.tar.gz
md5sum = e39331f32ad14009b9ff49cc10c5e751
configure-options =
--enable-multibyte
--disable-static
# readline-5.x is still used for GPL2-only software.
[readline5]
......@@ -22,7 +29,5 @@ recipe = hexagonit.recipe.cmmi
url = http://ftp.gnu.org/gnu/readline/readline-6.2.tar.gz
md5sum = 67948acb2ca081f23359d0256e9a271c
configure-options =
--enable-multibyte
--disable-static
--with-ncurses=${ncurses:location}
environment =
LDFLAGS =-Wl,-rpath=${ncurses:location}/lib
From 50ec7439e80bd6a77346dc6482895e481d8cd43a Mon Sep 17 00:00:00 2001
From: Antoine Catton <acatton@tiolive.com>
Date: Tue, 10 Jan 2012 18:30:20 +0100
Subject: [PATCH] Switch to IPv6
---
libhttp/http.h | 4 ++--
libhttp/httpconnection.c | 11 ++++++++++-
libhttp/server.c | 33 +++++++++++++++++++--------------
libhttp/server.h | 6 +++---
shellinabox/shellinaboxd.c | 14 +++++++-------
5 files changed, 41 insertions(+), 27 deletions(-)
diff --git a/libhttp/http.h b/libhttp/http.h
index e7840fa..5cd61e3 100644
--- a/libhttp/http.h
+++ b/libhttp/http.h
@@ -66,8 +66,8 @@ typedef struct ServerConnection ServerConnection;
typedef struct Server Server;
typedef struct URL URL;
-Server *newCGIServer(int localhostOnly, int portMin, int portMax, int timeout);
-Server *newServer(int localhostOnly, int port);
+Server *newCGIServer(char *ipv6, int portMin, int portMax, int timeout);
+Server *newServer(char *ipv6, int port);
void deleteServer(Server *server);
int serverGetListeningPort(Server *server);
int serverGetFd(Server *server);
diff --git a/libhttp/httpconnection.c b/libhttp/httpconnection.c
index c8e69f6..cae467f 100644
--- a/libhttp/httpconnection.c
+++ b/libhttp/httpconnection.c
@@ -823,8 +823,17 @@ static int httpHandleCommand(struct HttpConnection *http,
const char *host = getFromHashMap(&http->header,
"host");
if (host) {
+ int brackets = 0; // For IPv6 hosts
for (char ch; (ch = *host) != '\000'; host++) {
- if (ch == ':') {
+ if (ch == '[') {
+ brackets = 1;
+ break;
+ }
+ if (ch == ']') {
+ brackets = 0;
+ break;
+ }
+ if (!brackets && ch == ':') {
*(char *)host = '\000';
break;
}
diff --git a/libhttp/server.c b/libhttp/server.c
index f52a269..2c30bd8 100644
--- a/libhttp/server.c
+++ b/libhttp/server.c
@@ -170,19 +170,19 @@ static int serverQuitHandler(struct HttpConnection *http, void *arg) {
return HTTP_DONE;
}
-struct Server *newCGIServer(int localhostOnly, int portMin, int portMax,
+struct Server *newCGIServer(char *ipv6, int portMin, int portMax,
int timeout) {
struct Server *server;
check(server = malloc(sizeof(struct Server)));
- initServer(server, localhostOnly, portMin, portMax, timeout);
+ initServer(server, ipv6, portMin, portMax, timeout);
return server;
}
-struct Server *newServer(int localhostOnly, int port) {
- return newCGIServer(localhostOnly, port, port, -1);
+struct Server *newServer(char *ipv6, int port) {
+ return newCGIServer(ipv6, port, port, -1);
}
-void initServer(struct Server *server, int localhostOnly, int portMin,
+void initServer(struct Server *server, char *ipv6, int portMin,
int portMax, int timeout) {
server->looping = 0;
server->exitAll = 0;
@@ -192,14 +192,19 @@ void initServer(struct Server *server, int localhostOnly, int portMin,
server->numConnections = 0;
int true = 1;
- server->serverFd = socket(PF_INET, SOCK_STREAM, 0);
+ server->serverFd = socket(PF_INET6, SOCK_STREAM, 0);
check(server->serverFd >= 0);
check(!setsockopt(server->serverFd, SOL_SOCKET, SO_REUSEADDR,
&true, sizeof(true)));
- struct sockaddr_in serverAddr = { 0 };
- serverAddr.sin_family = AF_INET;
- serverAddr.sin_addr.s_addr = htonl(localhostOnly
- ? INADDR_LOOPBACK : INADDR_ANY);
+ struct sockaddr_in6 serverAddr = { 0 };
+ serverAddr.sin6_family = AF_INET6;
+ if (ipv6 != NULL) {
+ if (!inet_pton(AF_INET6, ipv6, serverAddr.sin6_addr.s6_addr)) {
+ fatal("Bad ipv6 address");
+ }
+ } else {
+ serverAddr.sin6_addr = in6addr_any;
+ }
// Linux unlike BSD does not have support for picking a local port range.
// So, we have to randomly pick a port from our allowed port range, and then
@@ -214,14 +219,14 @@ void initServer(struct Server *server, int localhostOnly, int portMin,
int portStart = rand() % (portMax - portMin + 1) + portMin;
for (int p = 0; p <= portMax-portMin; p++) {
int port = (p+portStart)%(portMax-portMin+1)+ portMin;
- serverAddr.sin_port = htons(port);
+ serverAddr.sin6_port = htons(port);
if (!bind(server->serverFd, (struct sockaddr *)&serverAddr,
sizeof(serverAddr))) {
break;
}
- serverAddr.sin_port = 0;
+ serverAddr.sin6_port = 0;
}
- if (!serverAddr.sin_port) {
+ if (!serverAddr.sin6_port) {
fatal("Failed to find any available port");
}
}
@@ -231,7 +236,7 @@ void initServer(struct Server *server, int localhostOnly, int portMin,
check(!getsockname(server->serverFd, (struct sockaddr *)&serverAddr,
&socklen));
check(socklen == sizeof(serverAddr));
- server->port = ntohs(serverAddr.sin_port);
+ server->port = ntohs(serverAddr.sin6_port);
info("Listening on port %d", server->port);
check(server->pollFds = malloc(sizeof(struct pollfd)));
diff --git a/libhttp/server.h b/libhttp/server.h
index bb879fb..5ffb698 100644
--- a/libhttp/server.h
+++ b/libhttp/server.h
@@ -78,10 +78,10 @@ struct Server {
struct SSLSupport ssl;
};
-struct Server *newCGIServer(int localhostOnly, int portMin, int portMax,
+struct Server *newCGIServer(char *ipv6, int portMin, int portMax,
int timeout);
-struct Server *newServer(int localhostOnly, int port);
-void initServer(struct Server *server, int localhostOnly, int portMin,
+struct Server *newServer(char *ipv6, int port);
+void initServer(struct Server *server, char *ipv6, int portMin,
int portMax, int timeout);
void destroyServer(struct Server *server);
void deleteServer(struct Server *server);
diff --git a/shellinabox/shellinaboxd.c b/shellinabox/shellinaboxd.c
index dcf05ff..2d1d758 100644
--- a/shellinabox/shellinaboxd.c
+++ b/shellinabox/shellinaboxd.c
@@ -80,7 +80,7 @@
static int port;
static int portMin;
static int portMax;
-static int localhostOnly = 0;
+static char *ipv6 = NULL;
static int noBeep = 0;
static int numericHosts = 0;
static int enableSSL = 1;
@@ -747,7 +747,7 @@ static void usage(void) {
" -g, --group=GID switch to this group (default: %s)\n"
" -h, --help print this message\n"
" --linkify=[none|normal|agressive] default is \"normal\"\n"
- " --localhost-only only listen on 127.0.0.1\n"
+ " --ipv6 listen on a specific ipv6\n"
" --no-beep suppress all audio output\n"
" -n, --numeric do not resolve hostnames\n"
" -p, --port=PORT select a port (default: %d)\n"
@@ -839,7 +839,7 @@ static void parseArgs(int argc, char * const argv[]) {
{ "static-file", 1, 0, 'f' },
{ "group", 1, 0, 'g' },
{ "linkify", 1, 0, 0 },
- { "localhost-only", 0, 0, 0 },
+ { "ipv6", 1, 0, 0 },
{ "no-beep", 0, 0, 0 },
{ "numeric", 0, 0, 'n' },
{ "port", 1, 0, 'p' },
@@ -1001,8 +1001,8 @@ static void parseArgs(int argc, char * const argv[]) {
"\"none\", \"normal\", or \"aggressive\".");
}
} else if (!idx--) {
- // Localhost Only
- localhostOnly = 1;
+ // IPv6
+ ipv6 = optarg;
} else if (!idx--) {
// No Beep
noBeep = 1;
@@ -1197,7 +1197,7 @@ int main(int argc, char * const argv[]) {
// Create a new web server
Server *server;
if (port) {
- check(server = newServer(localhostOnly, port));
+ check(server = newServer(ipv6, port));
dropPrivileges();
setUpSSL(server);
} else {
@@ -1217,7 +1217,7 @@ int main(int argc, char * const argv[]) {
_exit(0);
}
check(!NOINTR(close(fds[0])));
- check(server = newCGIServer(localhostOnly, portMin, portMax,
+ check(server = newCGIServer(ipv6, portMin, portMax,
AJAX_TIMEOUT));
cgiServer = server;
setUpSSL(server);
--
1.7.6.5
From eee6f7180dc5dd4523264e7ce0721945ab2b78a1 Mon Sep 17 00:00:00 2001
From: Antoine Catton <acatton@tiolive.com>
Date: Wed, 11 Jan 2012 17:32:15 +0100
Subject: [PATCH 2/2] Allow to run entire command path.
---
shellinabox/launcher.c | 3 +--
1 files changed, 1 insertions(+), 2 deletions(-)
diff --git a/shellinabox/launcher.c b/shellinabox/launcher.c
index fb8a133..e116a75 100644
--- a/shellinabox/launcher.c
+++ b/shellinabox/launcher.c
@@ -1226,8 +1226,7 @@ static void execService(int width, int height, struct Service *service,
extern char **environ;
environ = environment;
- char *cmd = strrchr(argv[0], '/');
- execvp(cmd ? cmd + 1: argv[0], argv);
+ execvp(argv[0], argv);
}
void setWindowSize(int pty, int width, int height) {
--
1.7.6.5
[buildout]
extends =
../zlib/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
parts = shellinabox
[shellinabox-full-path-patch]
recipe = hexagonit.recipe.download
filename = 0002-Allow-to-run-entire-command-path.patch
url = ${:_profile_base_location_}/${:filename}
download-only = true
[shellinabox-ipv6-patch]
recipe = hexagonit.recipe.download
filename = 0001-Switch-to-IPv6.patch
url = ${:_profile_base_location_}/${:filename}
download-only = true
[shellinabox]
recipe = hexagonit.recipe.cmmi
url = http://shellinabox.googlecode.com/files/shellinabox-2.10.tar.gz
md5sum = 0e144910d85d92edc54702ab9c46f032
environment =
CFLAGS = -I${zlib:location}/include -I${openssl:location}/include
LDFLAGS = -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib
PKG_CONFIG_PATH = ${openssl:location}/lib/pkgconfig/
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
patches =
${shellinabox-ipv6-patch:location}/${shellinabox-ipv6-patch:filename}
${shellinabox-full-path-patch:location}/${shellinabox-full-path-patch:filename}
# http://www.sphinxsearch.com/bugs/view.php?id=550
# 0000550: Can not make libsphinxclient
--- sphinx-1.10-beta/api/libsphinxclient/sphinxclient.c 2010-07-15 13:05:40.000000000 +0200
+++ sphinx-1.10-beta/api/libsphinxclient/sphinxclient.c 2010-07-21 20:43:26.760024489 +0200
@@ -1355,11 +1355,13 @@
optval = 1;
#ifndef _WIN32
+ #ifdef SO_NOSIGPIPE
if ( setsockopt ( sock, SOL_SOCKET, SO_NOSIGPIPE, (void *)&optval, (socklen_t)sizeof(optval) ) < 0 )
{
set_error ( client, "setsockopt() failed: %s", sock_error() );
return -1;
}
+ #endif
#endif
res = connect ( sock, (struct sockaddr*)&sa, sizeof(sa) );
......@@ -5,8 +5,8 @@ parts =
[sqlite3]
recipe = hexagonit.recipe.cmmi
url = http://www.sqlite.org/sqlite-autoconf-3070900.tar.gz
md5sum = dce303524736fe89a76b8ed29d566352
url = http://www.sqlite.org/sqlite-autoconf-3071000.tar.gz
md5sum = 9ed2ca93577b58cfa0d01f64b9312ab9
configure-options =
--disable-static
--enable-readline
......
......@@ -4,8 +4,8 @@ parts =
[zabbix-agent]
recipe = hexagonit.recipe.cmmi
url = http://prdownloads.sourceforge.net/zabbix/zabbix-1.8.5.tar.gz?download
md5sum = 58b9253fb0eace1e20b2fe5c1fce32a3
url = http://prdownloads.sourceforge.net/zabbix/zabbix-1.8.10.tar.gz?download
md5sum = 7e89f80c1822787c0831f7c0dbefcd7b
configure-options =
--enable-agent
--enable-ipv6
......@@ -30,6 +30,7 @@ setup(name=name,
'lxml', # for full blown python interpreter
'netaddr', # to manipulate on IP addresses
'setuptools', # namespaces
'inotifyx', # to watch filesystem changes (used in lockfile)
'slapos.core', # uses internally
# 'slapos.toolbox', # needed for libcloud, cloudmgr, disabled for now
'xml_marshaller', # need to communication with slapgrid
......@@ -39,45 +40,81 @@ setup(name=name,
zip_safe=True,
entry_points={
'zc.buildout': [
'apachephp = slapos.recipe.apachephp:Recipe',
'apacheproxy = slapos.recipe.apacheproxy:Recipe',
'apache.zope.backend = slapos.recipe.apache_zope_backend:Recipe',
'certificate_authority = slapos.recipe.certificate_authority:Recipe',
'certificate_authority.request = slapos.recipe.certificate_authority:Request',
'cron = slapos.recipe.dcron:Recipe',
'cron.d = slapos.recipe.dcron:Part',
'davstorage = slapos.recipe.davstorage:Recipe',
'dropbear = slapos.recipe.dropbear:Recipe',
'dropbear.add_authorized_key = slapos.recipe.dropbear:AddAuthorizedKey',
'dropbear.client = slapos.recipe.dropbear:Client',
'duplicity = slapos.recipe.duplicity:Recipe',
'erp5 = slapos.recipe.erp5:Recipe',
'erp5scalabilitytestbed = slapos.recipe.erp5scalabilitytestbed:Recipe',
'equeue = slapos.recipe.equeue:Recipe',
'erp5testnode = slapos.recipe.erp5testnode:Recipe',
'helloworld = slapos.recipe.helloworld:Recipe',
'generic.cloudooo = slapos.recipe.generic_cloudooo:Recipe',
'fontconfig = slapos.recipe.fontconfig:Recipe',
'java = slapos.recipe.java:Recipe',
'kumofs = slapos.recipe.kumofs:Recipe',
'generic.kumofs = slapos.recipe.generic_kumofs:Recipe',
'haproxy = slapos.recipe.haproxy:Recipe',
'kvm = slapos.recipe.kvm:Recipe',
'libcloud = slapos.recipe.libcloud:Recipe',
'libcloudrequest = slapos.recipe.libcloudrequest:Recipe',
'lockfile = slapos.recipe.lockfile:Recipe',
'memcached = slapos.recipe.memcached:Recipe',
'generic.memcached = slapos.recipe.generic_memcached:Recipe',
'mysql = slapos.recipe.mysql:Recipe',
'mydumper = slapos.recipe.mydumper:Recipe',
'generic.mysql = slapos.recipe.generic_mysql:Recipe',
'mkdirectory = slapos.recipe.mkdirectory:Recipe',
'nbdserver = slapos.recipe.nbdserver:Recipe',
'nosqltestbed = slapos.recipe.nosqltestbed:NoSQLTestBed',
'notifier = slapos.recipe.notifier:Recipe',
'notifier.callback = slapos.recipe.notifier:Callback',
'notifier.notify = slapos.recipe.notifier:Notify',
'lamp = slapos.recipe.lamp:Request',
'lamp.request = slapos.recipe.lamp:Request',
'lamp.static = slapos.recipe.lamp:Static',
'lamp.simple = slapos.recipe.lamp:Simple',
'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'pbs = slapos.recipe.pbs:Recipe',
'publish = slapos.recipe.publish:Recipe',
'publishurl = slapos.recipe.publishurl:Recipe',
'pwgen = slapos.recipe.pwgen:Recipe',
'proactive = slapos.recipe.proactive:Recipe',
'request = slapos.recipe.request:Recipe',
'sheepdogtestbed = slapos.recipe.sheepdogtestbed:SheepDogTestBed',
'shell = slapos.recipe.shell:Recipe',
'shellinabox = slapos.recipe.shellinabox:Recipe',
'symbolic.link = slapos.recipe.symbolic_link:Recipe',
'softwaretype = slapos.recipe.softwaretype:Recipe',
'siptester = slapos.recipe.siptester:SipTesterRecipe',
'simplelogger = slapos.recipe.simplelogger:Recipe',
'slaprunner = slapos.recipe.slaprunner:Recipe',
'sshkeys_authority = slapos.recipe.sshkeys_authority:Recipe',
'sshkeys_authority.request = slapos.recipe.sshkeys_authority:Request',
'sphinx= slapos.recipe.sphinx:Recipe',
'stunnel = slapos.recipe.stunnel:Recipe',
'testnode = slapos.recipe.testnode:Recipe',
'urlparse = slapos.recipe._urlparse:Recipe',
'vifib = slapos.recipe.vifib:Recipe',
'waitfor = slapos.recipe.waitfor:Recipe',
'xwiki = slapos.recipe.xwiki:Recipe',
'zabbixagent = slapos.recipe.zabbixagent:Recipe',
'generic.zope = slapos.recipe.generic_zope:Recipe',
'generic.zope.zeo.client = slapos.recipe.generic_zope_zeo_client:Recipe',
'generate.erp5.tidstorage = slapos.recipe.generate_erp5_tidstorage:Recipe',
'generate.cloudooo = slapos.recipe.generate_cloudooo:Recipe',
'zeo = slapos.recipe.zeo:Recipe',
'tidstorage = slapos.recipe.tidstorage:Recipe',
'erp5.update = slapos.recipe.erp5_update:Recipe',
'erp5.test = slapos.recipe.erp5_test:Recipe',
],
'slapos.recipe.nosqltestbed.plugin': [
'kumo = slapos.recipe.nosqltestbed.kumo:KumoTestBed',
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from urlparse import urlparse
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):

  def _options(self, options):
    url = urlparse(options['url'])

    def to_str(data):
      if data is None:
        return ''
      return str(data)

    options.update(scheme=url.scheme,
                   username=to_str(url.username),
                   password=to_str(url.password),
                   host=to_str(url.hostname),
                   port=to_str(url.port),
                   path=url.path.strip('/'),
                   params=url.params,
                   query=url.query,
                   fragment=url.fragment)

  def install(self):
    return []
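A quick illustration of the option values the _options() hook above derives from a sample url, following Python 2's urlparse semantics; the URL itself is just an example.

# Example of what urlparse() yields for a typical 'url' option value.
from urlparse import urlparse  # Python 2, as in the recipe above

url = urlparse('http://user:secret@example.com:8080/db/path?x=1#frag')
print(url.scheme)           # 'http'
print(url.username)         # 'user'
print(url.password)         # 'secret'
print(url.hostname)         # 'example.com'
print(url.port)             # 8080 (stored as '8080' after to_str() above)
print(url.path.strip('/'))  # 'db/path'
print(url.query)            # 'x=1'
print(url.fragment)         # 'frag'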
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import pkg_resources
class Recipe(GenericBaseRecipe):

  def install(self):
    path_list = []

    ip = self.options['ip']
    port = self.options['port']
    backend = self.options['backend']
    key = self.options['key-file']
    certificate = self.options['cert-file']
    access_control_string = self.options['access-control-string']

    apache_conf = dict()
    apache_conf['pid_file'] = self.options['pid-file']
    apache_conf['lock_file'] = self.options['lock-file']
    apache_conf['ip'] = ip
    apache_conf['port'] = port
    apache_conf['server_admin'] = 'admin@'
    apache_conf['error_log'] = self.options['error-log']
    apache_conf['access_log'] = self.options['access-log']
    apache_conf['server_name'] = '%s' % apache_conf['ip']
    apache_conf['certificate'] = certificate
    apache_conf['key'] = key
    apache_conf['path'] = '/'
    apache_conf['access_control_string'] = access_control_string
    apache_conf['rewrite_rule'] = "RewriteRule (.*) %s$1 [L,P]" % backend

    apache_conf_string = pkg_resources.resource_string(__name__,
        'template/apache.zope.conf.in') % apache_conf
    apache_config_file = self.createFile(self.options['configuration-file'],
        apache_conf_string)
    path_list.append(apache_config_file)

    wrapper = self.createPythonScript(self.options['wrapper'], __name__ +
        '.apache.runApache', [
            dict(
                required_path_list=[key, certificate],
                binary=self.options['apache-binary'],
                config=apache_config_file
            )
        ])
    path_list.append(wrapper)

    return path_list
import os
import sys
import time
def runApache(args):
sleep = 60
conf = args[0]
while True:
ready = True
for f in conf.get('required_path_list', []):
if not os.path.exists(f):
print 'File %r does not exist, sleeping for %s' % (f, sleep)
ready = False
if ready:
break
time.sleep(sleep)
apache_wrapper_list = [conf['binary'], '-f', conf['config'], '-DFOREGROUND']
apache_wrapper_list.extend(sys.argv[1:])
sys.stdout.flush()
sys.stderr.flush()
os.execl(apache_wrapper_list[0], *apache_wrapper_list)
# Apache configuration file for Zope
# Automatically generated
# List of modules
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule ssl_module modules/mod_ssl.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
# Basic server configuration
PidFile "%(pid_file)s"
LockFile "%(lock_file)s"
Listen %(ip)s:%(port)s
ServerAdmin %(server_admin)s
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
# As the backend trusts the REMOTE_USER header, always unset it
RequestHeader unset REMOTE_USER
# SSL Configuration
SSLEngine on
SSLCertificateFile %(certificate)s
SSLCertificateKeyFile %(key)s
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
SSLProxyEngine On
# Log configuration
ErrorLog "%(error_log)s"
# Default apache log format with the request time in microseconds appended at the end
LogFormat "%%h %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\" %%D" combined
CustomLog "%(access_log)s" combined
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
# Path protected
<Location %(path)s>
Order Deny,Allow
Deny from all
Allow from %(access_control_string)s
</Location>
# Magic of Zope related rewrite
RewriteEngine On
%(rewrite_rule)s
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import shutil
import os
import signal
from binascii import b2a_uu as uuencode
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
# Copy application
shutil.rmtree(self.options['htdocs'])
shutil.copytree(self.options['source'],
self.options['htdocs'])
# Install php.ini
php_ini = self.createFile(os.path.join(self.options['php-ini-dir'],
'php.ini'),
self.substituteTemplate(self.getTemplateFilename('php.ini.in'),
dict(tmp_directory=self.options['tmp-dir']))
)
path_list.append(php_ini)
# Install apache
apache_config = dict(
pid_file=self.options['pid-file'],
lock_file=self.options['lock-file'],
ip=self.options['ip'],
port=self.options['port'],
error_log=self.options['error-log'],
access_log=self.options['access-log'],
document_root=self.options['htdocs'],
php_ini_dir=self.options['php-ini-dir'],
)
httpd_conf = self.createFile(self.options['httpd-conf'],
self.substituteTemplate(self.getTemplateFilename('apache.in'),
apache_config)
)
path_list.append(httpd_conf)
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['httpd-binary'], '-f', self.options['httpd-conf'],
'-DFOREGROUND']
)
path_list.append(wrapper)
secret_key_filename = os.path.join(self.buildout['buildout']['directory'],
'.php_secret_key')
if not os.path.exists(secret_key_filename):
secret_key = uuencode(os.urandom(45)).strip()
# Remove unsafe characters
secret_key = secret_key.translate(None, '"\'')
with open(secret_key_filename, 'w') as secret_key_file:
secret_key_file.write(secret_key)
else:
with open(secret_key_filename, 'r') as secret_key_file:
secret_key = secret_key_file.read()
application_conf = dict(mysql_database=self.options['mysql-database'],
mysql_user=self.options['mysql-username'],
mysql_password=self.options['mysql-password'],
mysql_host='%s:%s' % (self.options['mysql-host'],
self.options['mysql-port']),
secret_key=secret_key,
)
directory, file_ = os.path.split(self.options['configuration'])
path = self.options['htdocs']
if directory:
path = os.path.join(path, directory)
if not os.path.exists(path):
os.makedirs(path)
if not os.path.isdir(path):
raise OSError("Cannot create %r." % path)
destination = os.path.join(path, file_)
config = self.createFile(destination,
self.substituteTemplate(self.options['template'], application_conf))
path_list.append(config)
if os.path.exists(self.options['pid-file']):
# Reload apache configuration
with open(self.options['pid-file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
os.kill(pid, signal.SIGUSR1) # Graceful restart
return path_list
# Apache static configuration
# Automatically generated
# Basic server configuration
PidFile "%(pid_file)s"
LockFile "%(lock_file)s"
Listen %(ip)s:%(port)s
PHPINIDir %(php_ini_dir)s
ServerAdmin someone@email
DefaultType text/plain
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
AddType application/x-httpd-php .php .phtml .php5 .php4
AddType application/x-httpd-php-source .phps
# Log configuration
ErrorLog "%(error_log)s"
LogLevel warn
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common
CustomLog "%(access_log)s" common
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
<Directory %(document_root)s>
Options FollowSymLinks
AllowOverride All
Order allow,deny
Allow from all
</Directory>
DocumentRoot %(document_root)s
DirectoryIndex index.html index.php
# List of modules
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule dir_module modules/mod_dir.so
LoadModule php5_module modules/libphp5.so
LoadModule alias_module modules/mod_alias.so
[PHP]
engine = On
safe_mode = Off
expose_php = Off
error_reporting = E_ALL & ~(E_DEPRECATED|E_NOTICE|E_WARNING)
display_errors = On
display_startup_errors = Off
log_errors = On
log_errors_max_len = 1024
ignore_repeated_errors = Off
ignore_repeated_source = Off
session.save_path = "%(tmp_directory)s"
session.auto_start = 0
date.timezone = Europe/Paris
file_uploads = On
upload_max_filesize = 8M
post_max_size = 8M
magic_quotes_gpc = Off
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import signal
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
# Install apache
apache_config = dict(
pid_file=self.options['pid-file'],
lock_file=self.options['lock-file'],
ip=self.options['ip'],
port=self.options['port'],
error_log=self.options['error-log'],
access_log=self.options['access-log'],
backend_url=self.options['url'],
)
httpd_conf = self.createFile(self.options['httpd-conf'],
self.substituteTemplate(self.getTemplateFilename('apache.in'),
apache_config)
)
path_list.append(httpd_conf)
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['httpd-binary'], '-f', self.options['httpd-conf'],
'-DFOREGROUND']
)
path_list.append(wrapper)
if os.path.exists(self.options['pid-file']):
# Reload apache configuration
with open(self.options['pid-file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
os.kill(pid, signal.SIGUSR1) # Graceful restart
return path_list
# Apache static configuration
# Automatically generated
# Basic server configuration
PidFile "%(pid_file)s"
LockFile "%(lock_file)s"
Listen %(ip)s:%(port)s
ServerAdmin someone@email
DefaultType text/plain
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
# Log configuration
ErrorLog "%(error_log)s"
LogLevel warn
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-Agent}i\"" combined
LogFormat "%%h %%{REMOTE_USER}i %%l %%u %%t \"%%r\" %%>s %%b" common
CustomLog "%(access_log)s" common
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
ProxyPass / %(backend_url)s
# List of modules
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule mime_module modules/mod_mime.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule dir_module modules/mod_dir.so
LoadModule alias_module modules/mod_alias.so
......@@ -117,7 +117,7 @@ class Request(Recipe):
if os.path.islink(link):
os.unlink(link)
elif os.path.exists(link):
raise OSError("%r file should be a symbolic link.")
raise OSError("%r file should be a symbolic link." % link)
os.symlink(key, key_file)
os.symlink(certificate, cert_file)
......
......@@ -2,6 +2,7 @@ import os
import subprocess
import time
import ConfigParser
import uuid
def popenCommunicate(command_list, input=None):
......@@ -44,7 +45,7 @@ class CertificateAuthority:
popenCommunicate([self.openssl_binary, 'req', '-nodes', '-config',
self.openssl_configuration, '-new', '-x509', '-extensions',
'v3_ca', '-keyout', self.key, '-out', self.certificate,
'-days', '10950'], 'Automatic Certificate Authority\n')
'-days', '10950'], 'Certificate Authority %s\n' % uuid.uuid1())
except:
try:
for f in file_list:
......
......@@ -24,102 +24,88 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import BaseSlapRecipe
import os
import subprocess
import pkg_resources
import zc.buildout
import zc.recipe.egg
import sys
class Recipe(BaseSlapRecipe):
def getTemplateFilename(self, template_name):
return pkg_resources.resource_filename(__name__,
'template/%s' % template_name)
def _install(self):
self.path_list = []
self.requirements, self.ws = self.egg.working_set()
document_root = self.createDataDirectory('www')
apache_config = self.installApache(document_root)
self.setConnectionUrl(scheme='webdavs',
host=apache_config['ip'],
port=apache_config['port'],
auth=(apache_config['user'],
apache_config['password']))
return self.path_list
def installApache(self, document_root, ip=None, port=None):
if ip is None:
ip=self.getGlobalIPv6Address()
if port is None:
port = '9080'
htpasswd_config = self.createHtpasswd()
ssl_config = self.createCertificate(size=2048)
import httplib
import base64
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def _options(self, options):
options['password'] = self.generatePassword()
def install(self):
path_list = []
htpasswd_file = self.options['htpasswd-file']
# Create or empty the file
open(htpasswd_file, 'w').close()
path_list.append(htpasswd_file)
user = self.options['user']
password = self.options['password']
subprocess.check_call([self.options['apache-htpasswd'],
'-bc', htpasswd_file,
user, password
])
apache_config = dict(
pid_file=os.path.join(self.run_directory, 'httpd.pid'),
lock_file=os.path.join(self.run_directory, 'httpd.lock'),
davlock_db=os.path.join(self.run_directory, 'davdb.lock'),
ip=ip,
port=port,
error_log=os.path.join(self.log_directory, 'httpd-error.log'),
access_log=os.path.join(self.log_directory, 'httpd-access.log'),
document_root=document_root,
modules_dir=self.options['apache_modules_dir'],
mime_types=self.options['apache_mime_file'],
server_root=self.work_directory,
email_address='admin@vifib.net',
htpasswd_file=htpasswd_config['htpasswd_file'],
ssl_certificate=ssl_config['certificate'],
ssl_key=ssl_config['key'],
pid_file=self.options['pid-file'],
lock_file=self.options['lock-file'],
davlock_db=self.options['davdb-lock'],
ip=self.options['ip'],
port=self.options['port'],
error_log=self.options['error-log'],
access_log=self.options['access-log'],
document_root=self.options['htdocs'],
modules_dir=self.options['apache-modules-dir'],
mime_types=self.options['apache-mime-file'],
server_root=self.options['root'],
email_address=self.options['email-address'],
htpasswd_file=htpasswd_file,
ssl_certificate=self.options['cert-file'],
ssl_key=self.options['key-file'],
)
httpd_config_file = self.createConfigurationFile('httpd.conf',
# Create logfiles
for log in [self.options['error-log'], self.options['access-log']]:
open(log, 'a').close()
config_file = self.createFile(self.options['conf-file'],
self.substituteTemplate(self.getTemplateFilename('httpd.conf.in'),
apache_config))
self.path_list.append(httpd_config_file)
apache_runner = zc.buildout.easy_install.scripts(
[('httpd', 'slapos.recipe.librecipe.execute', 'execute')],
self.ws, sys.executable, self.wrapper_directory,
arguments=[self.options['apache_binary'],
'-f', httpd_config_file,
'-DFOREGROUND',
]
)[0]
self.path_list.append(apache_runner)
return dict(ip=apache_config['ip'],
port=apache_config['port'],
user=htpasswd_config['user'],
password=htpasswd_config['password']
apache_config)
)
path_list.append(config_file)
def createHtpasswd(self):
htpasswd = self.createConfigurationFile('htpasswd', '')
self.path_list.append(htpasswd)
password = self.generatePassword()
user = 'user'
subprocess.check_call([self.options['apache_htpasswd'],
'-bc', htpasswd,
user, password
])
return dict(htpasswd_file=htpasswd,
user=user,
password=password)
def createCertificate(self, size=1024, subject='/C=FR/L=Marcq-en-Baroeul/O=Nexedi'):
key_file = os.path.join(self.etc_directory, 'httpd.key')
self.path_list.append(key_file)
certificate_file = os.path.join(self.etc_directory, 'httpd.crt')
self.path_list.append(certificate_file)
subprocess.check_call([self.options['openssl_binary'],
'req', '-x509', '-nodes',
'-newkey', 'rsa:%s' % size,
'-subj', str(subject),
'-out', certificate_file,
'-keyout', key_file
])
return dict(key=key_file,
certificate=certificate_file)
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['apache-binary'], '-f', config_file, '-DFOREGROUND'])
path_list.append(wrapper)
promise = self.createPythonScript(self.options['promise'],
__name__ + '.promise',
dict(host=self.options['ip'], port=int(self.options['port']),
user=self.options['user'], password=self.options['password'])
)
path_list.append(promise)
return path_list
def promise(args):
host = args['host']
port = args['port']
user = args['user']
password = args['password']
connection = httplib.HTTPSConnection(host, port)
auth = base64.b64encode('%s:%s' % (user, password))
connection.request('OPTIONS', '/',
headers=dict(
Authorization='Basic %s' % auth,
)
)
connection.getresponse()
return 0
......@@ -58,13 +58,10 @@ class Recipe(GenericBaseRecipe):
class Part(GenericBaseRecipe):
def _options(self, options):
if 'name' not in options:
options['name'] = self.name
def install(self):
cron_d = self.options['cron-entries']
filename = os.path.join(cron_d, 'name')
name = self.options['name']
filename = os.path.join(cron_d, name)
with open(filename, 'w') as part:
part.write('%(frequency)s %(command)s\n' % {
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import itertools
from slapos.recipe.librecipe import GenericBaseRecipe
class KnownHostsFile(dict):
def __init__(self, filename):
self._filename = filename
def _load(self):
if os.path.exists(self._filename):
with open(self._filename, 'r') as keyfile:
for line in keyfile:
host, key = [column.strip() for column in line.split(' ', 1)]
self[host] = key
def _dump(self):
with open(self._filename, 'w') as keyfile:
for key, value in self.items():
if key is not None and value is not None:
keyfile.write('%(host)s %(key)s\n' % {'host': key,
'key': value})
def __enter__(self):
self._load()
def __exit__(self, exc_type, exc_value, traceback):
self._dump()
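# Usage sketch (hypothetical path): the dict loads and persists itself when
# used as a context manager.
#   known_hosts = KnownHostsFile('/srv/instance/.ssh/known_hosts')
#   with known_hosts:
#     known_hosts['example.org'] = 'ssh-rsa AAAA...'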
class AuthorizedKeysFile(object):
def __init__(self, filename):
self.filename = filename
def append(self, key):
"""Append the key to the file if the key's not in the file
"""
# Create the file if it does not exist
try:
file_ = os.open(self.filename, os.O_CREAT | os.O_EXCL)
os.close(file_)
except:
pass
with open(self.filename, 'r') as keyfile:
# itertools.imap avoids loading the whole authorized_keys file into
# memory, which would be counterproductive.
present = (key.strip() in itertools.imap(lambda k: k.strip(),
keyfile))
try:
keyfile.seek(-1, os.SEEK_END)
ended_by_newline = (keyfile.read() == '\n')
except IOError:
ended_by_newline = True
if not present:
with open(self.filename, 'a') as keyfile:
if not ended_by_newline:
keyfile.write('\n')
keyfile.write(key.strip())
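# Usage sketch (hypothetical values): append() is idempotent, a key already
# present in the file is not written twice.
#   authorized_keys = AuthorizedKeysFile('/srv/instance/.ssh/authorized_keys')
#   authorized_keys.append('ssh-rsa AAAA... user@host')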
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
dropbear_cmd = [self.options['dropbear-binary']]
# Don't fork into background
dropbear_cmd.append('-F')
# Log on stderr
dropbear_cmd.append('-E')
# Don't display motd
dropbear_cmd.append('-m')
# Disable password login
dropbear_cmd.extend(['-s', '-g'])
# Disable port forwarding
dropbear_cmd.extend(['-j', '-k'])
host = self.options['host']
if ':' in host:
host = '[%s]' % host
port = self.options['port']
binding_address = '%s:%s' % (host, port)
dropbear_cmd.extend(['-p', binding_address])
# Single user mode
dropbear_cmd.append('-n')
if 'dss-keyfile' in self.options:
dropbear_cmd.extend(['-d', self.options['dss-keyfile']])
else:
dropbear_cmd.extend(['-r', self.options['rsa-keyfile']])
env = {}
if 'home' in self.options:
env['DROPBEAR_OVERRIDE_HOME'] = self.options['home']
if 'shell' in self.options:
env['DROPBEAR_OVERRIDE_SHELL'] = self.options['shell']
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.executee',
(dropbear_cmd, env, )
)
path_list.append(wrapper)
return path_list
class Client(GenericBaseRecipe):
def install(self):
env = dict()
if 'home' in self.options:
env['HOME'] = self.options['home']
self.createDirectory(self.options['home'], '.ssh')
dropbear_cmd = [self.options['dbclient-binary'], '-T']
if self.optionIsTrue('force-host-key', default=False):
dropbear_cmd.extend(['-y'])
if 'identity-file' in self.options:
dropbear_cmd.extend(['-i', self.options['identity-file']])
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.executee',
(dropbear_cmd, env, )
)
return [wrapper]
class AddAuthorizedKey(GenericBaseRecipe):
def install(self):
path_list = []
ssh = self.createDirectory(self.options['home'], '.ssh')
path_list.append(ssh)
authorized_keys = AuthorizedKeysFile(os.path.join(ssh, 'authorized_keys'))
authorized_keys.append(self.options['key'])
return path_list
......@@ -30,15 +30,23 @@ class Recipe(GenericBaseRecipe):
def install(self):
remote_url = self.options['remote_backup']
backup_directory = self.options['directory']
remote_url = self.options['remote-backup']
backup_directory = self.options['local-directory']
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['duplicity_binary'], '--no-encryption',
backup_directory, remote_url]
)
return [wrapper]
cmd = [self.options['duplicity-binary'],]
options = ['--no-encryption', '--archive-dir', self.options['cache']]
if self.optionIsTrue('recover', False):
options.append('--force')
# duplicity [options] remote backup
cmd.extend(options)
cmd.extend([remote_url, backup_directory])
else:
# duplicity [options] local remote
cmd.extend(options)
cmd.extend([backup_directory, remote_url])
wrapper = self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute', cmd)
return [wrapper]
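# Illustrative resulting command lines (hypothetical option values):
#   backup:  duplicity --no-encryption --archive-dir <cache> <local-directory> <remote-backup>
#   recover: duplicity --no-encryption --archive-dir <cache> --force <remote-backup> <local-directory>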
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
commandline = [self.options['equeue-binary']]
commandline.extend(['--database', self.options['database']])
commandline.extend(['-l', self.options['log']])
if 'loglevel' in self.options:
commandline.extend(['--loglevel', self.options['loglevel']])
commandline.append(self.options['socket'])
return [self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
commandline,
)]
......@@ -25,130 +25,14 @@
#
##############################################################################
from slapos.recipe.librecipe import BaseSlapRecipe
import binascii
import os
import pkg_resources
import pprint
import hashlib
import sys
import zc.buildout
import zc.recipe.egg
import ConfigParser
import re
_isurl = re.compile('([a-zA-Z0-9+.-]+)://').match
# based on Zope2.utilities.mkzopeinstance.write_inituser
def Zope2InitUser(path, username, password):
open(path, 'w').write('')
os.chmod(path, 0600)
open(path, "w").write('%s:{SHA}%s\n' % (
username,binascii.b2a_base64(hashlib.sha1(password).digest())[:-1]))
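# For illustration, the generated inituser file contains a single line of the
# form '<username>:{SHA}<base64(sha1(password))>', which Zope uses on first
# startup to create the initial management user.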
class Recipe(BaseSlapRecipe):
def getTemplateFilename(self, template_name):
return pkg_resources.resource_filename(__name__,
'template/%s' % template_name)
site_id = 'erp5'
def _install(self):
self.path_list = []
self.requirements, self.ws = self.egg.working_set()
# self.cron_d is a directory, where cron jobs can be registered
self.cron_d = self.installCrond()
self.logrotate_d, self.logrotate_backup = self.installLogrotate()
self.killpidfromfile = zc.buildout.easy_install.scripts(
[('killpidfromfile', __name__ + '.killpidfromfile',
'killpidfromfile')], self.ws, sys.executable, self.bin_directory)[0]
self.path_list.append(self.killpidfromfile)
ca_conf = self.installCertificateAuthority()
memcached_conf = self.installMemcached(ip=self.getLocalIPv4Address(),
port=11000)
kumo_conf = self.installKumo(self.getLocalIPv4Address())
conversion_server_conf = self.installConversionServer(
self.getLocalIPv4Address(), 23000, 23060)
mysql_conf = self.installMysqlServer(self.getLocalIPv4Address(), 45678)
user, password = self.installERP5()
if self.parameter_dict.get("slap_software_type", "").lower() == "cluster":
# Site access is done by HAProxy
zope_access, site_access, key_access = self.installZopeCluster(ca_conf)
else:
zope_access = self.installZopeStandalone()
site_access = zope_access
key_access = None
key, certificate = self.requestCertificate('Login Based Access')
apache_conf = dict(
apache_login=self.installBackendApache(ip=self.getGlobalIPv6Address(),
port=13000, backend=site_access, key=key, certificate=certificate))
connection_dict = dict(site_url=apache_conf['apache_login'])
if self.parameter_dict.get("domain_name") is not None:
connection_dict["backend_url"] = apache_conf['apache_login']
connection_dict["domain_ip"] = self.getGlobalIPv6Address()
# XXX Define a fake domain_name for now.
frontend_name = self.parameter_dict.get("domain_name")
frontend_key, frontend_certificate = \
self.requestCertificate(frontend_name)
connection_dict["site_url"] = self.installFrontendZopeApache(
ip=self.getGlobalIPv6Address(), port=4443, name=frontend_name,
frontend_path='/', backend_path='',
backend_url=apache_conf['apache_login'], key=frontend_key,
certificate=frontend_certificate)
default_bt5_list = []
if self.parameter_dict.get("flavour", "default") == 'configurator':
default_bt5_list = self.options.get("configurator_bt5_list", '').split()
self.installERP5Site(user, password, zope_access, mysql_conf,
conversion_server_conf, memcached_conf, kumo_conf,
self.site_id, default_bt5_list, ca_conf)
self.installTestRunner(ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf)
self.installTestSuiteRunner(ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf)
self.linkBinary()
connection_dict.update(**dict(
site_user=user,
site_password=password,
memcached_url=memcached_conf['memcached_url'],
kumo_url=kumo_conf['kumo_address']
))
if key_access is not None:
connection_dict['key_access'] = key_access
if self.options.get('fulltext_search', None) == 'sphinx':
sphinx_searchd = self.installSphinxSearchd(ip=self.getLocalIPv4Address())
connection_dict.update(**sphinx_searchd)
self.setConnectionDict(connection_dict)
return self.path_list
def installZopeStandalone(self):
""" Install a single Zope instance without ZEO Server.
"""
zodb_dir = os.path.join(self.data_root_directory, 'zodb')
self._createDirectory(zodb_dir)
zodb_root_path = os.path.join(zodb_dir, 'main.fs')
thread_amount_per_zope = int(self.options.get(
'single_zope_thread_amount', 4))
zodb_cache_size = int(self.options.get('zodb_cache_size', 5000))
return self.installZope(ip=self.getLocalIPv4Address(),
port=12000 + 1, name='zope_%s' % 1,
zodb_configuration_string=self.substituteTemplate(
self.getTemplateFilename('zope-zodb-snippet.conf.in'),
dict(zodb_root_path=zodb_root_path,
zodb_cache_size=zodb_cache_size)),
with_timerservice=True,
thread_amount=thread_amount_per_zope)
raise NotImplementedError('Outdated.')
def installKeyAuthorisationApache(self, ipv6, port, backend, key, certificate,
ca_conf, key_auth_path='/'):
......@@ -209,820 +93,6 @@ SSLCARevocationPath %(ca_crl)s"""
else:
return 'https://%(ip)s:%(port)s' % apache_conf
def installZopeCluster(self, ca_conf=None):
""" Install ERP5 using ZEO Cluster
"""
site_check_path = '/%s/getId' % self.site_id
thread_amount_per_zope = int(self.options.get(
'cluster_zope_thread_amount', 1))
activity_node_amount = int(self.options.get(
"cluster_activity_node_amount", 2))
user_node_amount = int(self.options.get(
"cluster_user_node_amount", 2))
key_auth_node_amount = int(self.options.get(
"key_auth_node_amount", 0))
ip = self.getLocalIPv4Address()
storage_dict = self._requestZeoFileStorage('Zeo Server 1', 'main')
zeo_conf = self.installZeo(ip)
tidstorage_config = dict(host=ip, port='6001')
# XXX How to define good values for this?
mount_point = '/'
zodb_cache_size = 5000
zeo_client_cache_size = '20MB'
check_path = '/erp5/account_module'
known_tid_storage_identifier_dict = {}
known_tid_storage_identifier_dict[
(((storage_dict['ip'],storage_dict['port']),), storage_dict['storage_name'])
] = (zeo_conf[storage_dict['storage_name']]['path'], check_path or mount_point)
zodb_configuration_string = self.substituteTemplate(
self.getTemplateFilename('zope-zeo-snippet.conf.in'), dict(
storage_name=storage_dict['storage_name'],
address='%s:%s' % (storage_dict['ip'], storage_dict['port']),
mount_point=mount_point, zodb_cache_size=zodb_cache_size,
zeo_client_cache_size=zeo_client_cache_size
))
zope_port = 12000
# One Distribution Node (Single Thread Always)
zope_port += 1
self.installZope(ip, zope_port, 'zope_distribution', with_timerservice=True,
zodb_configuration_string=zodb_configuration_string,
tidstorage_config=tidstorage_config)
# Activity Nodes (Single Thread Always)
for i in range(activity_node_amount):
zope_port += 1
self.installZope(ip, zope_port, 'zope_activity_%s' % i,
with_timerservice=True,
zodb_configuration_string=zodb_configuration_string,
tidstorage_config=tidstorage_config)
# Four Web Page Nodes (Human access)
login_url_list = []
for i in range(user_node_amount):
zope_port += 1
login_url_list.append(self.installZope(ip, zope_port,
'zope_login_%s' % i, with_timerservice=False,
zodb_configuration_string=zodb_configuration_string,
tidstorage_config=tidstorage_config,
thread_amount=thread_amount_per_zope))
login_haproxy = self.installHaproxy(ip, 15001, 'login',
site_check_path, login_url_list)
key_access = None
if key_auth_node_amount > 0:
service_url_list = []
for i in range(key_auth_node_amount):
zope_port += 1
service_url_list.append(self.installZope(ip, zope_port,
'zope_service_%s' % i, with_timerservice=False,
zodb_configuration_string=zodb_configuration_string,
tidstorage_config=tidstorage_config))
service_haproxy = self.installHaproxy(ip, 15000, 'service',
site_check_path, service_url_list)
key_auth_key, key_auth_certificate = self.requestCertificate(
'Key Based Access')
key_access = self.installKeyAuthorisationApache(True, 15500,
service_haproxy, key_auth_key, key_auth_certificate, ca_conf)
self.installTidStorage(tidstorage_config['host'],
tidstorage_config['port'],
known_tid_storage_identifier_dict, 'http://' + login_haproxy)
return login_url_list[-1], login_haproxy, key_access
def _requestZeoFileStorage(self, server_name, storage_name):
"""Local, slap.request compatible, call to ask for filestorage on Zeo
filter_kw can be used to select specific Zeo server
Someday in future it will be possible to invoke:
self.request(
software_release=self.computer_partition.getSoftwareRelease().getURI(),
software_type='Zeo Server',
partition_reference=storage_name,
filter_kw={'server_name': server_name},
shared=True
)
Thanks to this it will be possible to select precisely on which server
each storage will be placed.
"""
base_port = 35001
if getattr(self, '_zeo_storage_dict', None) is None:
self._zeo_storage_dict = {}
if getattr(self, '_zeo_storage_port_dict', None) is None:
self._zeo_storage_port_dict = {}
self._zeo_storage_port_dict.setdefault(server_name,
base_port+len(self._zeo_storage_port_dict))
self._zeo_storage_dict[server_name] = self._zeo_storage_dict.get(
server_name, []) + [storage_name]
return dict(
ip=self.getLocalIPv4Address(),
port=self._zeo_storage_port_dict[server_name],
storage_name=storage_name
)
def installLogrotate(self):
"""Installs logortate main configuration file and registers its to cron"""
logrotate_d = os.path.abspath(os.path.join(self.etc_directory,
'logrotate.d'))
self._createDirectory(logrotate_d)
logrotate_backup = self.createBackupDirectory('logrotate')
logrotate_conf = self.createConfigurationFile("logrotate.conf",
"include %s" % logrotate_d)
logrotate_cron = os.path.join(self.cron_d, 'logrotate')
state_file = os.path.join(self.data_root_directory, 'logrotate.status')
open(logrotate_cron, 'w').write('0 0 * * * %s -s %s %s' %
(self.options['logrotate_binary'], state_file, logrotate_conf))
self.path_list.extend([logrotate_d, logrotate_conf, logrotate_cron])
return logrotate_d, logrotate_backup
def registerLogRotation(self, name, log_file_list, postrotate_script):
"""Register new log rotation requirement"""
open(os.path.join(self.logrotate_d, name), 'w').write(
self.substituteTemplate(self.getTemplateFilename(
'logrotate_entry.in'),
dict(file_list=' '.join(['"'+q+'"' for q in log_file_list]),
postrotate=postrotate_script, olddir=self.logrotate_backup)))
def linkBinary(self):
"""Links binaries to instance's bin directory for easier exposal"""
for linkline in self.options.get('link_binary_list', '').splitlines():
if not linkline:
continue
target = linkline.split()
if len(target) == 1:
target = target[0]
path, linkname = os.path.split(target)
else:
linkname = target[1]
target = target[0]
link = os.path.join(self.bin_directory, linkname)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
'Target link %r already exists but it is not a link' % link)
os.unlink(link)
os.symlink(target, link)
self.logger.debug('Created link %r -> %r' % (link, target))
self.path_list.append(link)
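# Illustrative link_binary_list value (hypothetical paths), one target per
# line, optionally followed by a custom link name:
#   /opt/software/bin/mysql
#   /opt/software/bin/openssl openssl-bin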
def installKumo(self, ip, kumo_manager_port=13101, kumo_server_port=13201,
kumo_server_listen_port=13202, kumo_gateway_port=13301):
# XXX: kumo does not store its pid in a file unless it runs as a daemon,
# but running daemons is incompatible with SlapOS, so there is currently
# no way to get Kumo pid files in order to rotate logs and send signals to
# the processes
config = dict(
kumo_gateway_binary=self.options['kumo_gateway_binary'],
kumo_gateway_ip=ip,
kumo_gateway_log=os.path.join(self.log_directory, "kumo-gateway.log"),
kumo_manager_binary=self.options['kumo_manager_binary'],
kumo_manager_ip=ip,
kumo_manager_log=os.path.join(self.log_directory, "kumo-manager.log"),
kumo_server_binary=self.options['kumo_server_binary'],
kumo_server_ip=ip,
kumo_server_log=os.path.join(self.log_directory, "kumo-server.log"),
kumo_server_storage=os.path.join(self.data_root_directory, "kumodb.tch"),
kumo_manager_port=kumo_manager_port,
kumo_server_port=kumo_server_port,
kumo_server_listen_port=kumo_server_listen_port,
kumo_gateway_port=kumo_gateway_port
)
self.path_list.append(self.createRunningWrapper('kumo_gateway',
self.substituteTemplate(self.getTemplateFilename('kumo_gateway.in'),
config)))
self.path_list.append(self.createRunningWrapper('kumo_manager',
self.substituteTemplate(self.getTemplateFilename('kumo_manager.in'),
config)))
self.path_list.append(self.createRunningWrapper('kumo_server',
self.substituteTemplate(self.getTemplateFilename('kumo_server.in'),
config)))
return dict(
kumo_address = '%s:%s' % (config['kumo_gateway_ip'],
config['kumo_gateway_port']),
kumo_gateway_ip=config['kumo_gateway_ip'],
kumo_gateway_port=config['kumo_gateway_port'],
)
def installMemcached(self, ip, port):
config = dict(
memcached_binary=self.options['memcached_binary'],
memcached_ip=ip,
memcached_port=port,
)
self.path_list.append(self.createRunningWrapper('memcached',
self.substituteTemplate(self.getTemplateFilename('memcached.in'),
config)))
return dict(memcached_url='%s:%s' %
(config['memcached_ip'], config['memcached_port']),
memcached_ip=config['memcached_ip'],
memcached_port=config['memcached_port'])
def installSphinxSearchd(self, ip, port=9312, sql_port=9306):
data_directory = self.createDataDirectory('sphinx')
sphinx_conf_path = self.createConfigurationFile('sphinx.conf',
self.substituteTemplate(self.getTemplateFilename('sphinx.conf.in'), dict(
ip_address=ip,
port=port,
sql_port=sql_port,
data_directory=data_directory,
log_directory=self.log_directory,
)))
self.path_list.append(sphinx_conf_path)
wrapper = zc.buildout.easy_install.scripts([('sphinx_searchd',
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['sphinx_searchd_binary'].strip(), '-c', sphinx_conf_path, '--nodetach']
)[0]
self.path_list.append(wrapper)
return dict(sphinx_searchd_ip=ip,
sphinx_searchd_port=port,
sphinx_searchd_sql_port=sql_port)
def installTestRunner(self, ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf):
"""Installs bin/runUnitTest executable to run all tests using
bin/runUnitTest"""
testinstance = self.createDataDirectory('testinstance')
# workaround wrong assumptions of ERP5Type.tests.runUnitTest about
# directory existence
unit_test = os.path.join(testinstance, 'unit_test')
connection_string_list = []
for test_database, test_user, test_password in \
mysql_conf['mysql_parallel_test_dict'][-4:]:
connection_string_list.append(
'%s@%s:%s %s %s' % (test_database, mysql_conf['ip'],
mysql_conf['tcp_port'], test_user, test_password))
if not os.path.isdir(unit_test):
os.mkdir(unit_test)
runUnitTest = zc.buildout.easy_install.scripts([
('runUnitTest', __name__ + '.testrunner', 'runUnitTest')],
self.ws, sys.executable, self.bin_directory, arguments=[dict(
instance_home=testinstance,
prepend_path=self.bin_directory,
openssl_binary=self.options['openssl_binary'],
test_ca_path=ca_conf['certificate_authority_path'],
call_list=[self.options['runUnitTest_binary'],
'--erp5_sql_connection_string', '%(mysql_test_database)s@%'
'(ip)s:%(tcp_port)s %(mysql_test_user)s '
'%(mysql_test_password)s' % mysql_conf,
'--extra_sql_connection_string_list',','.join(connection_string_list),
'--conversion_server_hostname=%(conversion_server_ip)s' % \
conversion_server_conf,
'--conversion_server_port=%(conversion_server_port)s' % \
conversion_server_conf,
'--volatile_memcached_server_hostname=%(memcached_ip)s' % memcached_conf,
'--volatile_memcached_server_port=%(memcached_port)s' % memcached_conf,
'--persistent_memcached_server_hostname=%(kumo_gateway_ip)s' % kumo_conf,
'--persistent_memcached_server_port=%(kumo_gateway_port)s' % kumo_conf,
]
)])[0]
self.path_list.append(runUnitTest)
def installTestSuiteRunner(self, ca_conf, mysql_conf, conversion_server_conf,
memcached_conf, kumo_conf):
"""Installs bin/runTestSuite executable to run all tests using
bin/runUnitTest"""
testinstance = self.createDataDirectory('test_suite_instance')
# workaround wrong assumptions of ERP5Type.tests.runUnitTest about
# directory existence
unit_test = os.path.join(testinstance, 'unit_test')
if not os.path.isdir(unit_test):
os.mkdir(unit_test)
connection_string_list = []
for test_database, test_user, test_password in \
mysql_conf['mysql_parallel_test_dict']:
connection_string_list.append(
'%s@%s:%s %s %s' % (test_database, mysql_conf['ip'],
mysql_conf['tcp_port'], test_user, test_password))
command = zc.buildout.easy_install.scripts([
('runTestSuite', __name__ + '.test_suite_runner', 'runTestSuite')],
self.ws, sys.executable, self.bin_directory, arguments=[dict(
instance_home=testinstance,
prepend_path=self.bin_directory,
openssl_binary=self.options['openssl_binary'],
test_ca_path=ca_conf['certificate_authority_path'],
call_list=[self.options['runTestSuite_binary'],
'--db_list', ','.join(connection_string_list),
'--conversion_server_hostname=%(conversion_server_ip)s' % \
conversion_server_conf,
'--conversion_server_port=%(conversion_server_port)s' % \
conversion_server_conf,
'--volatile_memcached_server_hostname=%(memcached_ip)s' % memcached_conf,
'--volatile_memcached_server_port=%(memcached_port)s' % memcached_conf,
'--persistent_memcached_server_hostname=%(kumo_gateway_ip)s' % kumo_conf,
'--persistent_memcached_server_port=%(kumo_gateway_port)s' % kumo_conf,
]
)])[0]
self.path_list.append(command)
def installCrond(self):
timestamps = self.createDataDirectory('cronstamps')
cron_output = os.path.join(self.log_directory, 'cron-output')
self._createDirectory(cron_output)
catcher = zc.buildout.easy_install.scripts([('catchcron',
__name__ + '.catdatefile', 'catdatefile')], self.ws, sys.executable,
self.bin_directory, arguments=[cron_output])[0]
self.path_list.append(catcher)
cron_d = os.path.join(self.etc_directory, 'cron.d')
crontabs = os.path.join(self.etc_directory, 'crontabs')
self._createDirectory(cron_d)
self._createDirectory(crontabs)
wrapper = zc.buildout.easy_install.scripts([('crond',
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['dcrond_binary'].strip(), '-s', cron_d, '-c', crontabs,
'-t', timestamps, '-f', '-l', '5', '-M', catcher]
)[0]
self.path_list.append(wrapper)
return cron_d
def requestCertificate(self, name):
hash = hashlib.sha512(name).hexdigest()
key = os.path.join(self.ca_private, hash + self.ca_key_ext)
certificate = os.path.join(self.ca_certs, hash + self.ca_crt_ext)
parser = ConfigParser.RawConfigParser()
parser.add_section('certificate')
parser.set('certificate', 'name', name)
parser.set('certificate', 'key_file', key)
parser.set('certificate', 'certificate_file', certificate)
parser.write(open(os.path.join(self.ca_request_dir, hash), 'w'))
return key, certificate
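# Illustrative content of a generated request file (hypothetical name; the
# file name is the sha512 hex digest of the certificate name):
#   [certificate]
#   name = Login Based Access
#   key_file = <ca_private>/<sha512>.key
#   certificate_file = <ca_certs>/<sha512>.crt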
def installCertificateAuthority(self, ca_country_code='XX',
ca_email='xx@example.com', ca_state='State', ca_city='City',
ca_company='Company'):
backup_path = self.createBackupDirectory('ca')
self.ca_dir = os.path.join(self.data_root_directory, 'ca')
self._createDirectory(self.ca_dir)
self.ca_request_dir = os.path.join(self.ca_dir, 'requests')
self._createDirectory(self.ca_request_dir)
config = dict(ca_dir=self.ca_dir, request_dir=self.ca_request_dir)
self.ca_private = os.path.join(self.ca_dir, 'private')
self.ca_certs = os.path.join(self.ca_dir, 'certs')
self.ca_crl = os.path.join(self.ca_dir, 'crl')
self.ca_newcerts = os.path.join(self.ca_dir, 'newcerts')
self.ca_key_ext = '.key'
self.ca_crt_ext = '.crt'
for d in [self.ca_private, self.ca_crl, self.ca_newcerts, self.ca_certs]:
self._createDirectory(d)
for f in ['crlnumber', 'serial']:
if not os.path.exists(os.path.join(self.ca_dir, f)):
open(os.path.join(self.ca_dir, f), 'w').write('01')
if not os.path.exists(os.path.join(self.ca_dir, 'index.txt')):
open(os.path.join(self.ca_dir, 'index.txt'), 'w').write('')
openssl_configuration = os.path.join(self.ca_dir, 'openssl.cnf')
config.update(
working_directory=self.ca_dir,
country_code=ca_country_code,
state=ca_state,
city=ca_city,
company=ca_company,
email_address=ca_email,
)
self._writeFile(openssl_configuration, pkg_resources.resource_string(
__name__, 'template/openssl.cnf.ca.in') % config)
self.path_list.extend(zc.buildout.easy_install.scripts([
('certificate_authority',
__name__ + '.certificate_authority', 'runCertificateAuthority')],
self.ws, sys.executable, self.wrapper_directory, arguments=[dict(
openssl_configuration=openssl_configuration,
openssl_binary=self.options['openssl_binary'],
certificate=os.path.join(self.ca_dir, 'cacert.pem'),
key=os.path.join(self.ca_private, 'cakey.pem'),
crl=os.path.join(self.ca_crl),
request_dir=self.ca_request_dir
)]))
# configure backup
backup_cron = os.path.join(self.cron_d, 'ca_rdiff_backup')
open(backup_cron, 'w').write(
'''0 0 * * * %(rdiff_backup)s %(source)s %(destination)s'''%dict(
rdiff_backup=self.options['rdiff_backup_binary'],
source=self.ca_dir,
destination=backup_path))
self.path_list.append(backup_cron)
return dict(
ca_certificate=os.path.join(config['ca_dir'], 'cacert.pem'),
ca_crl=os.path.join(config['ca_dir'], 'crl'),
certificate_authority_path=config['ca_dir']
)
def installConversionServer(self, ip, port, openoffice_port):
name = 'conversion_server'
working_directory = self.createDataDirectory(name)
conversion_server_dict = dict(
working_path=working_directory,
uno_path=self.options['ooo_uno_path'],
office_binary_path=self.options['ooo_binary_path'],
ip=ip,
port=port,
openoffice_port=openoffice_port,
)
for env_line in self.options['environment'].splitlines():
env_line = env_line.strip()
if not env_line:
continue
if '=' in env_line:
env_key, env_value = env_line.split('=')
conversion_server_dict[env_key.strip()] = env_value.strip()
else:
raise zc.buildout.UserError('Line %r in environment parameter is '
'incorrect' % env_line)
config_file = self.createConfigurationFile(name + '.cfg',
self.substituteTemplate(self.getTemplateFilename('cloudooo.cfg.in'),
conversion_server_dict))
self.path_list.append(config_file)
self.path_list.extend(zc.buildout.easy_install.scripts([(name,
'slapos.recipe.librecipe.execute', 'execute_with_signal_translation')], self.ws,
sys.executable, self.wrapper_directory,
arguments=[self.options['ooo_paster'].strip(), 'serve', config_file]))
return {
name + '_port': conversion_server_dict['port'],
name + '_ip': conversion_server_dict['ip']
}
def installHaproxy(self, ip, port, name, server_check_path, url_list):
# inter must be quite short in order to quickly detect an unresponsive node
# and to quickly detect a node which is back.
# rise must be the minimal possible value: 1; indeed, a node which is back
# does not need to sleep any longer and we can give it work immediately.
# fall should be quite short: with inter at 3 and fall at 2, a node will be
# considered dead after 6 seconds.
# maxconn should be set to the maximum number of threads we have per zope,
# so that haproxy will manage the queue of requests with the possibility to
# move a request to another node if the initially selected one is dead.
# maxqueue is the number of waiting requests in the queue of every zope
# client. It makes sure that no single zope client handles all the work
# while other clients are doing nothing. This was happening even though we
# have round-robin distribution, because when a node dies for some seconds,
# all requests are dispatched to the other nodes, and then users stick to
# those nodes and do not come back. Please note this option is not an issue
# if you have more than (maxqueue * node_quantity) requests because haproxy
# will handle a top-level queue.
server_template = """ server %(name)s %(address)s cookie %(name)s check inter 3s rise 1 fall 2 maxqueue 5 maxconn %(cluster_zope_thread_amount)s"""
config = dict(name=name, ip=ip, port=port,
server_check_path=server_check_path,)
i = 1
server_list = []
cluster_zope_thread_amount = self.options.get('cluster_zope_thread_amount', 1)
for url in url_list:
server_list.append(server_template % dict(name='%s_%s' % (name, i),
address=url, cluster_zope_thread_amount=cluster_zope_thread_amount))
i += 1
config['server_text'] = '\n'.join(server_list)
haproxy_conf_path = self.createConfigurationFile('haproxy_%s.cfg' % name,
self.substituteTemplate(self.getTemplateFilename('haproxy.cfg.in'),
config))
self.path_list.append(haproxy_conf_path)
wrapper = zc.buildout.easy_install.scripts([('haproxy_%s' % name,
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['haproxy_binary'].strip(), '-f', haproxy_conf_path]
)[0]
self.path_list.append(wrapper)
return '%s:%s' % (ip, port)
def installERP5(self):
"""
All Zope instances have to share the files created by portal_classes
(until everything is integrated into the ZODB).
So, do not request separate Zope instances; create multiple ones in the same partition.
"""
# Create instance directories
self.erp5_directory = self.createDataDirectory('erp5shared')
# Create init user
password = self.generatePassword()
# XXX Unhardcode me please
user = 'zope'
Zope2InitUser(
os.path.join(self.erp5_directory, "inituser"), user, password)
self._createDirectory(self.erp5_directory)
for directory in (
'Constraint',
'Document',
'Extensions',
'PropertySheet',
'import',
'lib',
'tests',
'Products',
'etc',
):
self._createDirectory(os.path.join(self.erp5_directory, directory))
self._createDirectory(os.path.join(self.erp5_directory, 'etc',
'package-includes'))
# Symlink to BT5 repositories defined in instance config.
# Those paths will eventually end up in the ZODB, and having symlinks
# inside the XXX makes it possible to reuse such ZODB with another software
# release[ version].
# Note: this path cannot be used for development, it's really just a
# read-only repository.
repository_path = os.path.join(self.var_directory, "bt5_repository")
if not os.path.isdir(repository_path):
os.mkdir(repository_path)
self.path_list.append(repository_path)
self.bt5_repository_list = []
append = self.bt5_repository_list.append
for repository in self.options.get('bt5_repository_list', '').split():
repository = repository.strip()
if not repository:
continue
if _isurl(repository) and not repository.startswith("file://"):
# XXX: assume it's a valid URL
append(repository)
continue
if repository.startswith('file://'):
repository = repository.replace('file://', '', '')
if os.path.isabs(repository):
repo_id = hashlib.sha1(repository).hexdigest()
link = os.path.join(repository_path, repo_id)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
'Target link %r already exists but it is not a link' % link)
os.unlink(link)
os.symlink(repository, link)
self.logger.debug('Created link %r -> %r' % (link, repository_path))
# Always provide a URL-Type
append("file://" + link)
return user, password
def installERP5Site(self, user, password, zope_access, mysql_conf,
conversion_server_conf=None, memcached_conf=None,
kumo_conf=None,
erp5_site_id='erp5', default_bt5_list=[], ca_conf={},
supervisor_controlled=True):
"""
Create a script to automatically set up an erp5 site (controlled by
supervisor by default) on available zope and mysql environments.
"""
conversion_server = None
if conversion_server_conf is not None:
conversion_server = "%s:%s" % (conversion_server_conf['conversion_server_ip'],
conversion_server_conf['conversion_server_port'])
if memcached_conf is None:
memcached_conf = {}
if kumo_conf is None:
kumo_conf = {}
# XXX Conversion server and memcache server coordinates are not relevant
# for pure site creation.
assert mysql_conf['mysql_user'] and mysql_conf['mysql_password'], \
"ZMySQLDA requires a user and a password for socket connections"
# XXX Use socket access to prevent unwanted connections to original MySQL
# server when cloning an existing ERP5 instance.
# TCP will be required if MySQL is in a different partition/server.
mysql_connection_string = "%(mysql_database)s %(mysql_user)s %(mysql_password)s %(socket)s" % mysql_conf
bt5_list = self.parameter_dict.get("bt5_list", "").split() or default_bt5_list
bt5_repository_list = self.parameter_dict.get("bt5_repository_list", "").split() \
or getattr(self, 'bt5_repository_list', [])
erp5_update_directory = supervisor_controlled and self.wrapper_directory or \
self.bin_directory
script = zc.buildout.easy_install.scripts([('erp5_update',
__name__ + '.erp5', 'updateERP5')], self.ws,
sys.executable, erp5_update_directory,
arguments=[erp5_site_id,
mysql_connection_string,
[user, password, zope_access],
memcached_conf.get('memcached_url'),
conversion_server,
kumo_conf.get("kumo_address"),
bt5_list,
bt5_repository_list,
ca_conf.get('certificate_authority_path'),
self.options.get('openssl_binary')])
self.path_list.extend(script)
return []
def installZeo(self, ip):
zodb_dir = os.path.join(self.data_root_directory, 'zodb')
self._createDirectory(zodb_dir)
zeo_configuration_dict = {}
zeo_number = 0
for zeo_server in sorted(self._zeo_storage_dict.iterkeys()):
zeo_number += 1
zeo_event_log = os.path.join(self.log_directory, 'zeo-%s.log'% zeo_number)
zeo_pid = os.path.join(self.run_directory, 'zeo-%s.pid'% zeo_number)
self.registerLogRotation('zeo-%s' % zeo_number, [zeo_event_log],
self.killpidfromfile + ' ' + zeo_pid + ' SIGUSR2')
config = dict(
zeo_ip=ip,
zeo_port=self._zeo_storage_port_dict[zeo_server],
zeo_event_log=zeo_event_log,
zeo_pid=zeo_pid,
)
storage_definition_list = []
for storage_name in sorted(self._zeo_storage_dict[zeo_server]):
path = os.path.join(zodb_dir, '%s.fs' % storage_name)
storage_definition_list.append("""<filestorage %(storage_name)s>
path %(path)s
</filestorage>"""% dict(storage_name=storage_name, path=path))
zeo_configuration_dict[storage_name] = dict(
ip=ip,
port=config['zeo_port'],
path=path
)
config['zeo_filestorage_snippet'] = '\n'.join(storage_definition_list)
zeo_conf_path = self.createConfigurationFile('zeo-%s.conf' % zeo_number,
self.substituteTemplate(self.getTemplateFilename('zeo.conf.in'), config))
self.path_list.append(zeo_conf_path)
wrapper = zc.buildout.easy_install.scripts([('zeo_%s' % zeo_number,
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['runzeo_binary'].strip(), '-C', zeo_conf_path]
)[0]
self.path_list.append(wrapper)
return zeo_configuration_dict
def installRepozo(self, zodb_root_path):
"""
Add only repozo to cron (e.g. without tidstorage) allowing full
and incremental backups.
"""
backup_path = self.createBackupDirectory('zodb')
repozo_cron_path = os.path.join(self.cron_d, 'repozo')
repozo_cron_file = open(repozo_cron_path, 'w')
try:
repozo_cron_file.write('''
0 0 * * 0 %(repozo_binary)s --backup --full --file="%(zodb_root_path)s" --repository="%(backup_path)s"
0 * * * * %(repozo_binary)s --backup --file="%(zodb_root_path)s" --repository="%(backup_path)s"
''' % dict(repozo_binary=self.options['repozo_binary'],
zodb_root_path=zodb_root_path,
backup_path=backup_path))
finally:
repozo_cron_file.close()
self.path_list.append(repozo_cron_path)
def installTidStorage(self, ip, port, known_tid_storage_identifier_dict,
access_url):
"""Install TidStorage with all required backup tools
known_tid_storage_identifier_dict is a dictionary of:
(((ip, port),), storagename): (filestorage path, url for serialize)
url for serialize will be merged with access_url by internal tidstorage
"""
backup_base_path = self.createBackupDirectory('zodb')
# it is time to fill known_tid_storage_identifier_dict with backup
# destination
formatted_storage_dict = dict()
for key, v in known_tid_storage_identifier_dict.copy().iteritems():
# generate unique name for each backup
storage_name = key[1]
destination = os.path.join(backup_base_path, storage_name)
self._createDirectory(destination)
formatted_storage_dict[str(key)] = (v[0], destination, v[1])
logfile = os.path.join(self.log_directory, 'tidstorage.log')
pidfile = os.path.join(self.run_directory, 'tidstorage.pid')
statusfile = os.path.join(self.log_directory, 'tidstorage.tid')
timestamp_file_path = os.path.join(self.log_directory,
'repozo_tidstorage_timestamp.log')
# shared configuration file
tidstorage_config = self.createConfigurationFile('tidstorage.py',
self.substituteTemplate(self.getTemplateFilename('tidstorage.py.in'),
dict(
known_tid_storage_identifier_dict=pprint.pformat(formatted_storage_dict),
base_url='%s/%%s/serialize' % access_url,
host=ip,
port=port,
timestamp_file_path=timestamp_file_path,
logfile=logfile,
pidfile=pidfile,
statusfile=statusfile
)))
# TID server
tidstorage_server = zc.buildout.easy_install.scripts([('tidstoraged',
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
self.options['tidstoraged_binary'], '--nofork', '--config',
tidstorage_config])[0]
self.registerLogRotation('tidstorage', [logfile, timestamp_file_path],
self.killpidfromfile + ' ' + pidfile + ' SIGHUP')
self.path_list.append(tidstorage_config)
self.path_list.append(tidstorage_server)
# repozo wrapper
tidstorage_repozo = zc.buildout.easy_install.scripts([('tidstorage_repozo',
'slapos.recipe.librecipe.execute', 'execute')], self.ws, sys.executable,
self.bin_directory, arguments=[
self.options['tidstorage_repozo_binary'], '--config', tidstorage_config,
'--repozo', self.options['repozo_binary'], '-z'])[0]
self.path_list.append(tidstorage_repozo)
# and backup configuration
tidstorage_repozo_cron = os.path.join(self.cron_d, 'tidstorage_repozo')
open(tidstorage_repozo_cron, 'w').write('''0 0 * * * %(tidstorage_repozo)s
0 0 * * * cp -f %(tidstorage_tid)s %(tidstorage_tid_backup)s'''%dict(
tidstorage_repozo=tidstorage_repozo,
tidstorage_tid=statusfile,
tidstorage_tid_backup=os.path.join(backup_base_path, 'tidstorage.tid')))
self.path_list.append(tidstorage_repozo_cron)
return dict(host=ip, port=port)
def installZope(self, ip, port, name, zodb_configuration_string,
with_timerservice=False, tidstorage_config=None, thread_amount=1,
with_deadlockdebugger=True, zope_environment=None):
default_zope_environment = dict(
TMP=self.tmp_directory,
TMPDIR=self.tmp_directory,
HOME=self.tmp_directory,
PATH=self.bin_directory
)
if zope_environment is None:
zope_environment = default_zope_environment.copy()
else:
for envk, envv in default_zope_environment.iteritems():
if envk not in zope_environment:
zope_environment[envk] = envv
# Create zope configuration file
zope_config = dict(
products=self.options['products'],
thread_amount=thread_amount
)
# configure default Zope2 zcml
open(os.path.join(self.erp5_directory, 'etc', 'site.zcml'), 'w').write(
pkg_resources.resource_string(__name__, 'template/site.zcml'))
zope_config['zodb_configuration_string'] = zodb_configuration_string
zope_config['instance'] = self.erp5_directory
zope_config['event_log'] = os.path.join(self.log_directory,
'%s-event.log' % name)
zope_config['z2_log'] = os.path.join(self.log_directory,
'%s-Z2.log' % name)
zope_config['pid-filename'] = os.path.join(self.run_directory,
'%s.pid' % name)
zope_config['lock-filename'] = os.path.join(self.run_directory,
'%s.lock' % name)
self.registerLogRotation(name, [zope_config['event_log'],
zope_config['z2_log']], self.killpidfromfile + ' ' +
zope_config['pid-filename'] + ' SIGUSR2')
prefixed_products = []
for product in reversed(zope_config['products'].split()):
product = product.strip()
if product:
prefixed_products.append('products %s' % product)
prefixed_products.insert(0, 'products %s' % os.path.join(
self.erp5_directory, 'Products'))
zope_config['products'] = '\n'.join(prefixed_products)
zope_config['address'] = '%s:%s' % (ip, port)
zope_wrapper_template_location = self.getTemplateFilename('zope.conf.in')
zope_conf_content = self.substituteTemplate(
zope_wrapper_template_location, zope_config)
if with_timerservice:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope.conf.timerservice.in'), zope_config)
if tidstorage_config is not None:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope-tidstorage-snippet.conf.in'),
tidstorage_config)
if with_deadlockdebugger:
zope_conf_content += self.substituteTemplate(
self.getTemplateFilename('zope-deadlockdebugger-snippet.conf.in'),
dict(dump_url='/manage_debug_threads',
secret=self.generatePassword()))
zope_conf_path = self.createConfigurationFile("%s.conf" % name,
zope_conf_content)
self.path_list.append(zope_conf_path)
# Create init script
wrapper = zc.buildout.easy_install.scripts([(name,
'slapos.recipe.librecipe.execute', 'executee')], self.ws, sys.executable,
self.wrapper_directory, arguments=[
[self.options['runzope_binary'].strip(), '-C', zope_conf_path],
zope_environment
])[0]
self.path_list.append(wrapper)
return zope_config['address']
def _getApacheConfigurationDict(self, prefix, ip, port):
apache_conf = dict()
apache_conf['pid_file'] = os.path.join(self.run_directory,
......@@ -1131,185 +201,3 @@ SSLCARevocationPath %(ca_crl)s"""
]))
# Note: IPv6 is always assumed
return 'https://%(server_name)s:%(port)s%(frontend_path)s' % (apache_conf)
def installBackendApache(self, ip, port, backend, key, certificate,
suffix='', access_control_string=None):
apache_conf = self._getApacheConfigurationDict('backend_apache'+suffix, ip,
port)
apache_conf['server_name'] = '%s' % apache_conf['ip']
apache_conf['ssl_snippet'] = pkg_resources.resource_string(__name__,
'template/apache.ssl-snippet.conf.in') % dict(
login_certificate=certificate, login_key=key)
apache_config_file = self._writeApacheConfiguration('backend_apache'+suffix,
apache_conf, backend, access_control_string)
self.path_list.append(apache_config_file)
self.path_list.extend(zc.buildout.easy_install.scripts([(
'backend_apache'+suffix,
__name__ + '.apache', 'runApache')], self.ws,
sys.executable, self.wrapper_directory, arguments=[
dict(
required_path_list=[key, certificate],
binary=self.options['httpd_binary'],
config=apache_config_file
)
]))
# Note: IPv6 is always assumed
return 'https://[%(ip)s]:%(port)s' % apache_conf
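# Illustrative return value (hypothetical address): 'https://[2001:db8::1]:10001'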
def installMysqlServer(self, ip, port, database='erp5', user='user',
test_database='test_erp5', test_user='test_user', template_filename=None,
parallel_test_database_amount=100, mysql_conf=None, with_backup=True,
with_percona_toolkit=True):
if mysql_conf is None:
mysql_conf = {}
backup_directory = self.createBackupDirectory('mysql')
if template_filename is None:
template_filename = self.getTemplateFilename('my.cnf.in')
error_log = os.path.join(self.log_directory, 'mysqld.log')
slow_query_log = os.path.join(self.log_directory, 'mysql-slow.log')
mysql_conf.update(
ip=ip,
data_directory=os.path.join(self.data_root_directory,
'mysql'),
tcp_port=port,
pid_file=os.path.join(self.run_directory, 'mysqld.pid'),
socket=os.path.join(self.run_directory, 'mysqld.sock'),
error_log=error_log,
slow_query_log=slow_query_log,
mysql_database=database,
mysql_user=user,
mysql_password=self.generatePassword(),
mysql_test_password=self.generatePassword(),
mysql_test_database=test_database,
mysql_test_user=test_user,
mysql_parallel_test_dict=[
('test_%i' % x,)*2 + (self.generatePassword(),) \
for x in xrange(0,parallel_test_database_amount)],
)
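# mysql_parallel_test_dict ends up as, illustratively:
# [('test_0', 'test_0', '<generated password>'), ('test_1', 'test_1', '<generated password>'), ...]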
self.registerLogRotation('mysql', [error_log, slow_query_log],
'%(mysql_binary)s --no-defaults -B --user=root '
'--socket=%(mysql_socket)s -e "FLUSH LOGS"' % dict(
mysql_binary=self.options['mysql_binary'],
mysql_socket=mysql_conf['socket']))
self._createDirectory(mysql_conf['data_directory'])
mysql_conf_path = self.createConfigurationFile("my.cnf",
self.substituteTemplate(template_filename,
mysql_conf))
mysql_script_list = []
mysql_script_list.append(pkg_resources.resource_string(__name__,
'template/mysql-init-function.sql.in'))
for x_database, x_user, x_password in \
[(mysql_conf['mysql_database'],
mysql_conf['mysql_user'],
mysql_conf['mysql_password']),
(mysql_conf['mysql_test_database'],
mysql_conf['mysql_test_user'],
mysql_conf['mysql_test_password']),
] + mysql_conf['mysql_parallel_test_dict']:
mysql_script_list.append(pkg_resources.resource_string(__name__,
'template/mysql-init-database.sql.in') % {
'mysql_database': x_database,
'mysql_user': x_user,
'mysql_password': x_password})
mysql_script_list.append('EXIT')
mysql_script = '\n'.join(mysql_script_list)
self.path_list.extend(zc.buildout.easy_install.scripts([('mysql_update',
__name__ + '.mysql', 'updateMysql')], self.ws,
sys.executable, self.wrapper_directory, arguments=[dict(
mysql_script=mysql_script,
mysql_binary=self.options['mysql_binary'].strip(),
mysql_upgrade_binary=self.options['mysql_upgrade_binary'].strip(),
socket=mysql_conf['socket'],
)]))
self.path_list.extend(zc.buildout.easy_install.scripts([('mysqld',
__name__ + '.mysql', 'runMysql')], self.ws,
sys.executable, self.wrapper_directory, arguments=[dict(
mysql_install_binary=self.options['mysql_install_binary'].strip(),
mysqld_binary=self.options['mysqld_binary'].strip(),
data_directory=mysql_conf['data_directory'].strip(),
mysql_binary=self.options['mysql_binary'].strip(),
socket=mysql_conf['socket'].strip(),
configuration_file=mysql_conf_path,
)]))
self.path_list.extend([mysql_conf_path])
if with_backup:
# backup configuration
backup_directory = self.createBackupDirectory('mysql')
full_backup = os.path.join(backup_directory, 'full')
incremental_backup = os.path.join(backup_directory, 'incremental')
self._createDirectory(full_backup)
self._createDirectory(incremental_backup)
innobackupex_argument_list = [self.options['perl_binary'],
self.options['innobackupex_binary'],
'--defaults-file=%s' % mysql_conf_path,
'--socket=%s' %mysql_conf['socket'].strip(), '--user=root',
'--ibbackup=%s'% self.options['xtrabackup_binary']]
environment = dict(PATH='%s' % self.bin_directory)
innobackupex_incremental = zc.buildout.easy_install.scripts([(
'innobackupex_incremental','slapos.recipe.librecipe.execute', 'executee')],
self.ws, sys.executable, self.bin_directory, arguments=[
innobackupex_argument_list + ['--incremental'],
environment])[0]
self.path_list.append(innobackupex_incremental)
innobackupex_full = zc.buildout.easy_install.scripts([('innobackupex_full',
'slapos.recipe.librecipe.execute', 'executee')], self.ws,
sys.executable, self.bin_directory, arguments=[
innobackupex_argument_list,
environment])[0]
self.path_list.append(innobackupex_full)
backup_controller = zc.buildout.easy_install.scripts([
('innobackupex_controller', __name__ + '.innobackupex', 'controller')],
self.ws, sys.executable, self.bin_directory,
arguments=[innobackupex_incremental, innobackupex_full, full_backup,
incremental_backup])[0]
self.path_list.append(backup_controller)
mysql_backup_cron = os.path.join(self.cron_d, 'mysql_backup')
open(mysql_backup_cron, 'w').write('0 0 * * * ' + backup_controller)
self.path_list.append(mysql_backup_cron)
if with_percona_toolkit:
# Percona Toolkit installation (successor of Maatkit)
for pt_script_name in (
'pt-archiver',
'pt-config-diff',
'pt-deadlock-logger',
'pt-duplicate-key-checker',
'pt-fifo-split',
'pt-find',
'pt-fk-error-logger',
'pt-heartbeat',
'pt-index-usage',
'pt-kill',
'pt-log-player',
'pt-online-schema-change',
'pt-query-advisor',
'pt-query-digest',
'pt-show-grants',
'pt-slave-delay',
'pt-slave-find',
'pt-slave-restart',
'pt-table-checksum',
'pt-table-sync',
'pt-tcp-model',
'pt-trend',
'pt-upgrade',
'pt-variable-advisor',
'pt-visual-explain',
):
pt_argument_list = [self.options['perl_binary'],
self.options['%s_binary' % pt_script_name],
'--defaults-file=%s' % mysql_conf_path,
'--socket=%s' %mysql_conf['socket'].strip(), '--user=root',
]
environment = dict(PATH='%s' % self.bin_directory)
pt_exe = zc.buildout.easy_install.scripts([(
pt_script_name,'slapos.recipe.librecipe.execute', 'executee')],
self.ws, sys.executable, self.bin_directory, arguments=[
pt_argument_list, environment])[0]
self.path_list.append(pt_exe)
# The return could be more explicit database, user ...
return mysql_conf
import os
import sys
import time
def catdatefile(args):
directory = args[0]
try:
suffix = args[1]
except IndexError:
suffix = '.log'
f = open(os.path.join(directory,
time.strftime('%Y-%m-%d.%H:%M.%s') + suffix), 'a')
for line in sys.stdin:
f.write(line)
f.close()
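# Hypothetical usage via a generated wrapper: some_command 2>&1 | <wrapper> <log directory> [suffix]
# Everything read from stdin is appended to a timestamped file inside <log directory>.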
import os
import subprocess
import time
import ConfigParser
def popenCommunicate(command_list, input=None):
subprocess_kw = dict(stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if input is not None:
subprocess_kw.update(stdin=subprocess.PIPE)
popen = subprocess.Popen(command_list, **subprocess_kw)
result = popen.communicate(input)[0]
if popen.returncode is None:
popen.kill()
if popen.returncode != 0:
raise ValueError('Issue during calling %r, result was:\n%s' % (
command_list, result))
return result
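# Illustrative call: popenCommunicate(['/bin/echo', 'ok']) returns 'ok\n';
# a non-zero exit status raises ValueError with the captured output.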
class CertificateAuthority:
def __init__(self, key, certificate, openssl_binary,
openssl_configuration, request_dir):
self.key = key
self.certificate = certificate
self.openssl_binary = openssl_binary
self.openssl_configuration = openssl_configuration
self.request_dir = request_dir
def checkAuthority(self):
file_list = [ self.key, self.certificate ]
ca_ready = True
for f in file_list:
if not os.path.exists(f):
ca_ready = False
break
if ca_ready:
return
for f in file_list:
if os.path.exists(f):
os.unlink(f)
try:
# no CA, let us create new one
popenCommunicate([self.openssl_binary, 'req', '-nodes', '-config',
self.openssl_configuration, '-new', '-x509', '-extensions',
'v3_ca', '-keyout', self.key, '-out', self.certificate,
'-days', '10950'], 'Automatic Certificate Authority\n')
except:
try:
for f in file_list:
if os.path.exists(f):
os.unlink(f)
except:
# do not raise during cleanup
pass
raise
def _checkCertificate(self, common_name, key, certificate):
file_list = [key, certificate]
ready = True
for f in file_list:
if not os.path.exists(f):
ready = False
break
if ready:
return False
for f in file_list:
if os.path.exists(f):
os.unlink(f)
csr = certificate + '.csr'
try:
popenCommunicate([self.openssl_binary, 'req', '-config',
self.openssl_configuration, '-nodes', '-new', '-keyout',
key, '-out', csr, '-days', '3650'],
common_name + '\n')
try:
popenCommunicate([self.openssl_binary, 'ca', '-batch', '-config',
self.openssl_configuration, '-out', certificate,
'-infiles', csr])
finally:
if os.path.exists(csr):
os.unlink(csr)
except:
try:
for f in file_list:
if os.path.exists(f):
os.unlink(f)
except:
# do not raise during cleanup
pass
raise
else:
return True
def checkRequestDir(self):
for request_file in os.listdir(self.request_dir):
parser = ConfigParser.RawConfigParser()
parser.readfp(open(os.path.join(self.request_dir, request_file), 'r'))
if self._checkCertificate(parser.get('certificate', 'name'),
parser.get('certificate', 'key_file'), parser.get('certificate',
'certificate_file')):
print 'Created certificate %r' % parser.get('certificate', 'name')
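# Illustrative request file layout read above (paths hypothetical):
# [certificate]
# name = service.example.invalid
# key_file = /path/to/service.key
# certificate_file = /path/to/service.crt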
def runCertificateAuthority(args):
ca_conf = args[0]
ca = CertificateAuthority(ca_conf['key'], ca_conf['certificate'],
ca_conf['openssl_binary'], ca_conf['openssl_configuration'],
ca_conf['request_dir'])
while True:
ca.checkAuthority()
ca.checkRequestDir()
time.sleep(60)
import sys
import os
import signal
def killpidfromfile():
file = sys.argv[1]
sig = getattr(signal, sys.argv[2], None)
if sig is None:
raise ValueError('Unknown signal name %s' % sys.argv[2])
if os.path.exists(file):
pid = int(open(file).read())
print 'Killing pid %s with signal %s' % (pid, sys.argv[2])
os.kill(pid, sig)
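# Invoked through a generated console script as: <wrapper> <pid file> <signal name>,
# e.g. the registerLogRotation() postrotate commands pass SIGUSR2 or SIGHUP.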
......@@ -3,5 +3,7 @@ SSLCertificateFile %(login_certificate)s
SSLCertificateKeyFile %(login_key)s
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
SSLProtocol -ALL +SSLv3 +TLSv1
SSLCipherSuite ALL:!aNULL:!ADH:!eNULL:!LOW:!EXP:RC4+RSA:+HIGH:+MEDIUM
SSLProxyEngine On
......@@ -26,6 +26,10 @@ TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
ServerTokens Prod
ServerSignature Off
TraceEnable Off
# As backend is trusting REMOTE_USER header unset it always
RequestHeader unset REMOTE_USER
......
%(file_list)s {
daily
dateext
rotate 3650
compress
notifempty
sharedscripts
create
postrotate
%(postrotate)s
endscript
olddir %(olddir)s
}
#
# OpenSSL example configuration file.
# This is mostly being used for generation of certificate requests.
#
# This definition stops the following lines choking if HOME isn't
# defined.
HOME = .
RANDFILE = $ENV::HOME/.rnd
# Extra OBJECT IDENTIFIER info:
#oid_file = $ENV::HOME/.oid
oid_section = new_oids
# To use this configuration file with the "-extfile" option of the
# "openssl x509" utility, name here the section containing the
# X.509v3 extensions to use:
# extensions =
# (Alternatively, use a configuration file that has only
# X.509v3 extensions in its main [= default] section.)
[ new_oids ]
# We can add new OIDs in here for use by 'ca', 'req' and 'ts'.
# Add a simple OID like this:
# testoid1=1.2.3.4
# Or use config file substitution like this:
# testoid2=${testoid1}.5.6
# Policies used by the TSA examples.
tsa_policy1 = 1.2.3.4.1
tsa_policy2 = 1.2.3.4.5.6
tsa_policy3 = 1.2.3.4.5.7
####################################################################
[ ca ]
default_ca = CA_default # The default ca section
####################################################################
[ CA_default ]
dir = %(working_directory)s # Where everything is kept
certs = $dir/certs # Where the issued certs are kept
crl_dir = $dir/crl # Where the issued crl are kept
database = $dir/index.txt # database index file.
#unique_subject = no # Set to 'no' to allow creation of
# several certificates with same subject.
new_certs_dir = $dir/newcerts # default place for new certs.
certificate = $dir/cacert.pem # The CA certificate
serial = $dir/serial # The current serial number
crlnumber = $dir/crlnumber # the current crl number
# must be commented out to leave a V1 CRL
crl = $dir/crl.pem # The current CRL
private_key = $dir/private/cakey.pem # The private key
RANDFILE = $dir/private/.rand # private random number file
x509_extensions = usr_cert # The extensions to add to the cert
# Comment out the following two lines for the "traditional"
# (and highly broken) format.
name_opt = ca_default # Subject Name options
cert_opt = ca_default # Certificate field options
# Extension copying option: use with caution.
# copy_extensions = copy
# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs
# so this is commented out by default to leave a V1 CRL.
# crlnumber must also be commented out to leave a V1 CRL.
# crl_extensions = crl_ext
default_days = 3650 # how long to certify for
default_crl_days= 30 # how long before next CRL
default_md = default # use public key default MD
preserve = no # keep passed DN ordering
# A few different ways of specifying how similar the request should look
# For type CA, the listed attributes must be the same, and the optional
# and supplied fields are just that :-)
policy = policy_match
# For the CA policy
[ policy_match ]
countryName = match
stateOrProvinceName = match
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
# For the 'anything' policy
# At this point in time, you must list all acceptable 'object'
# types.
[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
####################################################################
[ req ]
default_bits = 2048
default_md = sha1
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
#attributes = req_attributes
x509_extensions = v3_ca # The extensions to add to the self-signed cert
# Passwords for private keys if not present they will be prompted for
# input_password = secret
# output_password = secret
# This sets a mask for permitted string types. There are several options.
# default: PrintableString, T61String, BMPString.
# pkix : PrintableString, BMPString (PKIX recommendation before 2004)
# utf8only: only UTF8Strings (PKIX recommendation after 2004).
# nombstr : PrintableString, T61String (no BMPStrings or UTF8Strings).
# MASK:XXXX a literal mask value.
# WARNING: ancient versions of Netscape crash on BMPStrings or UTF8Strings.
string_mask = utf8only
# req_extensions = v3_req # The extensions to add to a certificate request
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
countryName_value = %(country_code)s
countryName_min = 2
countryName_max = 2
stateOrProvinceName = State or Province Name (full name)
stateOrProvinceName_value = %(state)s
localityName = Locality Name (eg, city)
localityName_value = %(city)s
0.organizationName = Organization Name (eg, company)
0.organizationName_value = %(company)s
# we can do this but it is not needed normally :-)
#1.organizationName = Second Organization Name (eg, company)
#1.organizationName_default = World Wide Web Pty Ltd
commonName = Common Name (eg, your name or your server\'s hostname)
commonName_max = 64
emailAddress = Email Address
emailAddress_value = %(email_address)s
emailAddress_max = 64
# SET-ex3 = SET extension number 3
#[ req_attributes ]
#challengePassword = A challenge password
#challengePassword_min = 4
#challengePassword_max = 20
#
#unstructuredName = An optional company name
[ usr_cert ]
# These extensions are added when 'ca' signs a request.
# This goes against PKIX guidelines but some CAs do it and some software
# requires this to avoid interpreting an end user certificate as a CA.
basicConstraints=CA:FALSE
# Here are some examples of the usage of nsCertType. If it is omitted
# the certificate can be used for anything *except* object signing.
# This is OK for an SSL server.
# nsCertType = server
# For an object signing certificate this would be used.
# nsCertType = objsign
# For normal client use this is typical
# nsCertType = client, email
# and for everything including object signing:
# nsCertType = client, email, objsign
# This is typical in keyUsage for a client certificate.
# keyUsage = nonRepudiation, digitalSignature, keyEncipherment
# This will be displayed in Netscape's comment listbox.
nsComment = "OpenSSL Generated Certificate"
# PKIX recommendations harmless if included in all certificates.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer
# This stuff is for subjectAltName and issuerAltname.
# Import the email address.
# subjectAltName=email:copy
# An alternative to produce certificates that aren't
# deprecated according to PKIX.
# subjectAltName=email:move
# Copy subject details
# issuerAltName=issuer:copy
#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem
#nsBaseUrl
#nsRevocationUrl
#nsRenewalUrl
#nsCaPolicyUrl
#nsSslServerName
# This is required for TSA certificates.
# extendedKeyUsage = critical,timeStamping
[ v3_req ]
# Extensions to add to a certificate request
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
[ v3_ca ]
# Extensions for a typical CA
# PKIX recommendation.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer
# This is what PKIX recommends but some broken software chokes on critical
# extensions.
#basicConstraints = critical,CA:true
# So we do this instead.
basicConstraints = CA:true
# Key usage: this is typical for a CA certificate. However, since it will
# prevent it being used as a test self-signed certificate, it is best
# left out by default.
# keyUsage = cRLSign, keyCertSign
# Some might want this also
# nsCertType = sslCA, emailCA
# Include email address in subject alt name: another PKIX recommendation
# subjectAltName=email:copy
# Copy issuer details
# issuerAltName=issuer:copy
# DER hex encoding of an extension: beware experts only!
# obj=DER:02:03
# Where 'obj' is a standard or added object
# You can even override a supported extension:
# basicConstraints= critical, DER:30:03:01:01:FF
[ crl_ext ]
# CRL extensions.
# Only issuerAltName and authorityKeyIdentifier make any sense in a CRL.
# issuerAltName=issuer:copy
authorityKeyIdentifier=keyid:always
[ proxy_cert_ext ]
# These extensions should be added when creating a proxy certificate
# This goes against PKIX guidelines but some CAs do it and some software
# requires this to avoid interpreting an end user certificate as a CA.
basicConstraints=CA:FALSE
# Here are some examples of the usage of nsCertType. If it is omitted
# the certificate can be used for anything *except* object signing.
# This is OK for an SSL server.
# nsCertType = server
# For an object signing certificate this would be used.
# nsCertType = objsign
# For normal client use this is typical
# nsCertType = client, email
# and for everything including object signing:
# nsCertType = client, email, objsign
# This is typical in keyUsage for a client certificate.
# keyUsage = nonRepudiation, digitalSignature, keyEncipherment
# This will be displayed in Netscape's comment listbox.
nsComment = "OpenSSL Generated Certificate"
# PKIX recommendations harmless if included in all certificates.
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer
# This stuff is for subjectAltName and issuerAltname.
# Import the email address.
# subjectAltName=email:copy
# An alternative to produce certificates that aren't
# deprecated according to PKIX.
# subjectAltName=email:move
# Copy subject details
# issuerAltName=issuer:copy
#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem
#nsBaseUrl
#nsRevocationUrl
#nsRenewalUrl
#nsCaPolicyUrl
#nsSslServerName
# This really needs to be in place for it to be a proxy certificate.
proxyCertInfo=critical,language:id-ppl-anyLanguage,pathlen:3,policy:foo
####################################################################
[ tsa ]
default_tsa = tsa_config1 # the default TSA section
[ tsa_config1 ]
# These are used by the TSA reply generation only.
dir = /etc/pki/tls # TSA root directory
serial = $dir/tsaserial # The current serial number (mandatory)
crypto_device = builtin # OpenSSL engine to use for signing
signer_cert = $dir/tsacert.pem # The TSA signing certificate
# (optional)
certs = $dir/cacert.pem # Certificate chain to include in reply
# (optional)
signer_key = $dir/private/tsakey.pem # The TSA private key (optional)
default_policy = tsa_policy1 # Policy if request did not specify it
# (optional)
other_policies = tsa_policy2, tsa_policy3 # acceptable policies (optional)
digests = md5, sha1 # Acceptable message digests (mandatory)
accuracy = secs:1, millisecs:500, microsecs:100 # (optional)
clock_precision_digits = 0 # number of digits after dot. (optional)
ordering = yes # Is ordering defined for timestamps?
# (optional, default: no)
tsa_name = yes # Must the TSA name be included in the reply?
# (optional, default: no)
ess_cert_id_chain = no # Must the ESS cert id chain be included?
# (optional, default: no)
known_tid_storage_identifier_dict = %(known_tid_storage_identifier_dict)s
base_url = '%(base_url)s'
address = '%(host)s'
port = %(port)s
#fork = False
#setuid = None
#setgid = None
burst_period = 30
full_dump_period = 300
timestamp_file_path = '%(timestamp_file_path)s'
logfile_name = '%(logfile)s'
pidfile_name = '%(pidfile)s'
status_file = '%(statusfile)s'
# DeadlockDebugger configuration
<product-config DeadlockDebugger>
dump_url %(dump_url)s
secret %(secret)s
</product-config>
# TIDStorage connection
<product-config TIDStorage>
backend-ip %(host)s
backend-port %(port)s
</product-config>
<zodb_db %(storage_name)s>
cache-size %(zodb_cache_size)d
mount-point %(mount_point)s
<zeoclient>
cache-size %(zeo_client_cache_size)s
server %(address)s
storage %(storage_name)s
name %(storage_name)s
</zeoclient>
</zodb_db>
<zodb_db root>
cache-size %(zodb_cache_size)d
<filestorage>
path %(zodb_root_path)s
</filestorage>
mount-point /
</zodb_db>
import os
import sys
def runTestSuite(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ['PATH'].split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with Shebang size limitation
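# (Very long "#!" lines are truncated by the kernel, so the interpreter named in
# the shebang is resolved here and exec'd directly with the wrapper as its argument.)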
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
import os
import sys
def runUnitTest(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ['PATH'].split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with Shebang size limitation
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import urlparse
class Recipe(GenericBaseRecipe):
def install(self):
testinstance = self.options['test-instance-path']
mysql_connection_string_list = []
path_list = []
# XXX: assume existence of 100 test databases, because slaves are not
# functional yet in slapos: testdb_0...testdb_99, with testuser_N
mysql_template = "%s@%s:%s %s %s"
mysql_parsed = urlparse.urlparse(self.options['mysql-url'])
for i in range(0, 100):
mysql_connection_string_list.append(mysql_template % ('testdb_%s'% i,
mysql_parsed.hostname, mysql_parsed.port, 'testuser_%s'% i, mysql_parsed.password))
mysql_connection_string = mysql_template % ('erp5_test', mysql_parsed.hostname,
mysql_parsed.port, 'erp5_test', mysql_parsed.password)
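# Illustrative connection strings (hypothetical values):
# 'testdb_0@127.0.0.1:45678 testuser_0 <password>', ..., 'erp5_test@127.0.0.1:45678 erp5_test <password>'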
cloudooo_parsed = urlparse.urlparse(self.options['cloudooo-url'])
memcached_parsed = urlparse.urlparse(self.options['memcached-url'])
kumofs_parsed = urlparse.urlparse(self.options['kumofs-url'])
common_dict = dict(
instance_home=testinstance,
prepend_path=self.options['prepend-path'],
openssl_binary=self.options['openssl-binary'],
test_ca_path=self.options['certificate-authority-path'],
)
common_list = [
'--conversion_server_hostname=%s' % cloudooo_parsed.hostname,
'--conversion_server_port=%s' % cloudooo_parsed.port,
'--volatile_memcached_server_hostname=%s' % memcached_parsed.hostname,
'--volatile_memcached_server_port=%s' % memcached_parsed.port,
'--persistent_memcached_server_hostname=%s' % kumofs_parsed.hostname,
'--persistent_memcached_server_port=%s' % kumofs_parsed.port,
]
path_list.append(self.createPythonScript(self.options['run-unit-test'],
__name__ + '.test.runUnitTest', [dict(
call_list=[self.options['run-unit-test-binary'],
'--erp5_sql_connection_string', mysql_connection_string,
'--extra_sql_connection_string_list', ','.join(
mysql_connection_string_list),
] + common_list, **common_dict)]))
path_list.append(self.createPythonScript(self.options['run-test-suite'],
__name__ + '.test.runTestSuite', [dict(
call_list=[self.options['run-test-suite-binary'],
'--db_list', ','.join(mysql_connection_string_list),
] + common_list, **common_dict)]))
return path_list
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
def runTestSuite(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ['PATH'].split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with Shebang size limitation
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
def runUnitTest(args):
env = os.environ.copy()
d = args[0]
env['OPENSSL_BINARY'] = d['openssl_binary']
env['TEST_CA_PATH'] = d['test_ca_path']
env['PATH'] = ':'.join([d['prepend_path']] + os.environ.get('PATH', '').split(':'))
env['INSTANCE_HOME'] = d['instance_home']
env['REAL_INSTANCE_HOME'] = d['instance_home']
# Deal with Shebang size limitation
executable_filepath = d['call_list'][0]
file_object = open(executable_filepath, 'r')
line = file_object.readline()
file_object.close()
argument_list = []
if line[:2] == '#!':
executable_filepath = line[2:].strip()
argument_list.append(executable_filepath)
argument_list.extend(d['call_list'])
argument_list.extend(sys.argv[1:])
argument_list.append(env)
os.execle(executable_filepath, *argument_list)
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import urlparse
from slapos.recipe.librecipe import GenericSlapRecipe
class Recipe(GenericSlapRecipe):
def _install(self):
conversion_server = None
if 'cloudooo-url' in self.options and self.options['cloudooo-url']:
parsed = urlparse.urlparse(self.options['cloudooo-url'])
conversion_server = "%s:%s" % (parsed.hostname, parsed.port)
memcached = None
if 'memcached-url' in self.options and self.options['memcached-url']:
parsed = urlparse.urlparse(self.options['memcached-url'])
memcached = "%s:%s" % (parsed.hostname, parsed.port)
kumofs = None
if 'kumofs-url' in self.options and self.options['kumofs-url']:
parsed = urlparse.urlparse(self.options['kumofs-url'])
kumofs = "%s:%s" % (parsed.hostname, parsed.port)
parsed = urlparse.urlparse(self.options['mysql-url'])
mysql_connection_string = "%(database)s@%(hostname)s:%(port)s "\
"%(username)s %(password)s" % dict(
database=parsed.path.split('/')[1],
hostname=parsed.hostname,
port=parsed.port,
username=parsed.username,
password=parsed.password
)
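# Illustrative input/output (hypothetical URL):
# mysql-url = mysql://user:secret@127.0.0.1:45678/erp5 -> 'erp5@127.0.0.1:45678 user secret'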
parsed = urlparse.urlparse(self.options['url'])
zope_user = parsed.username
zope_password = parsed.password
zope_host = '%s:%s' % (parsed.hostname, parsed.port)
bt5_list = []
if len(self.parameter_dict.get("bt5_list", "").strip()):
bt5_list = self.parameter_dict["bt5_list"].split()
elif self.parameter_dict.get("flavour", "default") == 'configurator':
bt5_list = self.options['configurator-bt5-list'].split()
bt5_repository_list = self.parameter_dict.get("bt5_repository_list",
"").split() or self.options['bt5-repository-list'].split()
script = self.createPythonScript(self.options['update-wrapper'],
__name__+'.erp5.updateERP5', [
self.options['site-id'], mysql_connection_string,
[zope_user, zope_password, zope_host],
memcached, conversion_server, kumofs, bt5_list, bt5_repository_list,
self.options['cadir-path'], self.options['openssl-binary']])
return [script]
......@@ -251,16 +251,6 @@ class ERP5Updater(object):
return True
return False
def isCertificateAuthorityConfigured(self):
""" Check if certificate Authority is configured correctly. """
external_connection_dict = self.system_signature_dict[
'external_connection_dict']
if self.certificate_authority_path == external_connection_dict.get(
'portal_certificate_authority/certificate_authority_path') and \
self.openssl_binary == external_connection_dict.get(
'portal_certificate_authority/openssl_binary'):
return True
return False
def isCertificateAuthorityConfigured(self):
""" Check if certificate Authority is configured correctly. """
external_connection_dict = self.system_signature_dict[
......@@ -303,6 +293,7 @@ class ERP5Updater(object):
def updateERP5Site(self):
if not self.isERP5Present():
self.log('INFO', 'No site present, adding new with id %r' % self.site_id)
self.POST('/manage_addProduct/ERP5/manage_addERP5Site', {
"id": self.site_id,
"erp5_catalog_storage": self.erp5_catalog_storage,
......@@ -330,6 +321,7 @@ class ERP5Updater(object):
def run(self):
""" Keep running until kill"""
while 1:
self.log('INFO', 'Sleeping for %s' % self.short_sleeping_time)
time.sleep(self.short_sleeping_time)
if not self.updateERP5Site():
self.loadSystemSignatureDict()
......
......@@ -24,86 +24,80 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import BaseSlapRecipe
import ConfigParser
import json
import os
import pkg_resources
import zc.buildout
import zc.recipe.egg
import sys
import StringIO
class Recipe(BaseSlapRecipe):
def __init__(self, buildout, name, options):
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
BaseSlapRecipe.__init__(self, buildout, name, options)
from slapos.recipe.librecipe import GenericBaseRecipe
def _install(self):
self.requirements, self.ws = self.egg.working_set()
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
CONFIG = dict()
CONFIG['slapos_directory'] = self.createDataDirectory('slapos')
CONFIG['working_directory'] = self.createDataDirectory('testnode')
CONFIG['test_suite_directory'] = self.createDataDirectory('test_suite')
CONFIG['proxy_host'] = self.getLocalIPv4Address()
CONFIG['proxy_port'] = '5000'
CONFIG['log_directory'] = self.createDataDirectory('testnodelog')
CONFIG['run_directory'] = self.createDataDirectory('testnoderun')
CONFIG['test_suite_title'] = self.parameter_dict.get('test_suite_title')
CONFIG['test_node_title'] = self.parameter_dict.get('test_node_title')
CONFIG['test_suite'] = self.parameter_dict.get('test_suite')
CONFIG['node_quantity'] = self.parameter_dict.get('node_quantity', '1')
CONFIG['project_title'] = self.parameter_dict.get('project_title')
CONFIG['ipv4_address'] = self.getLocalIPv4Address()
CONFIG['ipv6_address'] = self.getGlobalIPv6Address()
CONFIG['test_suite_master_url'] = self.parameter_dict.get(
'test_suite_master_url', None)
CONFIG['git_binary'] = self.options['git_binary']
CONFIG['slapgrid_partition_binary'] = self.options[
'slapgrid_partition_binary']
CONFIG['slapgrid_software_binary'] = self.options[
'slapgrid_software_binary']
CONFIG['slapproxy_binary'] = self.options['slapproxy_binary']
CONFIG['zip_binary'] = self.options['zip_binary']
options = self.options.copy()
del options['recipe']
CONFIG = {k.replace('-', '_'): v for k, v in options.iteritems()}
CONFIG['PATH'] = os.environ['PATH']
additional_bt5_repository_id = \
self.parameter_dict.get('additional_bt5_repository_id')
CONFIG['bt5_path'] = None
if additional_bt5_repository_id is not None:
CONFIG['bt5_path'] = ""
additional_bt5_repository_id_list = additional_bt5_repository_id.split(",")
for id in additional_bt5_repository_id_list:
id_path = os.path.join(CONFIG['slapos_directory'], id)
if CONFIG['bt5_path']:
additional_bt5_repository_id_list = CONFIG['bt5_path'].split(",")
CONFIG['bt5_path'] = ''
for bt5_repository_id in additional_bt5_repository_id_list:
id_path = os.path.join(CONFIG['slapos_directory'], bt5_repository_id)
bt_path = os.path.join(id_path, "bt5")
CONFIG['bt5_path'] += "%s,%s," % (id_path, bt_path)
CONFIG['instance_dict'] = ''
if 'instance_dict' in self.parameter_dict:
CONFIG['instance_dict'] = '[instance_dict]\n'
for k,v in eval(self.parameter_dict['instance_dict']).iteritems():
CONFIG['instance_dict'] += '%s = %s\n' % (k,v)
CONFIG['repository_list'] = ''
if self.options['instance-dict']:
config_instance_dict = ConfigParser.ConfigParser()
config_instance_dict.add_section('instance_dict')
instance_dict = json.loads(self.options['instance-dict'])
for k, v in instance_dict.iteritems():
config_instance_dict.set('instance_dict', k, v)
value = StringIO.StringIO()
config_instance_dict.write(value)
CONFIG['instance_dict'] = value.getvalue()
vcs_repository_list = json.loads(self.options['repository-list'])
config_repository_list = ConfigParser.ConfigParser()
i = 0
for repository in eval(self.parameter_dict['vcs_repository_list']):
CONFIG['repository_list'] += '[vcs_repository_%s]\n' % i
CONFIG['repository_list'] += 'url = %s\n' % repository['url']
for repository in vcs_repository_list:
section_name = 'vcs_repository_%d' % i
config_repository_list.add_section(section_name)
config_repository_list.set(section_name, 'url', repository['url'])
if 'branch' in repository:
CONFIG['repository_list'] += 'branch = %s\n' % repository['branch']
config_repository_list.set(section_name, 'branch', repository['branch'])
if 'profile_path' in repository:
CONFIG['repository_list'] += 'profile_path = %s\n' % repository[
'profile_path']
config_repository_list.set(section_name, 'profile_path',
repository['profile_path'])
if 'buildout_section_id' in repository:
CONFIG['repository_list'] += 'buildout_section_id = %s\n' % repository[
'buildout_section_id']
CONFIG['repository_list'] += '\n'
config_repository_list.set(section_name, 'buildout_section_id',
repository['buildout_section_id'])
i += 1
testnode_config = self.createConfigurationFile('erp5testnode.cfg',
self.substituteTemplate(pkg_resources.resource_filename(__name__,
'template/erp5testnode.cfg.in'), CONFIG))
testnode_log = os.path.join(self.log_directory, 'erp5testnode.log')
wrapper = zc.buildout.easy_install.scripts([('erp5testnode',
'slapos.recipe.librecipe.execute', 'executee')], self.ws, sys.executable,
self.wrapper_directory, arguments=[[self.options['testnode'], '-l',
testnode_log, testnode_config], {'GIT_SSL_NO_VERIFY': '1'}])[0]
path_list.append(testnode_config)
path_list.append(wrapper)
value = StringIO.StringIO()
config_repository_list.write(value)
CONFIG['repository_list'] = value.getvalue()
configuration_file = self.createFile(
self.options['configuration-file'],
self.substituteTemplate(
self.getTemplateFilename('erp5testnode.cfg.in'),
CONFIG
),
)
path_list.append(configuration_file)
path_list.append(
self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.executee',
[ # Executable
[ self.options['testnode'], '-l', self.options['log-file'],
configuration_file],
# Environment
{
'GIT_SSL_NO_VERIFY': '1',
}
],
)
)
return path_list
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import pkg_resources
import os
import zc.buildout
class Recipe(GenericBaseRecipe):
"""
fontconfig instance configuration.
conf-path -- location of the configuration file
font-system-folder -- fonts installed by software release
font-folder -- location where to download fonts
url-list -- From where to download fonts
"""
def install(self):
created_file_list = []
font_folder = self.options['font-folder']
service_folder = self.options['service-folder']
snippet_filename = self.getTemplateFilename(
'fontconfig-snippet.cfg.in')
font_snippet_list = [self.substituteTemplate(snippet_filename,
dict(font_folder_path=self.options['font-system-folder']))]
font_snippet_list.append(self.substituteTemplate(snippet_filename,
dict(font_folder_path=font_folder)))
config = dict(
font_folder_path_snippet=''.join(font_snippet_list),
)
template_filename = self.getTemplateFilename('fontconfig.cfg.in')
configuration_path = self.createFile(
self.options['conf-path'],
self.substituteTemplate(template_filename, config))
created_file_list.append(configuration_path)
# Instantiate onetimedownloads, one for each url
wrapper_template_location = pkg_resources.resource_filename(
__name__, os.path.join(
'template', 'onetimedownload_run.in'))
onetimedownload_config = {}
onetimedownload_config.update(self.options)
for index, url in enumerate(self.options['url-list'].split()):
if not url.strip():
continue
bin_path = os.path.join(service_folder, 'onetimedownload%s' % index)
file_path = os.path.join(font_folder, '%s' % index)
onetimedownload_config['url'] = url
onetimedownload_config['file_path'] = file_path
onetimedownload_runner_path = self.createExecutable(bin_path,
self.substituteTemplate(wrapper_template_location,
onetimedownload_config))
created_file_list.append(onetimedownload_runner_path)
return created_file_list
<?xml version="1.0"?>
<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
<fontconfig>
%(font_folder_path_snippet)s
</fontconfig>
\ No newline at end of file
#!/bin/sh
# BEWARE: This file is operated by slapgrid
# BEWARE: It will be overwritten automatically
exec %(onetimedownload_path)s "%(url)s" "%(file_path)s"
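# Rendered example (hypothetical values):
# exec /path/to/bin/onetimedownload "http://example.invalid/font.ttf" "/path/to/fonts/0"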
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericSlapRecipe
import os
import json
import traceback
class Recipe(GenericSlapRecipe):
def _options(self, options):
self.dirname = os.path.join(self.buildout['buildout']['parts-directory'],
self.name)
options['output'] = os.path.join(self.dirname, self.name + '.cfg')
def _generateRealTemplate(self):
# TODO check json against schema
json_data = {}
if self.parameter_dict.get('cloudooo-json', None):
json_data = json.loads(self.parameter_dict['cloudooo-json'])
# dynamic fonts
font_url_list = json_data.get('font_url_list', [])
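# Illustrative cloudooo-json payload (hypothetical URL):
# {"font_url_list": ["http://example.invalid/custom-font.ttf"]}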
fontconfig_template = open(self.options['template']).read()
fontconfig = open(self.options['snippet-fontconfig']).read()
fontconfig_extension = fontconfig % dict(font_url_list=' '.join(font_url_list))
with open(self.options['output'], 'w') as f:
f.write(fontconfig_template + fontconfig_extension)
def _install(self):
if not os.path.exists(self.dirname):
os.mkdir(self.dirname)
try:
self._generateRealTemplate()
except Exception:
print 'Ignored issue during template generation:\n%s' % \
traceback.format_exc()
return [self.dirname]
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericSlapRecipe
import os
import json
import traceback
SECTION_BACKEND_PUBLISHER = """[publish-apache-backend-list]
recipe = slapos.cookbook:publish"""
ZOPE_PORT_BASE = 12000
ZEO_PORT_BASE = 15000
HAPROXY_PORT_BASE = 11000
APACHE_PORT_BASE = 10000
class Recipe(GenericSlapRecipe):
def _options(self, options):
self.dirname = os.path.join(self.buildout['buildout']['parts-directory'],
self.name)
options['output'] = os.path.join(self.dirname, self.name + '.cfg')
def _generateRealTemplate(self):
current_zeo_port = ZEO_PORT_BASE
current_zope_port = ZOPE_PORT_BASE
current_apache_port = APACHE_PORT_BASE
current_haproxy_port = HAPROXY_PORT_BASE
json_data = json.loads(self.parameter_dict['json'])
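# Illustrative shape of the "json" parameter (all values hypothetical):
# {"site-id": "erp5", "timezone": "UTC",
#  "zeo": {"1": [{"storage-name": "main", "mount-point": "/", "zope-cache-size": 5000,
#                 "zeo-cache-size": "20MB", "serialize-path": "/%(site-id)s/serialize"}]},
#  "activity": {"zopecount": 2},
#  "backend": {"default": {"zopecount": 2, "thread-amount": 4, "maxconn": 100,
#                          "access-control-string": "all",
#                          "longrequest-logger": {"timeout": "4", "interval": "2"}}}}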
site_id = str(json_data['site-id'])
# prepare zeo
output = ''
part_list = []
zope_dict = {}
zope_connection_dict = {}
known_tid_storage_identifier_dict = {}
snippet_zeo = open(self.options['snippet-zeo']).read()
for zeo_id, zeo_configuration_list in json_data['zeo'].iteritems():
storage_list = []
a = storage_list.append
for zeo_slave in zeo_configuration_list:
zope_connection_dict[zeo_slave['storage-name']] = {
'zope-cache-size': zeo_slave['zope-cache-size'],
'zeo-cache-size': zeo_slave['zeo-cache-size'],
'mount-point': zeo_slave['mount-point'] % {'site-id': site_id},
'storage-name': zeo_slave['storage-name'],
'server': '${zeo-instance-%(zeo-id)s:ip}:${zeo-instance-%(zeo-id)s:port}' % {'zeo-id': zeo_id}
}
zodb_path = os.path.join('${directory:zodb}', zeo_slave['storage-name'] + '.fs')
a(' storage-name=%(storage-name)s zodb-path=%(zodb-path)s' % {'zodb-path': zodb_path, 'storage-name': zeo_slave['storage-name']})
known_tid_storage_identifier_dict[
"((('%(ip)s', %(port)s),), '%(storage_name)s')" % dict(
ip='${zeo-instance-%s:ip}' % zeo_id,
port='${zeo-instance-%s:port}' % zeo_id,
storage_name=zeo_slave['storage-name'])] = (zodb_path, '${directory:zodb-backup}/%s/' % zeo_slave['storage-name'], zeo_slave['serialize-path'] % {'site-id': site_id})
current_zeo_port += 1
output += snippet_zeo % dict(
zeo_id=zeo_id,
zeo_port=current_zeo_port,
storage_list='\n'.join(storage_list)
)
part_list.extend([
"zeo-instance-%s" % zeo_id,
"logrotate-entry-zeo-%s" % zeo_id
])
zeo_connection_list = []
a = zeo_connection_list.append
for k, v in zope_connection_dict.iteritems():
a(' zeo-cache-size=%(zeo-cache-size)s zope-cache-size=%(zope-cache-size)s server=%(server)s mount-point=%(mount-point)s storage-name=%(storage-name)s' % v)
zeo_connection_string = '\n'.join(zeo_connection_list)
zope_dict.update(
timezone=json_data['timezone'],
zeo_connection_string=zeo_connection_string
)
# always one distribution node
current_zope_port += 1
snippet_zope = open(self.options['snippet-zope']).read()
zope_id = 'zope-distribution'
part_list.append(zope_id)
part_list.append('logrotate-entry-%s' % zope_id)
output += snippet_zope % dict(zope_thread_amount=1, zope_id=zope_id,
zope_port=current_zope_port, zope_timeserver=True,
longrequest_logger_file='', longrequest_logger_timeout='',
longrequest_logger_interval='', **zope_dict)
# always one admin node
current_zope_port += 1
zope_id = 'zope-admin'
part_list.append(zope_id)
part_list.append('logrotate-entry-%s' % zope_id)
output += snippet_zope % dict(zope_thread_amount=1, zope_id=zope_id,
zope_port=current_zope_port, zope_timeserver=False,
longrequest_logger_file='', longrequest_logger_timeout='',
longrequest_logger_interval='', **zope_dict)
# handle activity key
for q in range(1, json_data['activity']['zopecount'] + 1):
current_zope_port += 1
part_name = 'zope-activity-%s' % q
part_list.append(part_name)
part_list.append('logrotate-entry-%s' % part_name)
output += snippet_zope % dict(zope_thread_amount=1, zope_id=part_name,
zope_port=current_zope_port, zope_timeserver=True,
longrequest_logger_file='', longrequest_logger_timeout='',
longrequest_logger_interval='', **zope_dict)
# handle backend key
snippet_backend = open(self.options['snippet-backend']).read()
publish_url_list = []
for backend_name, backend_configuration in json_data['backend'].iteritems():
haproxy_backend_list = []
for q in range(1, backend_configuration['zopecount'] + 1):
current_zope_port += 1
part_name = 'zope-%s-%s' % (backend_name, q)
part_list.append(part_name)
part_list.append('logrotate-entry-%s' % part_name)
longrequest_logger = backend_configuration.get("longrequest-logger", None)
if longrequest_logger is not None:
longrequest_part_name = '%s-longrequest' %part_name
longrequest_logger_file = '${basedirectory:log}/%s.log' \
%longrequest_part_name
longrequest_logger_timeout = longrequest_logger.get('timeout', '4')
longrequest_logger_interval = longrequest_logger.get('interval', '2')
else:
longrequest_logger_file = longrequest_logger_timeout = \
longrequest_logger_interval = ''
output += snippet_zope % dict(
zope_thread_amount=backend_configuration['thread-amount'],
zope_id=part_name, zope_port=current_zope_port, zope_timeserver=False,
longrequest_logger_file=longrequest_logger_file,
longrequest_logger_timeout=longrequest_logger_timeout,
longrequest_logger_interval=longrequest_logger_interval,
**zope_dict)
haproxy_backend_list.append('${%(part_name)s:ip}:${%(part_name)s:port}' % dict(part_name=part_name))
# now generate backend access
current_apache_port += 1
current_haproxy_port += 1
part_list.append('apache-%(backend_name)s ca-apache-%(backend_name)s logrotate-entry-apache-%(backend_name)s haproxy-%(backend_name)s' % dict(backend_name=backend_name))
backend_dict = dict(
backend_name=backend_name,
apache_port=current_apache_port,
haproxy_port=current_haproxy_port,
access_control_string=backend_configuration['access-control-string'],
maxconn=backend_configuration['maxconn'],
server_check_path='/%s/getId' % site_id,
haproxy_backend_list=' '.join(haproxy_backend_list)
)
publish_url_list.append('url-%(backend_name)s = https://[${apache-%(backend_name)s:ip}]:${apache-%(backend_name)s:port}' % dict(
backend_name=backend_name))
output += snippet_backend % backend_dict
output += SECTION_BACKEND_PUBLISHER + '\n'
output += '\n'.join(publish_url_list)
part_list.append('publish-apache-backend-list')
prepend = open(self.options['snippet-master']).read() % dict(
part_list=' \n'.join([' '+q for q in part_list]),
known_tid_storage_identifier_dict=known_tid_storage_identifier_dict,
haproxy_section="haproxy-%s" % backend_name,
zope_section=zope_id,
site_id=site_id,
**self.parameter_dict
)
output = prepend + output
with open(self.options['output'], 'w') as f:
f.write(output)
def _install(self):
if not os.path.exists(self.dirname):
os.mkdir(self.dirname)
    if "json" not in self.parameter_dict:
      # no json transmitted, nothing to do
with open(self.options['output'], 'w') as f:
f.write("[buildout]\nparts =\n")
else:
try:
self._generateRealTemplate()
except Exception:
print 'Ignored issue during template generation:\n%s' % \
traceback.format_exc()
return [self.dirname]
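# Illustrative sketch (not part of the recipe above): a minimal value for the
# "json" instance parameter, reconstructed from the keys that
# _generateRealTemplate() reads. Only the key names come from the code; every
# value below is a hypothetical example.
import json

example_cluster_parameter = {
  'site-id': 'erp5',
  'timezone': 'UTC',
  'zeo': {
    '1': [
      {'storage-name': 'main',
       'mount-point': '/',
       'zope-cache-size': 5000,
       'zeo-cache-size': '400MB',
       'serialize-path': '/%(site-id)s/account_module/'},
    ],
  },
  'activity': {'zopecount': 2},
  'backend': {
    'default': {'zopecount': 2,
                'thread-amount': 4,
                'maxconn': 4,
                'access-control-string': 'all',
                # optional key:
                'longrequest-logger': {'timeout': '4', 'interval': '2'}},
  },
}

if __name__ == '__main__':
  # This is what would be passed as parameter_dict['json'] to the recipe.
  print(json.dumps(example_cluster_parameter, indent=2))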
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import zc.buildout
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
conversion_server_dict = dict(
working_path=self.options['data-directory'],
uno_path=self.options['ooo-uno-path'],
office_binary_path=self.options['ooo-binary-path'],
ip=self.options['ip'],
port=int(self.options['port']),
openoffice_port=int(self.options['openoffice-port']),
)
for env_line in self.options['environment'].splitlines():
env_line = env_line.strip()
if not env_line:
continue
if '=' in env_line:
env_key, env_value = env_line.split('=')
conversion_server_dict[env_key.strip()] = env_value.strip()
else:
raise zc.buildout.UserError('Line %r in environment parameter is '
'incorrect' % env_line)
config_file = self.createFile(self.options['configuration-file'],
self.substituteTemplate(self.getTemplateFilename('cloudooo.cfg.in'),
conversion_server_dict))
path_list.append(config_file)
path_list.append(self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute_with_signal_translation',
[self.options['ooo-paster'].strip(), 'serve', config_file]))
return path_list
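# Illustrative sketch (not part of the recipe above): how the multi-line
# "environment" option is parsed into the conversion server configuration.
# The option value and paths below are hypothetical; the parsing mirrors the
# loop in Recipe.install() above, with ValueError standing in for
# zc.buildout.UserError so the sketch stays self-contained.
environment_option = """
LD_LIBRARY_PATH=/opt/slapos/parts/libreoffice/lib
FONTCONFIG_FILE=/srv/slapgrid/instance/etc/fonts.conf
"""

conversion_server_dict = {}
for env_line in environment_option.splitlines():
  env_line = env_line.strip()
  if not env_line:
    continue
  if '=' in env_line:
    env_key, env_value = env_line.split('=')
    conversion_server_dict[env_key.strip()] = env_value.strip()
  else:
    raise ValueError('Line %r in environment parameter is incorrect' % env_line)

print(conversion_server_dict)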
......@@ -34,6 +34,7 @@ application_hostname = %(ip)s
openoffice_port = %(openoffice_port)s
# LD_LIBRARY_PATH passed to OpenOffice
env-LD_LIBRARY_PATH = %(LD_LIBRARY_PATH)s
env-FONTCONFIG_FILE = %(FONTCONFIG_FILE)s
#
# Mimetype Registry
......
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
ip = self.options['ip']
kumo_manager_port = int(self.options['manager-port'])
kumo_server_port = int(self.options['server-port'])
kumo_server_listen_port = int(self.options['server-listen-port'])
kumo_gateway_port = int(self.options['gateway-port'])
path_list = []
    # XXX: kumo does not store its pid in a file unless it runs as a daemon,
    # but running daemons is incompatible with SlapOS, so there is currently
    # no way to use Kumo's pid files to rotate logs or to send signals to the processes
config = dict(
kumo_gateway_binary=self.options['kumo-gateway-binary'],
kumo_gateway_ip=ip,
kumo_gateway_log=self.options['kumo-gateway-log'],
kumo_manager_binary=self.options['kumo-manager-binary'],
kumo_manager_ip=ip,
kumo_manager_log=self.options['kumo-manager-log'],
kumo_server_binary=self.options['kumo-server-binary'],
kumo_server_ip=ip,
kumo_server_log=self.options['kumo-server-log'],
kumo_server_storage=os.path.join(self.options['data-directory'], "kumodb.tch"),
kumo_manager_port=kumo_manager_port,
kumo_server_port=kumo_server_port,
kumo_server_listen_port=kumo_server_listen_port,
kumo_gateway_port=kumo_gateway_port
)
path_list.append(self.createExecutable(self.options['gateway-wrapper'],
self.substituteTemplate(self.getTemplateFilename('kumo_gateway.in'),
config)))
path_list.append(self.createExecutable(self.options['manager-wrapper'],
self.substituteTemplate(self.getTemplateFilename('kumo_manager.in'),
config)))
path_list.append(self.createExecutable(self.options['server-wrapper'],
self.substituteTemplate(self.getTemplateFilename('kumo_server.in'),
config)))
return path_list
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""
memcached instance configuration.
  wrapper_path -- location of the init script to generate
  binary_path -- location of the memcached command
ip -- ip of the memcached server
port -- port of the memcached server
"""
def install(self):
template_filename = self.getTemplateFilename('memcached.in')
config = dict(
memcached_binary=self.options['binary_path'],
memcached_ip=self.options['ip'],
memcached_port=self.options['port'],
)
executable_path = self.createExecutable(
self.options['wrapper_path'],
      self.substituteTemplate(template_filename, config))
return [executable_path]
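# Illustrative sketch (not part of the recipe above): the config mapping built
# by Recipe.install() and a dummy substitution showing how a memcached.in
# template could be rendered. The template text and paths are hypothetical;
# only the placeholder names come from the recipe.
memcached_template = ("#!/bin/sh\n"
                      "exec %(memcached_binary)s -p %(memcached_port)s -l %(memcached_ip)s\n")

config = dict(
  memcached_binary='/opt/parts/memcached/bin/memcached',
  memcached_ip='127.0.0.1',
  memcached_port='11000',
)

print(memcached_template % config)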
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import os
class Recipe(GenericBaseRecipe):
def _options(self, options):
options['password'] = self.generatePassword()
options['test-password'] = self.generatePassword()
for x in xrange(0, int(options['parallel-test-database-amount'])):
options['test-password-%s' % x] = self.generatePassword()
def install(self):
path_list = []
template_filename = self.getTemplateFilename('my.cnf.in')
mysql_conf = dict(
ip=self.options['ip'],
data_directory=self.options['data-directory'],
tcp_port=self.options['port'],
pid_file=self.options['pid-file'],
socket=self.options['socket'],
error_log=self.options['error-log'],
slow_query_log=self.options['slow-query-log'],
mysql_database=self.options['database'],
mysql_user=self.options['user'],
mysql_password=self.options['password'],
mysql_test_database=self.options['test-database'],
mysql_test_user=self.options['test-user'],
mysql_test_password=self.options['test-password'],
)
mysql_binary = self.options['mysql-binary']
    socket = self.options['socket']
mysql_conf_file = self.createFile(
self.options['conf-file'],
self.substituteTemplate(template_filename, mysql_conf)
)
path_list.append(mysql_conf_file)
mysql_script_list = []
# real database
mysql_script_list.append(self.substituteTemplate(
self.getTemplateFilename('initmysql.sql.in'),
{
'mysql_database': mysql_conf['mysql_database'],
'mysql_user': mysql_conf['mysql_user'],
'mysql_password': mysql_conf['mysql_password']
}
))
# default test database
mysql_script_list.append(self.substituteTemplate(
self.getTemplateFilename('initmysql.sql.in'),
{
'mysql_database': mysql_conf['mysql_test_database'],
'mysql_user': mysql_conf['mysql_test_user'],
'mysql_password': mysql_conf['mysql_test_password']
}
))
# parallel test databases
for x in xrange(0, int(self.options['parallel-test-database-amount'])):
mysql_script_list.append(self.substituteTemplate(
self.getTemplateFilename('initmysql.sql.in'),
{
'mysql_database': self.options['mysql-test-database-base'] + '_%s' % x,
'mysql_user': self.options['mysql-test-user-base'] + '_%s' % x,
'mysql_password': self.options['test-password-%s' % x]
}
))
mysql_script_list.append('EXIT')
mysql_script = '\n'.join(mysql_script_list)
mysql_upgrade_binary = self.options['mysql-upgrade-binary']
mysql_update = self.createPythonScript(
self.options['update-wrapper'],
'%s.mysql.updateMysql' % __name__,
[dict(
mysql_script=mysql_script,
mysql_binary=mysql_binary,
mysql_upgrade_binary=mysql_upgrade_binary,
socket=socket,
)]
)
path_list.append(mysql_update)
mysqld_binary = self.options['mysqld-binary']
mysqld = self.createPythonScript(
self.options['wrapper'],
'%s.mysql.runMysql' % __name__,
[dict(
mysql_install_binary=self.options['mysql-install-binary'],
mysqld_binary=mysqld_binary,
data_directory=mysql_conf['data_directory'],
mysql_binary=mysql_binary,
socket=socket,
configuration_file=mysql_conf_file,
)]
)
path_list.append(mysqld)
# backup configuration
full_backup = self.options['full-backup-directory']
incremental_backup = self.options['incremental-backup-directory']
innobackupex_argument_list = [self.options['perl-binary'],
self.options['innobackupex-binary'],
'--defaults-file=%s' % mysql_conf_file,
'--socket=%s' %mysql_conf['socket'].strip(), '--user=root',
'--ibbackup=%s'% self.options['xtrabackup-binary']]
environment = dict(PATH='%s' % self.options['bin-directory'])
innobackupex_incremental = self.createPythonScript(self.options['innobackupex-incremental'], 'slapos.recipe.librecipe.execute.executee', [innobackupex_argument_list + ['--incremental'], environment])
path_list.append(innobackupex_incremental)
innobackupex_full = self.createPythonScript(self.options['innobackupex-full'], 'slapos.recipe.librecipe.execute.executee', [innobackupex_argument_list, environment])
path_list.append(innobackupex_full)
backup_controller = self.createPythonScript(self.options['backup-script'], __name__ + '.innobackupex.controller', [innobackupex_incremental, innobackupex_full, full_backup, incremental_backup])
path_list.append(backup_controller)
# maatkit installation
for pt_script_name in (
'pt-archiver',
'pt-config-diff',
'pt-deadlock-logger',
'pt-duplicate-key-checker',
'pt-fifo-split',
'pt-find',
'pt-fk-error-logger',
'pt-heartbeat',
'pt-index-usage',
'pt-kill',
'pt-log-player',
'pt-online-schema-change',
'pt-query-advisor',
'pt-query-digest',
'pt-show-grants',
'pt-slave-delay',
'pt-slave-find',
'pt-slave-restart',
'pt-table-checksum',
'pt-table-sync',
'pt-tcp-model',
'pt-trend',
'pt-upgrade',
'pt-variable-advisor',
'pt-visual-explain',
):
pt_argument_list = [self.options['perl-binary'],
self.options['%s-binary' % pt_script_name],
'--defaults-file=%s' % mysql_conf_file,
'--socket=%s' %mysql_conf['socket'].strip(), '--user=root',
]
pt_exe = self.createPythonScript(os.path.join(self.options['bin-directory'], pt_script_name), 'slapos.recipe.librecipe.execute.executee', [pt_argument_list, environment])
path_list.append(pt_exe)
return path_list
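# Illustrative sketch (not part of the recipe above): how the parallel test
# database names and the innobackupex argument list are derived. All paths and
# values below are hypothetical; the naming scheme and the argument order
# mirror Recipe.install() above.
parallel_test_database_amount = 3
mysql_test_database_base = 'erp5_test'
mysql_test_user_base = 'erp5_test'

for x in range(parallel_test_database_amount):
  print('database: %s_%s user: %s_%s' % (
    mysql_test_database_base, x, mysql_test_user_base, x))

innobackupex_argument_list = [
  '/opt/parts/perl/bin/perl',
  '/opt/parts/xtrabackup/bin/innobackupex',
  '--defaults-file=/srv/instance/etc/my.cnf',
  '--socket=/srv/instance/var/run/mysqld.sock',
  '--user=root',
  '--ibbackup=/opt/parts/xtrabackup/bin/xtrabackup',
]
# The incremental wrapper simply appends '--incremental' to the same list.
print(' '.join(innobackupex_argument_list + ['--incremental']))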
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import binascii
import hashlib
import os
import re
import zc.buildout
_isurl = re.compile('([a-zA-Z0-9+.-]+)://').match
# based on Zope2.utilities.mkzopeinstance.write_inituser
def Zope2InitUser(path, username, password):
open(path, 'w').write('')
os.chmod(path, 0600)
open(path, "w").write('%s:{SHA}%s\n' % (
username,binascii.b2a_base64(hashlib.sha1(password).digest())[:-1]))
class Recipe(GenericBaseRecipe):
def _options(self, options):
options['password'] = self.generatePassword()
options['deadlock-password'] = self.generatePassword()
def install(self):
"""
    All Zope instances have to share the files created by portal_classes
    (until everything is integrated into the ZODB).
    So do not request separate zope instances; create multiple ones in the same partition.
"""
path_list = []
Zope2InitUser(self.options['inituser'], self.options['user'],
self.options['password'])
# Symlink to BT5 repositories defined in instance config.
# Those paths will eventually end up in the ZODB, and having symlinks
# inside the XXX makes it possible to reuse such ZODB with another software
# release[ version].
# Note: this path cannot be used for development, it's really just a
# read-only repository.
repository_path = self.options['bt5-repository']
self.bt5_repository_list = []
append = self.bt5_repository_list.append
for repository in self.options.get('bt5-repository-list', '').split():
repository = repository.strip()
if not repository:
continue
if _isurl(repository) and not repository.startswith("file://"):
# XXX: assume it's a valid URL
append(repository)
continue
if repository.startswith('file://'):
        repository = repository.replace('file://', '', 1)
if os.path.isabs(repository):
repo_id = hashlib.sha1(repository).hexdigest()
link = os.path.join(repository_path, repo_id)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
              'Target link %r already exists but it is not a link' % link)
os.unlink(link)
os.symlink(repository, link)
self.logger.debug('Created link %r -> %r' % (link, repository_path))
# Always provide a URL-Type
append("file://" + link)
# Create zope configuration file
zope_config = dict(
products=self.options['products'],
thread_amount=self.options['thread-amount'],
zodb_root_path=self.options['zodb-path'],
zodb_cache_size=int(self.options['zodb-cache-size']),
)
zope_environment = dict(
TMP=self.options['tmp-path'],
TMPDIR=self.options['tmp-path'],
HOME=self.options['tmp-path'],
PATH=self.options['bin-path']
)
# configure default Zope2 zcml
open(self.options['site-zcml'], 'w').write(open(self.getTemplateFilename(
'site.zcml')).read())
zope_config['instance'] = self.options['instance-path']
zope_config['event_log'] = self.options['event-log']
zope_config['z2_log'] = self.options['z2-log']
zope_config['pid-filename'] = self.options['pid-file']
zope_config['lock-filename'] = self.options['lock-file']
prefixed_products = []
for product in reversed(zope_config['products'].split()):
product = product.strip()
if product:
prefixed_products.append('products %s' % product)
prefixed_products.insert(0, 'products %s' % self.options[
'instance-products'])
zope_config['products'] = '\n'.join(prefixed_products)
zope_config['address'] = '%s:%s' % (self.options['ip'], self.options['port'])
zope_config.update(dump_url=self.options['deadlock-path'],
secret=self.options['deadlock-password'])
zope_wrapper_template_location = self.getTemplateFilename('zope.conf.in')
zope_conf_content = self.substituteTemplate(zope_wrapper_template_location,
zope_config)
zope_conf_path = self.createFile(self.options['configuration-file'], zope_conf_content)
path_list.append(zope_conf_path)
# Create init script
path_list.append(self.createPythonScript(self.options['wrapper'], 'slapos.recipe.librecipe.execute.executee', [[self.options['runzope-binary'].strip(), '-C', zope_conf_path], zope_environment]))
return path_list
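# Illustrative sketch (not part of the recipe above): the inituser line written
# by Zope2InitUser(), reproduced standalone. Username and password below are
# hypothetical; the expression mirrors the function above (Python 2, like the
# rest of this code).
import binascii
import hashlib

username = 'zope'
password = 'insecure'
inituser_line = '%s:{SHA}%s\n' % (
  username, binascii.b2a_base64(hashlib.sha1(password).digest())[:-1])
print(inituser_line)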
## Zope 2 configuration file generated by SlapOS
# Some defines
%%define INSTANCE %(instance)s
instancehome $INSTANCE
# Used products
%(products)s
# Environment is set up in the running wrapper script
# Reason: zope.conf is read too late for some components
# No need to debug
debug-mode off
# Number of ZServer threads
zserver-threads %(thread_amount)s
# File location
pid-filename %(pid-filename)s
lock-filename %(lock-filename)s
# Temporary storage database (for sessions)
<zodb_db temporary>
<temporarystorage>
name temporary storage for sessioning
</temporarystorage>
mount-point /temp_folder
container-class Products.TemporaryFolder.TemporaryContainer
</zodb_db>
# Logging configuration
<eventlog>
<logfile>
dateformat
path %(event_log)s
</logfile>
</eventlog>
<logger access>
<logfile>
dateformat
path %(z2_log)s
</logfile>
</logger>
# Serving configuration
<http-server>
address %(address)s
</http-server>
# ZODB configuration
<zodb_db root>
cache-size %(zodb_cache_size)d
<filestorage>
path %(zodb_root_path)s
</filestorage>
mount-point /
</zodb_db>
<zoperunner>
program $INSTANCE/bin/runzope
</zoperunner>
# DeadlockDebugger configuration
<product-config DeadlockDebugger>
dump_url %(dump_url)s
secret %(secret)s
</product-config>
# ERP5 Timer Service
%%import timerserver
<timer-server>
interval 5
</timer-server>
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import binascii
import hashlib
import os
import re
import zc.buildout
_isurl = re.compile('([a-zA-Z0-9+.-]+)://').match
# based on Zope2.utilities.mkzopeinstance.write_inituser
def Zope2InitUser(path, username, password):
open(path, 'w').write('')
os.chmod(path, 0600)
open(path, "w").write('%s:{SHA}%s\n' % (
username,binascii.b2a_base64(hashlib.sha1(password).digest())[:-1]))
class Recipe(GenericBaseRecipe):
def _options(self, options):
options['password'] = self.generatePassword()
options['deadlock-password'] = self.generatePassword()
def install(self):
"""
    All Zope instances have to share the files created by portal_classes
    (until everything is integrated into the ZODB).
    So do not request separate zope instances; create multiple ones in the same partition.
"""
path_list = []
Zope2InitUser(self.options['inituser'], self.options['user'],
self.options['password'])
# Symlink to BT5 repositories defined in instance config.
# Those paths will eventually end up in the ZODB, and having symlinks
# inside the XXX makes it possible to reuse such ZODB with another software
# release[ version].
# Note: this path cannot be used for development, it's really just a
# read-only repository.
repository_path = self.options['bt5-repository']
self.bt5_repository_list = []
append = self.bt5_repository_list.append
for repository in self.options.get('bt5-repository-list', '').split():
repository = repository.strip()
if not repository:
continue
if _isurl(repository) and not repository.startswith("file://"):
# XXX: assume it's a valid URL
append(repository)
continue
if repository.startswith('file://'):
        repository = repository.replace('file://', '', 1)
if os.path.isabs(repository):
repo_id = hashlib.sha1(repository).hexdigest()
link = os.path.join(repository_path, repo_id)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
              'Target link %r already exists but it is not a link' % link)
os.unlink(link)
os.symlink(repository, link)
self.logger.debug('Created link %r -> %r' % (link, repository_path))
# Always provide a URL-Type
append("file://" + link)
# Generate Zeo connections
zeo_snippet_template = open(self.getTemplateFilename('zope.zeo.entry.conf.in'
)).read()
zeo_snippet_list = []
for zeo_line in self.options['zeo-connection-string'].splitlines():
      zeo_line = zeo_line.strip()
if not zeo_line:
continue
d = dict()
for param in zeo_line.split():
k, v = param.split('=')
d[k.strip()] = v.strip()
zeo_snippet_list.append(zeo_snippet_template % d)
# Create zope configuration file
zope_config = dict(
products=self.options['products'],
thread_amount=self.options['thread-amount'],
zodb_configuration='\n'.join(zeo_snippet_list)
)
zope_environment = dict(
TMP=self.options['tmp-path'],
TMPDIR=self.options['tmp-path'],
HOME=self.options['tmp-path'],
PATH=self.options['bin-path'],
TIMEZONE=self.options['timezone'],
)
# longrequestlogger product which requires environment settings
longrequest_logger_file = self.options.get('longrequest-logger-file', None)
longrequest_logger_timeout = \
self.options.get('longrequest-logger-timeout', None)
longrequest_logger_interval= \
self.options.get('longrequest-logger-interval', None)
if longrequest_logger_file:
# add needed zope configuration
      zope_environment.update(
        longrequestlogger_file=longrequest_logger_file,
        longrequestlogger_timeout=longrequest_logger_timeout,
        longrequestlogger_interval=longrequest_logger_interval)
# configure default Zope2 zcml
open(self.options['site-zcml'], 'w').write(open(self.getTemplateFilename(
'site.zcml')).read())
zope_config['instance'] = self.options['instance-path']
zope_config['event_log'] = self.options['event-log']
zope_config['z2_log'] = self.options['z2-log']
zope_config['pid-filename'] = self.options['pid-file']
zope_config['lock-filename'] = self.options['lock-file']
prefixed_products = []
for product in reversed(zope_config['products'].split()):
product = product.strip()
if product:
prefixed_products.append('products %s' % product)
prefixed_products.insert(0, 'products %s' % self.options[
'instance-products'])
zope_config['products'] = '\n'.join(prefixed_products)
zope_config['address'] = '%s:%s' % (self.options['ip'], self.options['port'])
zope_config.update(dump_url=self.options['deadlock-path'],
secret=self.options['deadlock-password'])
zope_wrapper_template_location = self.getTemplateFilename('zope.conf.in')
zope_conf_content = self.substituteTemplate(zope_wrapper_template_location,
zope_config)
if self.isTrueValue(self.options['timeserver']):
zope_conf_content += self.substituteTemplate(self.getTemplateFilename(
'zope.conf.timeserver.in'), {})
zope_conf_path = self.createFile(self.options['configuration-file'], zope_conf_content)
path_list.append(zope_conf_path)
# Create init script
path_list.append(self.createPythonScript(self.options['wrapper'], 'slapos.recipe.librecipe.execute.executee', [[self.options['runzope-binary'].strip(), '-C', zope_conf_path], zope_environment]))
return path_list
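# Illustrative sketch (not part of the recipe above): how one line of the
# "zeo-connection-string" option is parsed into the mapping used to render the
# zope.zeo.entry.conf.in snippet (the <zodb_db> template further below). The
# values are hypothetical; the keys match the snippet placeholders.
zeo_line = ('storage-name=main zope-cache-size=5000 zeo-cache-size=400MB '
            'server=[::1]:15001 mount-point=/')
d = dict()
for param in zeo_line.split():
  k, v = param.split('=')
  d[k.strip()] = v.strip()
print(d)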
<configure
xmlns="http://namespaces.zope.org/zope"
xmlns:meta="http://namespaces.zope.org/meta"
xmlns:five="http://namespaces.zope.org/five">
<include package="Products.Five" />
<meta:redefinePermission from="zope2.Public" to="zope.Public" />
<!-- Load the meta -->
<include files="package-includes/*-meta.zcml" />
<five:loadProducts file="meta.zcml"/>
<!-- Load the configuration -->
<include files="package-includes/*-configure.zcml" />
<five:loadProducts />
<!-- Load the configuration overrides-->
<includeOverrides files="package-includes/*-overrides.zcml" />
<five:loadProductsOverrides />
<securityPolicy
component="Products.Five.security.FiveSecurityPolicy" />
</configure>
......@@ -31,12 +31,14 @@ lock-filename %(lock-filename)s
# Logging configuration
<eventlog>
level info
<logfile>
dateformat
path %(event_log)s
</logfile>
</eventlog>
<logger access>
level WARN
<logfile>
dateformat
path %(z2_log)s
......@@ -49,7 +51,14 @@ lock-filename %(lock-filename)s
</http-server>
# ZODB configuration
%(zodb_configuration_string)s
%(zodb_configuration)s
<zoperunner>
program $INSTANCE/bin/runzope
</zoperunner>
# DeadlockDebugger configuration
<product-config DeadlockDebugger>
dump_url %(dump_url)s
secret %(secret)s
</product-config>
<zodb_db %(storage-name)s>
cache-size %(zope-cache-size)s
mount-point %(mount-point)s
<zeoclient>
cache-size %(zeo-cache-size)s
server %(server)s
storage %(storage-name)s
name %(storage-name)s
</zeoclient>
</zodb_db>
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""
haproxy instance configuration.
name -- local name of the haproxy
wrapper-path -- location of the init script to generate
binary-path -- location of the haproxy command
conf-path -- location of the configuration file
ip -- ip of the haproxy server
port -- port of the haproxy server
server-check-path -- path of the domain to check
address -- string with list of all url to check
Example: 127.0.0.1:12004 127.0.0.1:12005
"""
def install(self):
    # inter must be quite short in order to quickly detect an unresponsive node
    # and to quickly detect a node which is back.
    # rise must be the minimum possible: 1. Indeed, a node which is back does
    # not need to wait any longer and we can give it work immediately.
    # fall should be quite short: with inter at 3 and fall at 2, a node will be
    # considered dead after 6 seconds.
    # maxconn should be set to the maximum number of threads we have per zope,
    # so that haproxy manages the request queue, with the possibility to
    # move a request to another node if the initially selected one is dead.
    # maxqueue is the number of requests waiting in the queue of every zope
    # client. It makes sure that no single zope client handles all
    # the work while other clients are doing nothing. This was happening
    # even though we have round-robin distribution, because when a node dies
    # for a few seconds, all requests are dispatched to other nodes, and then
    # users stick to those nodes and do not come back. Please note this option
    # is not an issue if you have more than (maxqueue * node_quantity) requests,
    # because haproxy will handle a top-level queue.
snippet_filename = self.getTemplateFilename(
'haproxy-server-snippet.cfg.in')
    # Prepare one server entry per backend
server_snippet = ""
i = 0
name = self.options['name']
for address in self.options['backend-list'].split():
i += 1
server_snippet += self.substituteTemplate(
snippet_filename, dict(
name='%s_%s' % (name, i),
address=address,
cluster_zope_thread_amount=self.options['maxconn']))
config = dict(
name=name,
ip=self.options['ip'],
port=self.options['port'],
server_text=server_snippet,
server_check_path=self.options['server-check-path'],)
template_filename = self.getTemplateFilename('haproxy.cfg.in')
configuration_path = self.createFile(
self.options['conf-path'],
self.substituteTemplate(template_filename, config))
# Create running wrapper
wrapper_path = self.createPythonScript(
self.options['wrapper-path'],
'slapos.recipe.librecipe.execute.execute',
arguments=[self.options['binary-path'].strip(), '-f', configuration_path],)
return [configuration_path, wrapper_path]
server %(name)s %(address)s cookie %(name)s check inter 3s rise 1 fall 2 maxqueue 5 maxconn %(cluster_zope_thread_amount)s
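# Illustrative sketch (not part of the template above): substituting the
# haproxy server snippet for one backend address. The name, address and
# thread amount are hypothetical values.
snippet = ('  server %(name)s %(address)s cookie %(name)s check inter 3s '
           'rise 1 fall 2 maxqueue 5 maxconn %(cluster_zope_thread_amount)s')
print(snippet % dict(name='default_1',
                     address='[::1]:12001',
                     cluster_zope_thread_amount='4'))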
......@@ -39,6 +39,7 @@ import urlparse
# Use to do from slapos.recipe.librecipe import GenericBaseRecipe
from generic import GenericBaseRecipe
from genericslap import GenericSlapRecipe
from filehash import filehash
class BaseSlapRecipe:
"""Base class for all slap.recipe.*"""
......
......@@ -4,6 +4,46 @@ import signal
import subprocess
import time
import inotifyx
def _wait_files_creation(file_list):
  # Establish, for each directory, the set of files we are waiting for
directories = dict()
for dirname, filename in [os.path.split(f) for f in file_list]:
directories.setdefault(dirname, dict())
directories[dirname][filename] = False
def all_files_exists():
return all([all(files.values()) for files in directories.values()])
fd = inotifyx.init()
try:
    # Watch every directory containing an expected file
watchdescriptors = dict()
for dirname in directories.keys():
wd = inotifyx.add_watch(fd,
dirname,
inotifyx.IN_CREATE | inotifyx.IN_DELETE)
watchdescriptors[wd] = dirname
    # Mark the files which already exist
for dirname, filename in [os.path.split(f) for f in file_list]:
directories[dirname][filename] = os.path.exists(os.path.join(dirname,
filename))
# Let's wait for every file creation
while not all_files_exists():
events_list = inotifyx.get_events(fd)
for event in events_list:
dirname = watchdescriptors[event.wd]
if event.name in directories[dirname]:
          # One of the watched files was created or deleted
if event.mask & inotifyx.IN_DELETE:
directories[dirname][event.name] = False
else:
directories[dirname][event.name] = True
finally:
os.close(fd)
def execute(args):
"""Portable execution with process replacement"""
......@@ -14,18 +54,7 @@ def execute_wait(args):
"""Execution but after all files in args[1] exists"""
exec_list = list(args[0])
file_list = list(args[1])
sleep = 60
while True:
ready = True
for f in file_list:
if not os.path.exists(f):
print 'File %r does not exists, sleeping for %s' % (f, sleep)
ready = False
if ready:
break
# XXX: It's the same as ../ca/certificate_authoritiy.py
# We should use pyinotify as well. Or select() on socket.
time.sleep(sleep)
_wait_files_creation(file_list)
os.execv(exec_list[0], exec_list + sys.argv[1:])
......@@ -49,16 +78,7 @@ def executee_wait(args):
env = os.environ.copy()
for k,v in environment.iteritems():
env[k] = v
sleep = 60
while True:
ready = True
for f in file_list:
if not os.path.exists(f):
print 'File %r does not exists, sleeping for %s' % (f, sleep)
ready = False
if ready:
break
time.sleep(sleep)
_wait_files_creation(file_list)
os.execve(exec_list[0], exec_list + sys.argv[1:], env)
def sig_handler(signal, frame):
......
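# Illustrative usage sketch (not part of the diff above): waiting for two
# files before continuing, the way execute_wait()/executee_wait() now do it
# through _wait_files_creation(). The paths are hypothetical, and this assumes
# the slapos.recipe.librecipe.execute module and inotifyx are importable; the
# call blocks until every listed file exists.
if __name__ == '__main__':
  from slapos.recipe.librecipe.execute import _wait_files_creation
  _wait_files_creation([
    '/srv/instance/etc/apache.crt',
    '/srv/instance/etc/apache.key',
  ])
  print('all watched files now exist')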
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import hashlib
import shutil
import os
DEFAULT_HASH = 'sha512'
class Hash(object):
def __init__(self, name):
self._hash = hashlib.new(name)
def write(self, data):
self._hash.update(data)
def read(self):
return self._hash.hexdigest()
def filehash(filename, type_=DEFAULT_HASH):
if not os.path.isfile(filename):
raise ValueError("%r isn't a file" % filename)
digest = Hash(type_)
with open(filename, 'r') as file_:
shutil.copyfileobj(file_, digest)
return digest.read()
# Home made hashdeep <http://md5deep.sourceforge.net/>
def dirhash(dirname, type_=DEFAULT_HASH):
  """Walk a directory and return a unique hash of
  the directory structure and its files' content."""
if not os.path.isdir(dirname):
raise ValueError("%r isn't a directory" % dirname)
digest = Hash(type_)
# List the directory structure
path_list = []
for dirname, dirlist, filelist in os.walk(dirname, followlinks=False):
for filename in filelist:
path_list.append(os.path.join(dirname, filename))
path_list.sort()
for path in path_list:
# Change the hash even if the file or the directory is empty
digest.write(path)
# Update the hash with file content
if os.path.isfile(path):
with open(path, 'r') as file_:
shutil.copyfileobj(file_, digest)
return digest.read()
def pathhash(path, type_=DEFAULT_HASH):
if os.path.isdir(path):
return dirhash(path, type_)
elif os.path.isfile(path):
return filehash(path, type_)
raise ValueError("%r isn't a directory nor a file" % path)
# you can use python -m slapos.recipe.librecipe.filehash [hash] path
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
raise ValueError("Not enough command line arguments")
if len(sys.argv) == 2:
print sys.argv[1], '-', pathhash(sys.argv[1])
else:
print sys.argv[2], '-', pathhash(sys.argv[2], sys.argv[1])
......@@ -28,6 +28,9 @@ import logging
import os
import sys
import inspect
import re
import urllib
import urlparse
import pkg_resources
import zc.buildout
......@@ -39,11 +42,12 @@ class GenericBaseRecipe(object):
def __init__(self, buildout, name, options):
"""Recipe initialisation"""
self.name = name
self.options = options
self.buildout = buildout
self.logger = logging.getLogger(name)
self.options = options.copy() # If _options use self.optionIsTrue
self._options(options) # Options Hook
self.options = options.copy() # Updated options dict
self._ws = self.getWorkingSet()
......@@ -98,9 +102,17 @@ class GenericBaseRecipe(object):
path, arguments=arguments)[0]
return script
def createDirectory(self, parent, name, mode=0700):
path = os.path.join(parent, name)
if not os.path.exists(path):
os.mkdir(path, mode)
elif not os.path.isdir(path):
raise OSError("%r exists but is not a directory." % name)
return path
  def substituteTemplate(self, template_location, mapping_dict):
    """Read from file template_location and substitute content with
mapping_dict douing a dummy python format."""
mapping_dict doing a dummy python format."""
with open(template_location, 'r') as template:
return template.read() % mapping_dict
......@@ -120,4 +132,38 @@ class GenericBaseRecipe(object):
return str(value).lower() in GenericBaseRecipe.TRUE_VALUES
def optionIsTrue(self, optionname, default=None):
if default is not None and optionname not in self.options:
return default
return self.isTrueValue(self.options[optionname])
def unparseUrl(self, scheme, host, path='', params='', query='',
fragment='', port=None, auth=None):
"""Join a url with auth, host, and port.
* auth can be either a login string or a tuple (login, password).
* if the host is an ipv6 address, brackets will be added to surround it.
"""
    # XXX-Antoine: I didn't find any standard module to join a URL with
    # login, password, ipv6 host and port.
    # So instead of copying and pasting it in every recipe, I factorized it right here.
netloc = ''
if auth is not None:
auth = tuple(auth)
netloc = urllib.quote(str(auth[0])) # Login
if len(auth) > 1:
netloc += ':%s' % urllib.quote(auth[1]) # Password
netloc += '@'
    # host is an ipv6 address without brackets
if ':' in host and not re.match(r'^\[.*\]$', host):
netloc += '[%s]' % host
else:
netloc += str(host)
if port is not None:
netloc += ':%s' % port
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
return url
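# Illustrative sketch (not part of the diff above): what unparseUrl() produces
# for an IPv6 host with authentication, reproduced standalone with the same
# urllib/urlparse calls (Python 2, like the module above). All values are
# hypothetical.
import re
import urllib
import urlparse

def unparse_url_example(scheme, host, port=None, auth=None, path=''):
  netloc = ''
  if auth is not None:
    auth = tuple(auth)
    netloc = urllib.quote(str(auth[0]))          # login
    if len(auth) > 1:
      netloc += ':%s' % urllib.quote(auth[1])    # password
    netloc += '@'
  if ':' in host and not re.match(r'^\[.*\]$', host):
    netloc += '[%s]' % host                      # bracket bare IPv6 addresses
  else:
    netloc += str(host)
  if port is not None:
    netloc += ':%s' % port
  return urlparse.urlunparse((scheme, netloc, path, '', '', ''))

print(unparse_url_example('https', '2001:db8::1', port=8080,
                          auth=('user', 'secret')))
# -> https://user:secret@[2001:db8::1]:8080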
......@@ -24,23 +24,17 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import logging
from slapos import slap
import zc.buildout
import zc.recipe.egg
import time
import re
import urlparse
class GenericSlapRecipe(object):
from generic import GenericBaseRecipe
class GenericSlapRecipe(GenericBaseRecipe):
"""Base class for all slap.recipe.*"""
def __init__(self, buildout, name, options):
"""Default initialisation"""
self.name = name
options['eggs'] = 'slapos.cookbook'
self.options = options
self.logger = logging.getLogger(self.name)
GenericBaseRecipe.__init__(self, buildout, name, options)
self.slap = slap.slap()
# SLAP related information
......@@ -52,21 +46,6 @@ class GenericSlapRecipe(object):
self.key_file = slap_connection.get('key-file')
self.cert_file = slap_connection.get('cert-file')
# setup egg to give possibility to generate scripts
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
# Hook options
self._options(options)
# setup auto uninstall/install
self._setupAutoInstallUninstall()
def _setupAutoInstallUninstall(self):
"""By default SlapOS recipes are reinstalled each time"""
# Note: It is possible to create in future subclass which will do no-op in
# this method
self.options['slapos-timestamp'] = str(time.time())
def install(self):
self.slap.initializeConnection(self.server_url, self.key_file,
self.cert_file)
......@@ -88,42 +67,6 @@ class GenericSlapRecipe(object):
"""Hook which shall be implemented in children class"""
raise NotImplementedError('Shall be implemented by subclass')
def _options(self, options):
"""Hook which can be implemented in children class"""
pass
def setConnectionUrl(self, *args, **kwargs):
url = self._unparseUrl(*args, **kwargs)
url = self.unparseUrl(*args, **kwargs)
self.setConnectionDict(dict(url=url))
def _unparseUrl(self, scheme, host, path='', params='', query='',
fragment='', port=None, auth=None):
"""Join a url with auth, host, and port.
* auth can be either a login string or a tuple (login, password).
* if the host is an ipv6 address, brackets will be added to surround it.
"""
# XXX-Antoine: I didn't find any standard module to join an url with
# login, password, ipv6 host and port.
# So instead of copy and past in every recipe I factorized it right here.
netloc = ''
if auth is not None:
auth = tuple(auth)
netloc = str(auth[0]) # Login
if len(auth) > 1:
netloc += ':%s' % auth[1] # Password
netloc += '@'
# host is an ipv6 address whithout brackets
if ':' in host and not re.match(r'^\[.*\]$', host):
netloc += '[%s]' % host
else:
netloc += str(host)
if port is not None:
netloc += ':%s' % port
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
return url
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import inotifyx
def subfiles(directory):
  """Yield the existing files of a directory, then wait for and yield
  newly created ones.

  CAUTION: *DO NOT TRY TO CONVERT THE RESULT OF THIS FUNCTION INTO A LIST!
  ALWAYS ITERATE OVER IT!*"""
watchfd = inotifyx.init()
inotifyx.add_watch(watchfd, directory, inotifyx.IN_CREATE)
try:
subfiles = set(os.listdir(directory))
subfiles |= set([file_.name for file_ in inotifyx.get_events(watchfd, 0)])
while True:
for file_ in subfiles:
yield os.path.join(directory, file_)
subfiles = [file_.name for file_ in inotifyx.get_events(watchfd)]
finally:
os.close(watchfd)
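# Illustrative usage sketch (not part of the module above): consuming the
# subfiles() generator defined above. The directory is hypothetical; remember
# that the generator never terminates, it keeps yielding newly created files.
if __name__ == '__main__':
  for path in subfiles('/srv/instance/var/requests'):
    print('new request file: %s' % path)
    # process the file here, then loop and wait for the next one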
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
import subprocess
import inotifyx
from slapos.recipe.librecipe import GenericBaseRecipe
class LockFile(object):
class LockException(Exception):
pass
def __init__(self, filename, wait=True, exit=False):
self.filename = filename
if wait:
self.callback = lambda: self.waitDeletion()
elif not exit:
self.callback = lambda: self.raiseException()
else:
self.callback = lambda: sys.exit(1)
def raiseException(self):
raise LockFile.LockException("Not able to lock the file")
def waitDeletion(self):
inotify_fd = inotifyx.init()
try:
inotifyx.add_watch(inotify_fd, self.filename, inotifyx.IN_DELETE)
inotifyx.get_events(inotify_fd)
except IOError: # add_watch failed
pass
finally:
os.close(inotify_fd)
self.__enter__()
def __enter__(self):
try:
# Atomic file acquisition
self._fd = os.open(self.filename, os.O_CREAT | os.O_EXCL)
except OSError:
self.callback()
def __exit__(self, exc_type, exc_value, traceback):
os.close(self._fd)
os.unlink(self.filename)
def locked_run(args):
with LockFile(args['filename'], wait=args['wait'], exit=True):
subprocess.check_call([args['binary']])
class Recipe(GenericBaseRecipe):
def install(self):
wrapper = self.createPythonScript(self.options['wrapper'],
__name__ + '.locked_run',
dict(
filename=self.options['lock-file'],
wait=self.optionIsTrue('wait', False),
binary=self.options['binary'],
)
)
return [wrapper]
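# Illustrative usage sketch (not part of the module above): serialising a
# critical section with the LockFile class defined above, and the argument
# dict expected by locked_run(). The paths and binary are hypothetical.
if __name__ == '__main__':
  with LockFile('/srv/instance/var/run/backup.lock', wait=True):
    print('lock acquired, doing exclusive work')
  # locked_run() is what the generated wrapper script calls:
  # locked_run({'filename': '/srv/instance/var/run/backup.lock',
  #             'wait': False, 'binary': '/srv/instance/bin/backup'})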
......@@ -30,87 +30,55 @@ from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def _options(self, options):
if 'name' not in options:
options['name'] = self.name
def install(self):
path_list = []
logrotate_backup = self.options['backup']
logrotate_d = self.options['logrotate-entries']
logrotate_conf_file = self.options['conf']
logrotate_conf = []
logrotate_conf.append("include %s" % logrotate_d)
logrotate_conf.append("olddir %s" % logrotate_backup)
logrotate_conf.append("dateext")
frequency = 'daily'
if 'frequency' in self.options:
frequency = self.options['frequency']
logrotate_conf.append(frequency)
num_rotate = 30
if 'num-rotate' in self.options:
num_rotate = self.options['num-rotate']
logrotate_conf.append("rotate %s" % num_rotate)
logrotate_conf.append("compress")
logrotate_conf.append("compresscmd %s" % self.options['gzip-binary'])
logrotate_conf.append("compressoptions -9")
logrotate_conf.append("uncompresscmd %s" % self.options['gunzip-binary'])
logrotate_conf_file = self.createFile(logrotate_conf_file, '\n'.join(logrotate_conf))
logrotate_conf.append(logrotate_conf_file)
logrotate_conf = [
'daily',
'dateext',
'rotate 3650',
'compress',
'compresscmd %s' % self.options['gzip-binary'],
'compressoptions -9',
'uncompresscmd %s' % self.options['gunzip-binary'],
'notifempty',
'sharedscripts',
'create',
'include %s' % logrotate_d,
'olddir %s' % logrotate_backup,
]
logrotate_conf_file = self.createFile(logrotate_conf_file,
'\n'.join(logrotate_conf))
state_file = self.options['state-file']
logrotate = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.exceute.execute',
'slapos.recipe.librecipe.execute.execute',
[self.options['logrotate-binary'], '-s', state_file, logrotate_conf_file, ]
)
path_list.append(logrotate)
return path_list
return [logrotate, logrotate_conf_file]
class Part(GenericBaseRecipe):
def _options(self, options):
if 'name' not in options:
options['name'] = self.name
def install(self):
logrotate_d = self.options['logrotate-entries']
part_path = os.path.join(logrotate_d, self.options['name'])
conf = []
if 'frequency' in self.options:
conf.append(self.options['frequency'])
if 'num-rotate' in self.options:
conf.append('rotate %s' % self.options['num-rotate'])
if 'post' in self.options:
conf.append("postrotate\n%s\nendscript" % self.options['post'])
if 'pre' in self.options:
conf.append("prerotate\n%s\nendscript" % self.options['pre'])
if self.optionIsTrue('sharedscripts', False):
conf.append("sharedscripts")
if self.optionIsTrue('notifempty', False):
conf.append('notifempty')
if self.optionIsTrue('create', True):
conf.append('create')
log = self.options['log']
self.createFile(os.path.join(logrotate_d, self.options['name']),
part_path = self.createFile(os.path.join(logrotate_d, self.options['name']),
"%(logfiles)s {\n%(conf)s\n}" % {
'logfiles': log,
'conf': '\n'.join(conf),
......
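# Illustrative sketch (not part of the diff above): the per-log entry the Part
# recipe writes into the logrotate.d directory, rendered with hypothetical
# values. The format string and directive names come from the Part class.
conf = ['rotate 30',
        'postrotate\n/srv/instance/bin/apache-graceful\nendscript',
        'sharedscripts',
        'notifempty',
        'create']
entry = "%(logfiles)s {\n%(conf)s\n}" % {
  'logfiles': '/srv/instance/var/log/apache*.log',
  'conf': '\n'.join(conf),
}
print(entry)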
......@@ -42,7 +42,7 @@ class Recipe(GenericBaseRecipe):
def install(self):
for directory in self.directory.values():
for directory in sorted(self.directory.values()):
path = directory
if not os.path.exists(path):
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
def dump(args):
mydumper_cmd = [args['mydumper']]
mydumper_cmd.extend(['-B', args['database']])
if args['socket'] is not None:
mydumper_cmd.extend(['-S', args['socket']])
else:
mydumper_cmd.extend(['-h', args['host']])
    mydumper_cmd.extend(['-P', str(args['port'])])
mydumper_cmd.extend(['-u', args['user']])
if args['password'] is not None:
mydumper_cmd.extend(['-p', args['password']])
if args['compression']:
mydumper_cmd.append('--compress')
if args['rows'] is not None:
mydumper_cmd.extend(['-r', args['rows']])
mydumper_cmd.extend(['-o', args['directory']])
subprocess.check_call(mydumper_cmd)
def do_import(args):
mydumper_cmd = [args['mydumper']]
mydumper_cmd.extend(['-B', args['database']])
if args['socket'] is not None:
mydumper_cmd.extend(['-S', args['socket']])
else:
mydumper_cmd.extend(['-h', args['host']])
    mydumper_cmd.extend(['-P', str(args['port'])])
mydumper_cmd.extend(['-u', args['user']])
if args['password'] is not None:
mydumper_cmd.extend(['-p', args['password']])
mydumper_cmd.append('--overwrite-tables')
mydumper_cmd.extend(['-d', args['directory']])
subprocess.check_call(mydumper_cmd)
class Recipe(GenericBaseRecipe):
def install(self):
# Host or socket should be defined
try:
self.options['host']
except KeyError:
self.options['socket']
config = dict(database=self.options['database'],
socket=self.options.get('socket'),
host=self.options.get('host'),
port=self.options.get('port', 3306),
directory=self.options['backup-directory'],
user=self.options['user'],
password=self.options.get('password'),
)
name = __name__
if self.optionIsTrue('import', False):
config.update(mydumper=self.options['myloader-binary'])
name += '.do_import'
else:
config.update(mydumper=self.options['mydumper-binary'],
compression=self.optionIsTrue('compression', default=False),
rows=self.options.get('rows'),
)
name += '.dump'
wrapper = self.createPythonScript(self.options['wrapper'],
name,
config)
return [wrapper]
......@@ -24,8 +24,12 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
from slapos.recipe.librecipe import filehash
class Recipe(GenericBaseRecipe):
......@@ -44,7 +48,6 @@ class Recipe(GenericBaseRecipe):
pid_file=self.options['pid-file'],
socket=self.options['socket'],
error_log=self.options['error-log'],
mysql_database=self.options['database'],
mysql_user=self.options['user'],
mysql_password=self.options['password'],
......@@ -109,28 +112,16 @@ class Recipe(GenericBaseRecipe):
path_list.append(mysqld)
# backup configuration
if self.optionIsTrue('backup', default=False):
backup_script = self.createPythonScript(
self.options['backup-script'],
'%s.do_backup' % __name__,
dict(
mydumper_binary=self.options['mydumper-binary'],
database=mysql_conf['mysql_database'],
socket=mysql_conf['socket'],
backup_directory=self.options['backup-directory']
),
)
path_list.append(backup_script)
......@@ -138,7 +129,7 @@ class Recipe(GenericBaseRecipe):
if self.optionIsTrue('recovering', default=False):
recovering_script = self.createPythonScript(
self.options['recovering-wrapper'],
'%s.import_dump' % __name__,
{
'lock_file': os.path.join(self.work_directory,
'import_done'),
......@@ -156,3 +147,47 @@ class Recipe(GenericBaseRecipe):
return path_list
# Replace zcat dump.sql.gz | mysql
def import_dump(args):
# Get data from kwargs
cache_file = args['cache_file']
database = args['database']
mysql_binary = args['mysql_binary']
mysql_socket = args['mysql_socket']
dump_file = args['dump_file']
zcat_binary = args['zcat_binary']
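# The dump is only replayed when its sha512 differs from the one recorded in
# cache_file, so running this script repeatedly is idempotent.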
sha512sum = filehash(dump_file)
with open(cache_file, 'r') as cache_fileobj:
last_sha512sum = cache_fileobj.read().strip()
if sha512sum != last_sha512sum:
zcat = subprocess.Popen([zcat_binary, dump_file], stdout=subprocess.PIPE)
mysql = subprocess.Popen([mysql_binary, '--socket=%s' % mysql_socket, '-D',
database, '-u', 'root'], stdin=zcat.stdout)
zcat.stdout.close()
returncode = mysql.wait()
if returncode == 0:
with open(cache_file, 'w') as cache_fileobj:
cache_fileobj.write(sha512sum)
sys.exit(returncode)
def promise(args):
# This is not a dependency of slapos.cookbook, because it shall be run
# in a python environment that provides mysql-python (MySQLdb)
import MySQLdb
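# Simply connecting and opening a cursor is enough for the promise: any
# exception raised here makes the promise script fail.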
user = args['user']
password = args['password']
db = args['db']
host = args['host']
port = args['port']
db = MySQLdb.connect(host=host, port=port, user=user, passwd=password,
db=db)
cursor = db.cursor()
cursor.close()
import subprocess
import os
# Replace mysqldump | gzip > tmpdump && mv -f tmpdump dumpfile
def do_backup(kwargs):
mysqldump_cmd = kwargs['mysqldump']
gzip_bin = kwargs['gzip']
tmpdump = kwargs['tmpdump']
dumpfile = kwargs['dumpfile']
# mysqldump | gzip > tmpdump
with open(tmpdump, 'w') as output:
mysqldump = subprocess.Popen(mysqldump_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
gzip = subprocess.Popen([gzip_bin],
stdin=mysqldump.stdout,
stdout=output,
stderr=subprocess.STDOUT)
mysqldump.stdout.close()
if gzip.wait() != 0:
raise ValueError("Gzip return a non zero value.")
os.rename(tmpdump, dumpfile)
import os
import sys
import time
def catdatefile(args):
directory = args[0]
try:
suffix = args[1]
except IndexError:
suffix = '.log'
f = open(os.path.join(directory,
time.strftime('%Y-%m-%d.%H:%M.%s') + suffix), 'aw')
for line in sys.stdin.read():
f.write(line)
f.close()
import sys
import os
import time
import subprocess
def import_remote_dump(kwargs):
# Get data from kwargs
lock_file = kwargs['lock_file']
database = kwargs['database']
mysql_binary = kwargs['mysql_binary']
mysql_socket = kwargs['mysql_socket']
duplicity_binary = kwargs['duplicity_binary']
remote_backup = kwargs['remote_backup']
local_directory = kwargs['local_directory']
dump_name = kwargs['dump_name']
zcat_binary = kwargs['zcat_binary']
# The script really starts here
if os.path.exists(lock_file):
sys.exit(127)
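# Wait (polling every 10 seconds) until the MySQL server accepts connections
# and the target database exists before restoring the dump.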
while subprocess.call([mysql_binary, '--socket=%s' % mysql_socket,
'-u', 'root', '-e', 'use %s;' % database]) != 0:
time.sleep(10)
subprocess.check_call([duplicity_binary, 'restore', '--no-encryption',
remote_backup, local_directory])
zcat = subprocess.Popen([zcat_binary, os.path.join(local_directory,
dump_name)],
stdout=subprocess.PIPE)
mysql = subprocess.Popen([mysql_binary, '--socket=%s' % mysql_socket,
'-D', database, '-u', 'root'],
stdin=zcat.stdout)
zcat.stdout.close()
returncode = mysql.wait()
if returncode == 0:
open(lock_file, 'w').close() # Just a touch
sys.exit(returncode)
......@@ -15,8 +15,6 @@ socket = %(socket)s
datadir = %(data_directory)s
pid-file = %(pid_file)s
log-error = %(error_log)s
long_query_time = 5
max_allowed_packet = 128M
query_cache_size = 32M
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
from hashlib import sha512
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
commandline = [self.options['server-binary']]
commandline.extend(['--callbacks', self.options['callbacks']])
commandline.extend(['--feeds', self.options['feeds']])
commandline.extend(['--equeue-socket', self.options['equeue-socket']])
commandline.append(self.options['host'])
commandline.append(self.options['port'])
return [self.createPythonScript(self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
commandline)]
class Callback(GenericBaseRecipe):
def createCallback(self, notification_id, callback):
callback_id = sha512(notification_id).hexdigest()
callback = self.createFile(os.path.join(self.options['callbacks'],
callback_id),
callback)
return callback
def install(self):
return [self.createCallback(self.options['on-notification-id'],
self.options['callback'])]
class Notify(GenericBaseRecipe):
def createNotifier(self, notifier_binary, executable, wrapper, **kwargs):
if not os.path.exists(kwargs['log']):
# Just a touch
open(kwargs['log'], 'w').close()
commandline = [notifier_binary,
'-l', kwargs['log'],
'--title', kwargs['title'],
'--feed', kwargs['feed_url'],
'--notification-url', kwargs['notification_url'],
executable]
return self.createPythonScript(wrapper,
'slapos.recipe.librecipe.execute.execute',
[str(i) for i in commandline])
def install(self):
feedurl = self.unparseUrl(scheme='http', host=self.options['host'],
port=self.options['port'],
path='/get/%s' % self.options['name'])
script = self.createNotifier(
self.options['notifier-binary'],
wrapper=self.options['wrapper'],
executable=self.options['executable'],
log=os.path.join(self.options['feeds'], self.options['name']),
title=self.options['title'],
notification_url=self.options['notify'],
feed_url=feedurl,
)
return [script]
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from json import loads as unjson
from hashlib import sha512
from urlparse import urlparse
import os
import subprocess
import sys
import signal
import inspect
from slapos.recipe.librecipe import GenericSlapRecipe
from slapos.recipe.dropbear import KnownHostsFile
from slapos.recipe.notifier import Notify
from slapos.recipe.notifier import Callback
from slapos import slap as slapmodule
def promise(args):
def failed_ssh(partition, ssh):
# Python 2 print syntax; with Python 3 this would be print(..., file=sys.stderr)
print >> sys.stderr, "SSH Connection failed"
try:
ssh.terminate()
except:
pass
partition.bang("SSH Connection failed. rdiff-backup is unusable.")
def sigterm_handler(signum, frame):
# Walk up in the stack to get promise local
# variables
ssh = None
for upper_frame in inspect.getouterframes(frame):
# Use promise.func_name instead of the literal 'promise' so that editors
# and refactoring tools notice if the promise function is ever renamed.
# Otherwise this kind of error is hard to debug.
if upper_frame[3] == promise.func_name:
try:
partition = upper_frame[0].f_locals['partition']
ssh = upper_frame[0].f_locals['ssh']
except KeyError:
raise SystemExit("SIGTERM Send too soon.")
break
# In case the promise function was not found in the stack.
if ssh is None:
raise SystemExit
failed_ssh(partition, ssh)
signal.signal(signal.SIGTERM, sigterm_handler)
slap = slapmodule.slap()
slap.initializeConnection(args['server_url'],
key_file=args.get('key_file'), cert_file=args.get('cert_file'))
partition = slap.registerComputerPartition(args['computer_id'],
args['partition_id'])
# Rdiff Backup protocol quit command
quitcommand = 'q' + chr(255) + chr(0) * 7
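# The promise only checks connectivity: open an SSH connection to the remote
# rdiff-backup server, ask it to quit immediately, and look at the exit status.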
ssh_cmdline = [args['ssh_client'], '%(user)s@%(host)s/%(port)s' % args]
ssh = subprocess.Popen(ssh_cmdline, stdin=subprocess.PIPE,
stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w'))
ssh.stdin.write(quitcommand)
ssh.stdin.flush()
ssh.stdin.close()
ssh.wait()
if ssh.poll() is None:
return 1
if ssh.returncode != 0:
failed_ssh(partition, ssh)
return ssh.returncode
class Recipe(GenericSlapRecipe, Notify, Callback):
def add_slave(self, entry, known_hosts_file):
path_list = []
url = entry.get('url')
if url is None:
url = ''
# We assume that, thanks to sha512, there are no collisions
url_hash = sha512(url).hexdigest()
name_hash = sha512(entry['name']).hexdigest()
promise_path = os.path.join(self.options['promises-directory'],
url_hash)
parsed_url = urlparse(url)
promise_dict = self.promise_base_dict.copy()
promise_dict.update(user=parsed_url.username,
host=parsed_url.hostname,
port=parsed_url.port)
promise = self.createPythonScript(promise_path,
__name__ + '.promise',
promise_dict)
path_list.append(promise)
host = parsed_url.hostname
known_hosts_file[host] = entry['server-key']
remote_schema = '%(ssh)s -p %%s %(user)s@%(host)s' % \
{
'ssh': self.options['sshclient-binary'],
'user': parsed_url.username,
'host': parsed_url.hostname,
}
command = [self.options['rdiffbackup-binary']]
command.extend(['--remote-schema', remote_schema])
remote_directory = '%(port)s::%(path)s' % {'port': parsed_url.port,
'path': parsed_url.path}
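# rdiff-backup fills the %s of --remote-schema with whatever comes before
# '::' in the remote location, so here the port is passed as the ssh -p value.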
local_directory = self.createDirectory(self.options['directory'],
name_hash)
if entry['type'] == 'push':
command.extend(['--restore-as-of', 'now'])
command.append('--force')
command.extend([local_directory, remote_directory])
else:
command.extend([remote_directory, local_directory])
wrapper_basepath = os.path.join(self.options['wrappers-directory'],
url_hash)
wrapper_path = wrapper_basepath
if 'notify' in entry:
wrapper_path = '%s_raw' % wrapper_basepath
wrapper = self.createPythonScript(
wrapper_path,
'slapos.recipe.librecipe.execute.execute',
[str(i) for i in command]
)
path_list.append(wrapper)
if 'notify' in entry:
feed_url = '%s/get/%s' % (self.options['notifier-url'],
entry['notification-id'])
wrapper = self.createNotifier(
self.options['notifier-binary'],
wrapper=wrapper_basepath,
executable=wrapper_path,
log=os.path.join(self.options['feeds'], entry['notification-id']),
title=entry.get('title', 'Untitled'),
notification_url=entry['notify'],
feed_url=feed_url,
)
path_list.append(wrapper)
#self.setConnectionDict(dict(feed_url=feed_url), entry['slave_reference'])
if 'on-notification' in entry:
path_list.append(self.createCallback(str(entry['on-notification']),
wrapper))
else:
cron_entry = os.path.join(self.options['cron-entries'], url_hash)
with open(cron_entry, 'w') as cron_entry_file:
cron_entry_file.write('%s %s' % (entry['frequency'], wrapper))
path_list.append(cron_entry)
return path_list
def _install(self):
path_list = []
if self.optionIsTrue('client', True):
self.logger.info("Client mode")
slap_connection = self.buildout['slap-connection']
self.promise_base_dict = dict(
server_url=slap_connection['server-url'],
computer_id=slap_connection['computer-id'],
cert_file=slap_connection.get('cert-file'),
key_file=slap_connection.get('key-file'),
partition_id=slap_connection['partition-id'],
ssh_client=self.options['sshclient-binary'],
)
slaves = unjson(self.options['slave-instance-list'])
known_hosts = KnownHostsFile(self.options['known-hosts'])
with known_hosts:
for slave in slaves:
path_list.extend(self.add_slave(slave, known_hosts))
else:
command = [self.options['rdiffbackup-binary']]
self.logger.info("Server mode")
command.extend(['--restrict', self.options['path']])
command.append('--server')
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
command)
path_list.append(wrapper)
return path_list
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import zc.buildout
from slapos.recipe.librecipe import GenericSlapRecipe
class Recipe(GenericSlapRecipe):
def _install(self):
publish_dict = dict()
options = self.options.copy()
del options['recipe']
for k, v in options.iteritems():
publish_dict[k] = v
self.setConnectionDict(publish_dict)
return []
......@@ -56,10 +56,10 @@ class Recipe(GenericSlapRecipe):
if option in self.options:
self.urlparts[option] = self.options[option]
if 'username' in self.options:
self.urlparts.update(auth=(self.options['username'],))
if 'password' in self.options:
self.urlparts.update(auth=(self.options['username'],
self.options['password']))
self.setConnectionUrl(**self.urlparts)
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import subprocess
import os
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def _options(self, options):
if not os.path.exists(self.options['file']):
password = subprocess.check_output([self.options['pwgen-binary'], '-1']).strip()
with open(self.options['file'], 'w') as password_file:
password_file.write(password)
else:
with open(self.options['file'], 'r') as password_file:
password = password_file.read()
options['password'] = password
def install(self):
return []
......@@ -25,43 +25,34 @@
#
##############################################################################
import logging
import os
from slapos import slap as slapmodule
class Recipe(object):
def parseMultiValues(self, string):
return dict([ [str(column).strip() for column in line.split('=', 1)]
for line in str(string).splitlines() if '=' in line])
def __init__(self, buildout, name, options):
self.logger = logging.getLogger(name)
slap = slapmodule.slap()
self.software_release_url = options['software-url']
# XXX: Dirty network interaction stuff
slap.initializeConnection(options['server-url'],
options.get('key-file'),
options.get('cert-file'),
)
computer_partition = slap.registerComputerPartition(
options['computer-id'], options['partition-id'])
self.request = computer_partition.request
if 'software-url' not in options:
options['software-url'] = self.software_release_url
if 'name' not in options:
options['name'] = name
self.isSlave = False
if 'slave' in options:
self.isSlave = options['slave'].lower() in ['y', 'yes', 'true', '1']
self.return_parameters = []
if 'return' in options:
self.return_parameters = [str(parameter).strip()
for parameter in options['return'].split()]
else:
self.logger.warning("No parameter to return to main instance."
"Be careful about that...")
......@@ -72,25 +63,31 @@ class Recipe(object):
filter_kw = {}
if 'sla' in options:
for sla_parameter in options['sla'].split():
filter_kw[sla_parameter] = options['sla-%s' % sla_parameter]
partition_parameter_kw = {}
if 'config' in options:
for config_parameter in options['config'].split():
partition_parameter_kw[config_parameter] = \
options['config-%s' % config_parameter]
instance = self.request(options['software-url'], software_type,
options.get('name', name), partition_parameter_kw=partition_parameter_kw,
filter_kw=filter_kw, shared=self.isSlave)
self.failed = None
for param in self.return_parameters:
try:
options['connection-%s' % param] = str(instance.getConnectionParameter(param))
except slapmodule.NotFoundError:
options['connection-%s' % param] = ''
if self.failed is None:
self.failed = param
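# install() below raises if any connection parameter was missing, presumably
# so that buildout keeps retrying until the requested instance publishes it.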
def install(self):
if self.failed is not None:
raise KeyError("Connection parameter %r not found." % self.failed)
return []
update = install
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import json
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
env = os.environ.copy()
path_list = self.options['path'].split('\n')
env.update(PATH=':'.join(path_list))
env.update(SHELL=self.options['shell'])
env.update(HOME=self.options['home'])
ps1 = self.options.get('ps1')
if ps1 is not None:
env.update(PS1=str(json.loads(ps1)))
else:
env.update(PS1=env.get('PS1', '> '))
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.executee',
[ # Executable
[self.options['shell']],
# Environment
env
]
)
return [wrapper]
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from getpass import getpass
import pwd
import grp
import os
from slapos.recipe.librecipe import GenericBaseRecipe
def login_shell(args):
password = args['password']
entered_password = getpass()
if entered_password != password:
return 1
else:
os.execl(args['shell'], args['shell'])
def shellinabox(args):
certificate_dir = args['certificate_dir']
certificate_path = os.path.join(certificate_dir, 'certificate.pem')
with open(certificate_path, 'w') as certificate_file:
with open(args['ssl_key'], 'r') as key_file:
# XXX: Dirty hack in order to make shellinabox work
print >> certificate_file, key_file.read().replace(' PRIVATE ',
' RSA PRIVATE ')
with open(args['ssl_certificate']) as public_key_file:
print >> certificate_file, public_key_file.read()
user = pwd.getpwuid(os.getuid()).pw_name
group = grp.getgrgid(os.getgid()).gr_name
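# shellinabox's -s service definition is documented as
# /:USER:GROUP:DIRECTORY:COMMAND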
service = '/:%(user)s:%(group)s:%(directory)s:%(command)s' % {
'user': user,
'group': group,
'directory': args['directory'],
'command': args['login_shell'],
}
command_line = [
args['shellinabox'],
'-d',
'-c', certificate_dir,
'-s', service,
'--ipv6', args['ipv6'],
'-p', args['port'],
]
os.execv(command_line[0], command_line)
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
login_shell = self.createPythonScript(
self.options['login-shell'],
'%s.login_shell' % __name__,
{
'password': self.options['password'],
'shell': self.options['shell']
}
)
path_list.append(login_shell)
wrapper = self.createPythonScript(
self.options['wrapper'],
'%s.shellinabox' % __name__,
dict(
certificate_dir=self.options['certificate-directory'],
ssl_key=self.options['key-file'],
ssl_certificate=self.options['cert-file'],
shellinabox=self.options['shellinabox-binary'],
directory=self.options['directory'],
ipv6=self.options['ipv6'],
port=self.options['port'],
login_shell=login_shell,
)
)
path_list.append(wrapper)
return path_list
......@@ -24,31 +24,25 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
import time
from slapos.recipe.librecipe import GenericBaseRecipe
def log(args):
prefix = time.strftime('%Y-%m-%d.%H:%M.%s:')
with open(args['filename'], 'aw') as logfile:
for line in sys.stdin:
print >> logfile, prefix, line,
print >> logfile, prefix, '------------------------'
class Recipe(GenericBaseRecipe):
def install(self):
self.logger.info("Simple logger installation")
binary = self.options['binary']
output = self.options['output']
suffix = self.options.get('suffix', '.log')
script = self.createPythonScript(binary,
'slapos.recipe.simplelogger.log',
arguments=[output, suffix])
self.logger.debug("Logger script created at : %r", script)
self.logger.info("Simple logger installed.")
wrapper = self.options['wrapper']
log = self.options['log']
script = self.createPythonScript(wrapper,
__name__ + '.log',
arguments=dict(filename=log))
return [script]
......@@ -29,6 +29,7 @@ import os
import sys
import copy
from ConfigParser import ConfigParser
import json
import subprocess
import slapos.slap
import netaddr
......@@ -82,25 +83,28 @@ class Recipe:
if 'default' in self.options:
software_type = 'default'
else:
raise zc.buildout.UserError("This software type isn't mapped. And"
raise zc.buildout.UserError("This software type isn't mapped. And "
"there's no default software type.")
instance_file_path = self.options[software_type]
if not os.path.exists(instance_file_path):
raise zc.buildout.UserError("The specified buildout config file does not"
"exist.")
raise zc.buildout.UserError("The specified buildout config file %r does "
"not exist." % instance_file_path)
buildout = ConfigParser()
with open(instance_file_path) as instance_path:
buildout.readfp(instance_path)
buildout.set('buildout', 'installed', '.installed-%s.cfg' % self.name)
if not buildout.has_section('slap-parameter'):
buildout.add_section('slap-parameter')
for parameter, value in self.parameter_dict.items():
if isinstance(value, str):
buildout.set('slap-parameter', parameter, value)
else:
buildout.set('slap-parameter', parameter, json.dumps(value))
buildout.add_section('slap-network-information')
buildout.set('slap-network-information', 'local-ipv4',
......@@ -117,7 +121,7 @@ class Recipe:
work_directory = os.path.abspath(self.buildout['buildout'][
'directory'])
buildout_filename = os.path.join(work_directory,
'buildout-%s.cfg' % self.name)
with open(buildout_filename, 'w') as buildout_file:
buildout.write(buildout_file)
......
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
# Configuration file
config = dict(
data_directory=self.options['data-directory'],
ip_address=self.options['ip'],
port=self.options['sphinx-port'],
sql_port=self.options['sql-port'],
searchd_log=self.options['searchd-log'],
query_log=self.options['query-log'],
pid=self.options['pid'],
)
sphinx_conf_path = self.createFile(
self.options['configuration-file'],
self.substituteTemplate(self.getTemplateFilename('sphinx.conf.in'),
config)
)
# Create init script
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute',
[self.options['sphinx-searchd-binary'].strip(), '-c',
sphinx_conf_path, '--nodetach'],
)
return [wrapper, sphinx_conf_path]
......@@ -389,7 +389,7 @@ index erp5
#
# ondisk_dict = 1
# whether to enable in-place inversion (2x less disk, 90-95%% speed)
# optional, default is 0 (use separate temporary files), indexer-only
#
# inplace_enable = 1
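# NB: the %% above is needed because this template is rendered with Python
# %-style substitution (see the %(...)s placeholders elsewhere in this file).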
......@@ -460,11 +460,11 @@ searchd
# log file, searchd run info is logged here
# optional, default is 'searchd.log'
log = %(searchd_log)s
# query log file, all search queries are logged here
# optional, default is empty (do not log queries)
query_log = %(query_log)s
# client read timeout, seconds
# optional, default is 5
......@@ -480,7 +480,7 @@ searchd
# PID file, searchd process ID file name
# mandatory
pid_file = %(pid)s
# max amount of matches the daemon ever keeps in RAM, per-index
# WARNING, THERE'S ALSO PER-QUERY LIMIT, SEE SetLimits() API CALL
......@@ -524,7 +524,7 @@ searchd
# searchd will (try to) log crashed query to 'crash_log_path.PID' file
# optional, default is empty (do not create crash logs)
#
# crash_log_path = (log_directory)
# max allowed per-query filter count
# optional, default is 256
......
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import json
import hashlib
import os
import subprocess
import re
from slapos.recipe.librecipe import GenericBaseRecipe
from slapos.recipe.librecipe.inotify import subfiles
# This authority only works with dropbear sshkey generator
def sshkeys_authority(args):
requests_directory = args['requests']
keygen_binary = args['sshkeygen']
for request_filename in subfiles(requests_directory):
with open(request_filename) as request_file:
request = json.load(request_file)
key_type = request.get('type', 'rsa')
size = str(request.get('size', 2048))
try:
private_key = request['private_key']
public_key = request['public_key']
except KeyError:
break
if not os.path.exists(private_key):
if os.path.exists(public_key):
os.unlink(public_key)
keygen_cmd = [keygen_binary, '-t', key_type, '-f', private_key,
'-s', size]
# If key generation returns a non-zero status, something is seriously
# wrong, so exit in that case.
subprocess.check_call(keygen_cmd, env=os.environ.copy())
if not os.path.exists(public_key):
keygen_cmd = [keygen_binary, '-f', private_key, '-y']
keygen = subprocess.Popen(keygen_cmd, stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=os.environ.copy())
keygen.stdin.flush()
keygen.stdin.close()
# If key generation returns a non-zero status, something is seriously
# wrong, so exit in that case.
returncode = keygen.wait()
if returncode != 0:
raise subprocess.CalledProcessError(returncode, ' '.join(keygen_cmd))
public_key_value = ''
for line in keygen.stdout:
# Perl programming !
# Don't worry, just regex to detect the ssh public key line
matchresult = re.match(r'ssh-.*?=+', line)
if matchresult:
public_key_value = matchresult.group(0)
break
with open(public_key, 'w') as public_key_file:
public_key_file.write(public_key_value)
class Recipe(GenericBaseRecipe):
def install(self):
args = dict(
requests=self.options['request-directory'],
sshkeygen=self.options['keygen-binary'],
)
wrapper = self.createPythonScript(self.options['wrapper'],
__name__ + '.sshkeys_authority', args)
return [wrapper]
class Request(GenericBaseRecipe):
def _options(self, options):
if 'name' not in options:
options['name'] = self.name
keys_directory = options['keys-directory']
self.private_key = os.path.join(keys_directory,
hashlib.sha256(options['name']).hexdigest())
self.public_key = self.private_key + '.pub'
if os.path.exists(self.public_key):
with open(self.public_key) as key:
options['public-key-value'] = key.read()
else:
options['public-key-value'] = ''
def install(self):
requests_directory = self.options['request-directory']
request_file = os.path.join(requests_directory, self.options['name'])
request = dict(
private_key=self.private_key,
public_key=self.public_key,
)
if 'size' in self.options:
request.update(size=int(self.options['size'], 10))
if 'type' in self.options:
request.update(type=self.options['type'])
with open(request_file, 'w') as file_:
json.dump(request, file_)
public_key_link, private_key_link = (self.options['public-key'],
self.options['private-key'],
)
# XXX: Copied and pasted from certificate_authority/__init__.py:Request.
# We should factor this out.
for link in [public_key_link, private_key_link]:
if os.path.islink(link):
os.unlink(link)
elif os.path.exists(link):
raise OSError("%r should be a symbolic link." % link)
os.symlink(self.public_key, public_key_link)
os.symlink(self.private_key, private_key_link)
# end-XXX
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute_wait',
[ [self.options['executable']],
[self.private_key, self.public_key] ])
return [request_file, wrapper, public_key_link, private_key_link]
......@@ -24,22 +24,24 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import itertools
import zc.buildout
import os
import signal
import errno
from slapos.recipe.librecipe import GenericBaseRecipe
def post_rotate(args):
pid_file = args['pid_file']
if os.path.exists(pid_file):
with open(pid_file, 'r') as file_:
pid = file_.read().strip()
os.kill(int(pid, 10), signal.SIGUSR1)
class Recipe(GenericBaseRecipe):
def _options(self, options):
self.types = ['local', 'remote']
self.datas = ['address', 'port']
for type_ in self.types:
for data in self.datas:
opt = '%s-%s' % (type_, data)
if opt not in options:
raise zc.buildout.UserError("No %s for %s connections." % (data, type_))
def install(self):
path_list = []
self.isClient = self.optionIsTrue('client', default=False)
if self.isClient:
......@@ -47,25 +49,15 @@ class Recipe(GenericBaseRecipe):
else:
self.logger.info("Server mode")
if 'name' not in options:
options['name'] = self.name
def install(self):
path_list = []
conf = {}
for type_ in ['remote', 'local']:
for data in ['host', 'port']:
confkey, opt = ['%s%s%s' % (type_, i, data) for i in ['_', '-']]
conf[confkey] = self.options[opt]
pid_file = self.options['pid-file']
conf.update(pid_file=pid_file)
path_list.append(pid_file)
log_file = self.options['log-file']
conf.update(log=log_file)
......@@ -91,4 +83,21 @@ class Recipe(GenericBaseRecipe):
)
path_list.append(wrapper)
if os.path.exists(pid_file):
with open(pid_file, 'r') as file_:
pid = file_.read().strip()
# Reload configuration
try:
os.kill(int(pid, 10), signal.SIGHUP)
except OSError, e:
if e.errno == errno.ESRCH: # No such process
os.unlink(pid_file)
else:
raise e
if 'post-rotate-script' in self.options:
self.createPythonScript(self.options['post-rotate-script'],
__name__ + '.post_rotate',
dict(pid_file=pid_file))
return path_list
......@@ -6,5 +6,5 @@ key = %(key)s
cert = %(cert)s
[service]
accept = %(remote_host)s:%(remote_port)s
connect = %(local_host)s:%(local_port)s
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import logging
import zc.buildout
class Recipe:
def __init__(self, buildout, name, options):
self.buildout = buildout
self.name = name
self.options = options
self.logger = logging.getLogger(self.name)
def install(self):
"""
Link binaries into the instance's bin directory so they are easier to expose.
"""
path_list = []
target_directory = self.options['target-directory']
for linkline in self.options['link-binary'].split():
path, linkname = os.path.split(linkline)
link = os.path.join(target_directory, linkname)
if os.path.lexists(link):
if not os.path.islink(link):
raise zc.buildout.UserError(
'Target link %r already exists but is not a symbolic link' % link)
os.unlink(link)
os.symlink(linkline, link)
self.logger.debug('Created link %r -> %r' % (link, linkline))
path_list.append(link)
return path_list
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
path_list = []
a = path_list.append
configuration_file = self.createFile(self.options['configuration-path'], self.substituteTemplate(self.getTemplateFilename('tidstorage.py.in'), self.options))
a(configuration_file)
tidstorage_wrapper = self.createPythonScript(self.options['tidstorage-wrapper'], 'slapos.recipe.librecipe.execute.execute', [self.options['tidstoraged-binary'], '--nofork', '--config', configuration_file])
a(tidstorage_wrapper)
repozo_wrapper = self.createPythonScript(self.options['repozo-wrapper'], 'slapos.recipe.librecipe.execute.execute', [self.options['tidstorage-repozo-binary'], '--config', configuration_file, '--repozo', self.options['repozo-binary'], '-z'])
a(repozo_wrapper)
return path_list
known_tid_storage_identifier_dict = %(known-tid-storage-identifier-dict)s
base_url = '%(base-url)s'
address = '%(ip)s'
port = %(port)s
#fork = False
#setuid = None
#setgid = None
burst_period = 30
full_dump_period = 300
timestamp_file_path = '%(timestamp-file-path)s'
logfile_name = '%(logfile-name)s'
pidfile_name = '%(pidfile-name)s'
status_file = '%(status-file)s'
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import shlex
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
def install(self):
files = [f for f in self.options['files'].split('\n') if f]
command_line = shlex.split(self.options['command-line'])
wrapper = self.createPythonScript(
self.options['wrapper'],
'slapos.recipe.librecipe.execute.execute_wait',
[ command_line,
files ],
)
return [wrapper]
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
import os
class Recipe(GenericBaseRecipe):
"""
ZEO instance configuration.
wrapper-path -- location of the init script to generate
binary-path -- location of the runzeo command
ip -- ip of the zeo server
port -- port of the zeo server
log-path -- location of the log file
pid-path -- location of the pid file
conf-path -- location of the configuration file
zodb-path -- location of the zodb directory (which contains all storage)
storage -- string listing all requested storages (e.g. event_module,
person_module), one definition per line; the parser below expects
"storage-name=<name>" and "zodb-path=<path>" pairs on each line
"""
def install(self):
snippet_filename = self.getTemplateFilename(
'zeo-filestorage-snippet.conf.in')
# Prepare all filestorages
filestorage_snippet = ""
for storage_definition in self.options['storage'].splitlines():
storage_definition = storage_definition.strip()
if not storage_definition:
continue
for q in storage_definition.split():
if 'storage-name' in q:
storage_name = q.split('=')[1].strip()
if 'zodb-path' in q:
zodb_path = q.split('=')[1].strip()
filestorage_snippet += self.substituteTemplate(
snippet_filename, dict(storage_name=storage_name, path=zodb_path))
config = dict(
zeo_ip=self.options['ip'],
zeo_port=self.options['port'],
zeo_event_log=self.options['log-path'],
zeo_pid=self.options['pid-path'],
zeo_filestorage_snippet=filestorage_snippet,
)
# Create configuration file
template_filename = self.getTemplateFilename('zeo.conf.in')
configuration_path = self.createFile(
self.options['conf-path'],
self.substituteTemplate(template_filename, config))
# Create running wrapper
wrapper_path = self.createPythonScript(
self.options['wrapper-path'],
'slapos.recipe.librecipe.execute.execute',
arguments=[self.options['binary-path'].strip(), '-C',
self.options['conf-path']],)
return [configuration_path, wrapper_path]
<filestorage %(storage_name)s>
path %(path)s
</filestorage>
[buildout]
parts =
davstorage
url
certificate-authority
ca-davstorage
cron
cron-entry-logrotate
logrotate
logrotate-entry-davstorage
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[url]
recipe = slapos.cookbook:publishurl
scheme = webdavs
user = $${davstorage:user}
port = $${davstorage:port}
host = $${davstorage:ip}
password = $${davstorage:password}
[davstorage]
recipe = slapos.cookbook:davstorage
user = user
port = 8080
ip = $${slap-network-information:global-ipv6}
# Path
wrapper = $${rootdirectory:bin}/davstorage
error-log = $${directory:davstorage-log}/error.log
access-log = $${directory:davstorage-log}/access.log
pid-file = $${basedirectory:run}/davstorage.pid
lock-file = $${basedirectory:run}/davstorage.lock
davdb-lock = $${basedirectory:run}/davdb.lock
key-file = $${ca-davstorage:key-file}
cert-file = $${ca-davstorage:cert-file}
conf-file = $${directory:davstorage-conf}/davstorage.conf
htdocs = $${directory:htdocs}
root = $${buildout:directory}
email-address = admin+davstorage@vifib.net
htpasswd-file = $${directory:davstorage-conf}/davstorage.htpasswd
promise = $${basedirectory:promises}/davstorage
# Binaries
apache-binary = ${apache:location}/bin/httpd
apache-modules-dir = ${apache:location}/modules/
apache-mime-file = ${apache:location}/conf/mime.types
apache-htpasswd = ${apache:location}/bin/htpasswd
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/certificate_authority
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:binary}
binary = $${basedirectory:services}/crond
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
binary = $${rootdirectory:bin}/cron_simplelogger
output = $${directory:cronoutput}
[logrotate-entry-davstorage]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = davstorage
log = $${davstorage:error-log} $${davstorage:access-log}
frequency = daily
rotate-num = 30
sharedscripts = true
notifempty = true
create = true
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs/
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[ca-davstorage]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
key-file = $${directory:davstorage-conf}/davstorage.key
cert-file = $${directory:davstorage-conf}/davstorage.crt
executable = $${davstorage:wrapper}
wrapper = $${basedirectory:services}/davstorage
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
htdocs = $${rootdirectory:srv}/www/
davstorage-log = $${basedirectory:log}/davstorage/
logrotate-backup = $${basedirectory:backup}/logrotate/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
davstorage-conf = $${rootdirectory:etc}/davstorage/
ca-dir = $${rootdirectory:srv}/ssl/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
cronstamps = $${rootdirectory:etc}/cronstamps/
cronoutput = $${basedirectory:log}/cron/
......@@ -9,4 +9,4 @@ offline = true
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${instance_davstorage:output}
default = ${instance-davstorage:output}
......@@ -12,6 +12,7 @@ extends =
../../stack/shacache-client.cfg
../../component/lxml-python/buildout.cfg
../../component/python-2.7/buildout.cfg
../../component/gzip/buildout.cfg
# Use only quite well working sites.
allow-hosts =
......@@ -24,6 +25,7 @@ allow-hosts =
peak.telecommunity.com
psutil.googlecode.com
www.dabeaz.com
alastairs-place.net
parts =
......@@ -33,9 +35,9 @@ parts =
logrotate
dcron
eggs
gzip
instance-recipe-egg
unzip = true
[eggs]
......@@ -55,14 +57,14 @@ eggs = ${instance-recipe:egg}
# Default template for the instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 4c79e2e42704a2ffe5eebfa0b2e70e28
md5sum = bed788dee6daf05349c4577e7a7f1299
output = ${buildout:directory}/template.cfg
mode = 0644
[instance_davstorage]
[instance-davstorage]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-davstorage.cfg
md5sum = 009103c87c8d31fb758e7a0782e77723
md5sum = 49c8e049a78e233d3a553e64c8914592
output = ${buildout:directory}/template-davstorage.cfg
mode = 0644
......@@ -103,7 +105,7 @@ netaddr = 0.7.6
# Required by:
# slapos.core==0.14
netifaces = 0.4
#netifaces = 0.4
# Required by:
# slapos.cookbook==0.24
......
[buildout]
parts =
publish-cloudooo-connection-information
cloudooo-instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[publish-cloudooo-connection-information]
recipe = slapos.cookbook:publishurl
url = cloudooo://$${cloudooo-instance:ip}:$${cloudooo-instance:port}/
[cloudooo-instance]
recipe = slapos.cookbook:generic.cloudooo
# Network options
ip = $${slap-network-information:local-ipv4}
port = 23000
openoffice-port = 23060
# Paths
configuration-file = $${rootdirectory:etc}/cloudooo.cfg
wrapper = $${basedirectory:services}/cloudooo
# Paths: Data
data-directory = $${directory:cloudooo-data}
environment =
LD_LIBRARY_PATH = ${file:location}/lib:${fontconfig:location}/lib:${freetype:location}/lib:${libICE:location}/lib:${libSM:location}/lib:${libX11:location}/lib:${libXau:location}/lib:${libXdmcp:location}/lib:${libXext:location}/lib:${libXinerama:location}/lib:${libxcb:location}/lib:${zlib:location}/lib
FONTCONFIG_FILE = $${fontconfig-instance:conf-path}
# Binary information
# cloudooo specific configuration
ooo-binary-path = ${libreoffice-bin:location}/program
ooo-paster = ${buildout:bin-directory}/cloudooo_paster
ooo-uno-path = ${libreoffice-bin:location}/basis-link/program
[fontconfig-instance]
recipe = slapos.cookbook:fontconfig
conf-path = $${rootdirectory:etc}/font.conf
font-system-folder = ${fonts:location}
font-folder = $${directory:font}
url-list = $${dynamic-fontconfig-instance:url-list}
service-folder = $${basedirectory:services}
onetimedownload_path = ${buildout:bin-directory}/onetimedownload
# The remaining parts are candidates for generic, reusable sections
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
[directory]
recipe = slapos.cookbook:mkdirectory
cloudooo-data = $${rootdirectory:srv}/cloudooo
font = $${rootdirectory:srv}/font
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
srv = $${buildout:directory}/srv
#############################
#
# Request erp5 development environment
#
#############################
[buildout]
extends = ${template-zope:output}
parts +=
request-mariadb
request-cloudooo
request-memcached
request-kumofs
basedirectory
erp5-update
test-runner
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[slap-parameter]
# default site id
site-id = erp5
cloudooo-json =
[test-runner]
recipe = slapos.cookbook:erp5.test
certificate-authority-path = $${test-certificate-authority:ca-dir}
mysql-url = $${request-mariadb:connection-url}
kumofs-url = $${request-kumofs:connection-url}
memcached-url = $${request-memcached:connection-url}
cloudooo-url = $${request-cloudooo:connection-url}
test-instance-path = $${directory:unit-test-path}
prepend-path = $${buildout:bin-directory}
run-unit-test = $${buildout:bin-directory}/runUnitTest
run-test-suite = $${buildout:bin-directory}/runTestSuite
openssl-binary = $${test-certificate-authority:openssl-binary}
run-unit-test-binary = ${buildout:bin-directory}/runUnitTest
run-test-suite-binary = ${buildout:bin-directory}/runTestSuite
[test-certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:test-ca-dir}
requests-directory = $${test-cadirectory:requests}
wrapper = $${basedirectory:services}/test-ca
ca-private = $${test-cadirectory:private}
ca-certs = $${test-cadirectory:certs}
ca-newcerts = $${test-cadirectory:newcerts}
ca-crl = $${test-cadirectory:crl}
[test-cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:test-ca-dir}/requests/
private = $${directory:test-ca-dir}/private/
certs = $${directory:test-ca-dir}/certs/
newcerts = $${directory:test-ca-dir}/newcerts/
crl = $${directory:test-ca-dir}/crl/
[erp5-update]
recipe = slapos.cookbook:erp5.update
# Configuration
url = http://$${zope-instance:user}:$${zope-instance:password}@$${zope-instance:ip}:$${zope-instance:port}/
mysql-url = $${request-mariadb:connection-url}
kumofs-url = $${request-kumofs:connection-url}
memcached-url = $${request-memcached:connection-url}
cloudooo-url = $${request-cloudooo:connection-url}
site-id = $${slap-parameter:site-id}
openssl-binary = ${openssl:location}/bin/openssl
cadir-path = $${erp5-certificate-authority:ca-dir}
# Paths
update-wrapper = $${basedirectory:services}/erp5-update
# Defaults
configurator-bt5-list = erp5_full_text_myisam_catalog erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_ung erp5_configurator_run_my_doc
bt5-repository-list = $${zope-instance:bt5-repository-list}
[request-common]
recipe = slapos.cookbook:request
software-url = $${slap-connection:software-release-url}
sla = computer_guid
sla-computer_guid = $${slap-connection:computer-id}
return = url
server-url = $${slap-connection:server-url}
key-file = $${slap-connection:key-file}
cert-file = $${slap-connection:cert-file}
computer-id = $${slap-connection:computer-id}
partition-id = $${slap-connection:partition-id}
[request-mariadb]
<=request-common
name = MariaDB DataBase
software-type = mariadb
[request-cloudooo]
<=request-common
name = Cloudooo
config = cloudooo-json
config-cloudooo-json = $${slap-parameter:cloudooo-json}
software-type = cloudooo
[request-memcached]
<=request-common
name = Memcached
software-type = memcached
[request-kumofs]
<=request-common
name = KumoFS
software-type = kumofs
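# Each request-* part above asks the SlapOS master for one more partition of
# this software release with the given software-type and reads back the
# published "url" connection parameter. As an illustrative aside (Python, not
# part of this profile), the slapos.cookbook:request recipe roughly wraps the
# slapos.slap client API as sketched below; the method names and all
# placeholder values are assumptions based on slapos.core of this era, not the
# recipe's actual code.
import slapos.slap

# Placeholder values; the recipe takes them from the [slap-connection] options.
server_url = 'https://slap.example.com/'
key_file = '/path/to/key.pem'
cert_file = '/path/to/cert.pem'
computer_id = 'COMP-0'
partition_id = 'slappart0'
software_url = 'http://example.com/software.cfg'

client = slapos.slap.slap()
client.initializeConnection(server_url, key_file, cert_file)
partition = client.registerComputerPartition(computer_id, partition_id)
requested = partition.request(
    software_release=software_url,
    software_type='kumofs',
    partition_reference='KumoFS',              # the "name" option
    filter_kw={'computer_guid': computer_id},  # the "sla-computer_guid" option
)
print(requested.getConnectionParameter('url'))  # the "return = url" option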
# The remaining parts are candidates for generic, reusable sections
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run/
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
[directory]
test-ca-dir = $${rootdirectory:srv}/test-ca/
test-instance-path = $${rootdirectory:srv}/test-instance/
unit-test-path = $${:test-instance-path}/unit_test/
#############################
#
# Request erp5 production environment
#
#############################
[buildout]
parts =
request-mariadb
request-sphinx
request-cloudooo
request-memcached
request-kumofs
request-tidstorage
basedirectory
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[request-common]
recipe = slapos.cookbook:request
software-url = $${slap-connection:software-release-url}
sla = computer_guid
return = url
server-url = $${slap-connection:server-url}
key-file = $${slap-connection:key-file}
cert-file = $${slap-connection:cert-file}
computer-id = $${slap-connection:computer-id}
partition-id = $${slap-connection:partition-id}
[request-sphinx]
<=request-common
name = Sphinx Search Engine
software-type = sphinx
sla-computer_guid = $${slap-parameter:sphinx-computer-guid}
[request-mariadb]
<=request-common
name = MariaDB DataBase
software-type = mariadb
sla-computer_guid = $${slap-parameter:mariadb-computer-guid}
[request-cloudooo]
<=request-common
name = Cloudooo
config = cloudooo-json
config-cloudooo-json = $${slap-parameter:cloudooo-json}
software-type = cloudooo
sla-computer_guid = $${slap-parameter:cloudooo-computer-guid}
[request-memcached]
<=request-common
name = Memcached
software-type = memcached
sla-computer_guid = $${slap-parameter:memcached-computer-guid}
[request-kumofs]
<=request-common
name = KumoFS
software-type = kumofs
sla-computer_guid = $${slap-parameter:kumofs-computer-guid}
[request-tidstorage]
<=request-common
name = TidStorage
return = url-login
config = json mysql-url memcached-url cloudooo-url kumofs-url
config-json = $${slap-parameter:json}
config-mysql-url = $${request-mariadb:connection-url}
config-memcached-url = $${request-memcached:connection-url}
config-cloudooo-url = $${request-cloudooo:connection-url}
config-kumofs-url = $${request-kumofs:connection-url}
software-type = tidstorage
sla-computer_guid = $${slap-parameter:tidstorage-computer-guid}
[slap-parameter]
# Default values used when no computer_guid is specified for a given type
sphinx-computer-guid = $${slap-connection:computer-id}
mariadb-computer-guid = $${slap-connection:computer-id}
cloudooo-computer-guid = $${slap-connection:computer-id}
memcached-computer-guid = $${slap-connection:computer-id}
kumofs-computer-guid = $${slap-connection:computer-id}
tidstorage-computer-guid = $${slap-connection:computer-id}
# The remaining parts are candidates for generic, reusable sections
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
[buildout]
parts =
publish-kumofs-connection-information
kumofs-instance
logrotate
logrotate-entry-kumofs
cron
cron-entry-logrotate
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[publish-kumofs-connection-information]
recipe = slapos.cookbook:publishurl
url = memcached://$${kumofs-instance:ip}:$${kumofs-instance:gateway-port}/
[kumofs-instance]
recipe = slapos.cookbook:generic.kumofs
# Network options
ip = $${slap-network-information:local-ipv4}
manager-port = 13101
server-port = 13201
server-listen-port = 13202
gateway-port = 13301
# Paths: Running wrappers
gateway-wrapper = $${basedirectory:services}/kumofs_gateway
manager-wrapper = $${basedirectory:services}/kumofs_manager
server-wrapper = $${basedirectory:services}/kumofs_server
# Paths: Data
data-directory = $${directory:kumofs-data}
# Paths: Logs
kumo-gateway-log = $${basedirectory:log}/kumo-gateway.log
kumo-manager-log = $${basedirectory:log}/kumo-manager.log
kumo-server-log = $${basedirectory:log}/kumo-server.log
# Binary information
kumo-gateway-binary = ${kumo:location}/bin/kumo-gateway
kumo-manager-binary = ${kumo:location}/bin/kumo-manager
kumo-server-binary = ${kumo:location}/bin/kumo-server
[logrotate-entry-kumofs]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = kumofs
log = $${kumofs-instance:kumo-gateway-log} $${kumofs-instance:kumo-manager-log}
$${kumofs-instance:kumo-server-log}
# The remaining parts are candidates for generic, reusable sections
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/run
run = $${rootdirectory:var}/run
backup = $${rootdirectory:srv}/backup
[directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = $${rootdirectory:etc}/cron.d
crontabs = $${rootdirectory:etc}/crontabs
cronstamps = $${rootdirectory:etc}/cronstamps
cronoutput = $${basedirectory:log}/cron
logrotate-backup = $${basedirectory:backup}/logrotate
logrotate-entries = $${rootdirectory:etc}/logrotate.d
kumofs-data = $${rootdirectory:srv}/kumofs
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:binary}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
binary = $${rootdirectory:bin}/cron_simplelogger
output = $${directory:cronoutput}
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[buildout]
parts =
publish-mariadb-url
mariadb-instance
logrotate
logrotate-entry-mariadb
cron
cron-entry-logrotate
cron-entry-mariadb-backup
binary-link
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[publish-mariadb-url]
recipe = slapos.cookbook:publishurl
url = mysql://$${mariadb-instance:user}:$${mariadb-instance:password}@$${mariadb-instance:ip}:$${mariadb-instance:port}/$${mariadb-instance:database}
[cron-entry-mariadb-backup]
<= cron
recipe = slapos.cookbook:cron.d
name = mariadb-backup
frequency = 0 0 * * *
command = $${mariadb-instance:backup-script}
[mariadb-instance]
recipe = slapos.cookbook:generic.mysql
# Options
user = user
parallel-test-database-amount = 100
port = 45678
ip = $${slap-network-information:local-ipv4}
database = erp5
test-user = erp5_test
test-database = erp5_test
mysql-test-database-base = testdb
mysql-test-user-base = testuser
# Paths
wrapper = $${basedirectory:services}/mariadb
update-wrapper = $${basedirectory:services}/mariadb_update
backup-script = $${rootdirectory:bin}/innobackupex-controller
full-backup-directory = $${directory:mariadb-backup-full}
incremental-backup-directory = $${directory:mariadb-backup-incremental}
data-directory = $${directory:mariadb-data}
pid-file = $${basedirectory:run}/mariadb.pid
socket = $${basedirectory:run}/mariadb.sock
error-log = $${basedirectory:log}/mariadb_error.log
slow-query-log = $${basedirectory:log}/mariadb_slowquery.log
conf-file = $${rootdirectory:etc}/mariadb.cnf
dumpname = dump.sql.gz
bin-directory = $${rootdirectory:bin}
innobackupex-incremental = $${rootdirectory:bin}/innobackupex-incremental
innobackupex-full = $${rootdirectory:bin}/innobackupex-full
# Binary information
innobackupex-binary = ${xtrabackup:location}/bin/innobackupex
mysql-binary = ${mariadb:location}/bin/mysql
mysql-install-binary = ${mariadb:location}/bin/mysql_install_db
mysql-upgrade-binary = ${mariadb:location}/bin/mysql_upgrade
mysqld-binary = ${mariadb:location}/libexec/mysqld
pt-archiver-binary = ${perl:siteprefix}/bin/pt-archiver
pt-config-diff-binary = ${perl:siteprefix}/bin/pt-config-diff
pt-deadlock-logger-binary = ${perl:siteprefix}/bin/pt-deadlock-logger
pt-duplicate-key-checker-binary = ${perl:siteprefix}/bin/pt-duplicate-key-checker
pt-fifo-split-binary = ${perl:siteprefix}/bin/pt-fifo-split
pt-find-binary = ${perl:siteprefix}/bin/pt-find
pt-fk-error-logger-binary = ${perl:siteprefix}/bin/pt-fk-error-logger
pt-heartbeat-binary = ${perl:siteprefix}/bin/pt-heartbeat
pt-index-usage-binary = ${perl:siteprefix}/bin/pt-index-usage
pt-kill-binary = ${perl:siteprefix}/bin/pt-kill
pt-log-player-binary = ${perl:siteprefix}/bin/pt-log-player
pt-online-schema-change-binary = ${perl:siteprefix}/bin/pt-online-schema-change
pt-query-advisor-binary = ${perl:siteprefix}/bin/pt-query-advisor
pt-query-digest-binary = ${perl:siteprefix}/bin/pt-query-digest
pt-show-grants-binary = ${perl:siteprefix}/bin/pt-show-grants
pt-slave-delay-binary = ${perl:siteprefix}/bin/pt-slave-delay
pt-slave-find-binary = ${perl:siteprefix}/bin/pt-slave-find
pt-slave-restart-binary = ${perl:siteprefix}/bin/pt-slave-restart
pt-table-checksum-binary = ${perl:siteprefix}/bin/pt-table-checksum
pt-table-sync-binary = ${perl:siteprefix}/bin/pt-table-sync
pt-tcp-model-binary = ${perl:siteprefix}/bin/pt-tcp-model
pt-trend-binary = ${perl:siteprefix}/bin/pt-trend
pt-upgrade-binary = ${perl:siteprefix}/bin/pt-upgrade
pt-variable-advisor-binary = ${perl:siteprefix}/bin/pt-variable-advisor
pt-visual-explain-binary = ${perl:siteprefix}/bin/pt-visual-explain
xtrabackup-binary = ${xtrabackup:location}/bin/xtrabackup_51
perl-binary = ${perl:location}/bin/perl
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-mariadb]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = mariadb
log = $${mariadb-instance:error-log} $${mariadb-instance:slow-query-log}
post = $${mariadb-instance:mysql-binary} --no-defaults -B --socket=$${mariadb-instance:socket} -e "FLUSH LOGS"
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:binary}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
binary = $${rootdirectory:bin}/cron_simplelogger
output = $${directory:cronoutput}
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[binary-link]
recipe = slapos.cookbook:symbolic.link
target-directory = $${rootdirectory:bin}
link-binary =
${coreutils:location}/bin/basename
${coreutils:location}/bin/cat
${coreutils:location}/bin/cp
${coreutils:location}/bin/ls
${coreutils:location}/bin/tr
${coreutils:location}/bin/uname
${gettext:location}/lib/gettext/hostname
${grep:location}/bin/grep
${sed:location}/bin/sed
${mariadb:location}/bin/mysql
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/run
run = $${rootdirectory:var}/run
backup = $${rootdirectory:srv}/backup
[directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = $${rootdirectory:etc}/cron.d
crontabs = $${rootdirectory:etc}/crontabs
cronstamps = $${rootdirectory:etc}/cronstamps
cronoutput = $${basedirectory:log}/cron
ca-dir = $${rootdirectory:srv}/ssl
mariadb-backup-full = $${basedirectory:backup}/mariadb-full
mariadb-backup-incremental = $${basedirectory:backup}/mariadb-incremental
mariadb-data = $${rootdirectory:srv}/mariadb
logrotate-backup = $${basedirectory:backup}/logrotate
logrotate-entries = $${rootdirectory:etc}/logrotate.d
#############################
#
# Instantiate memcached
#
# No slapos parameter needed
#
#############################
[buildout]
parts =
memcached-instance
publish-memcached-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
bin = $${buildout:directory}/bin
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
[memcached-instance]
recipe = slapos.cookbook:generic.memcached
wrapper_path = $${basedirectory:services}/memcached
binary_path = ${memcached:location}/bin/memcached
ip = $${slap-network-information:local-ipv4}
port = 11000
[publish-memcached-connection-information]
recipe = slapos.cookbook:publishurl
url = memcached://$${memcached-instance:ip}:$${memcached-instance:port}/
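# The published URL speaks the plain memcached protocol, so any memcached
# client can use it (the kumofs instance above publishes the same
# memcached:// scheme). As an illustrative aside (Python, not part of this
# profile), a minimal sketch using a client library such as python-memcached,
# which is pinned later in this software release; the address below is a
# placeholder for the published ip:port pair.
import memcache                              # module name of python-memcached

mc = memcache.Client(['10.0.0.1:11000'])     # replace with the published ip:port
mc.set('greeting', 'hello')                  # store a value
assert mc.get('greeting') == 'hello'         # and read it back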
[buildout]
parts =
publish-sphinx-url
sphinxd-instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[publish-sphinx-url]
recipe = slapos.cookbook:publish
url-sphinx = sphinx://$${sphinxd-instance:ip}:$${sphinxd-instance:sphinx-port}
url-sphinx-sql = mysql://$${sphinxd-instance:ip}:$${sphinxd-instance:sql-port}
[sphinxd-instance]
recipe = slapos.cookbook:sphinx
data-directory = $${directory:sphinx-data}
configuration-file = $${rootdirectory:etc}/sphinx.conf
searchd-log = $${basedirectory:log}/sphinx-searchd.log
query-log = $${basedirectory:log}/sphinx-query.log
pid = $${basedirectory:run}/sphinx-searchd.pid
ip = $${slap-network-information:local-ipv4}
sphinx-port = 9312
sql-port = 9306
wrapper = $${basedirectory:services}/sphinxd
sphinx-searchd-binary = ${sphinx:location}/bin/searchd
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/run
run = $${rootdirectory:var}/run
[directory]
recipe = slapos.cookbook:mkdirectory
sphinx-data = $${rootdirectory:srv}/sphinx
[buildout]
parts =
certificate-authority
publish-apache-zope-backend-connection-string
zope-instance
apache-zope-backend-instance
ca-apache-zope-backend
logrotate
erp5-certificate-authority
logrotate-entry-zope
cron
cron-entry-logrotate
binary-link
bt5-erp5-link
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[slap-parameter]
# By default the backend disallows access
access-control-string = none
[publish-apache-zope-backend-connection-string]
recipe = slapos.cookbook:publish
url = https://$${zope-instance:user}:$${zope-instance:password}@[$${apache-zope-backend-instance:ip}]:$${apache-zope-backend-instance:port}
url-deadlock = $${:url}/$${zope-instance:deadlock-path}?$${zope-instance:deadlock-password}
[apache-zope-backend-instance]
recipe = slapos.cookbook:apache.zope.backend
backend = http://$${zope-instance:ip}:$${zope-instance:port}/
ip = $${slap-network-information:global-ipv6}
port = 16001
wrapper = $${rootdirectory:bin}/apache
key-file = $${directory:apache-conf}/apache.key
cert-file = $${directory:apache-conf}/apache.crt
configuration-file = $${directory:apache-conf}/apache.conf
access-control-string = $${slap-parameter:access-control-string}
pid-file = $${basedirectory:run}/apache.pid
lock-file = $${basedirectory:run}/apache.lock
error-log = $${basedirectory:log}/apache-error.log
access-log = $${basedirectory:log}/apache-access.log
apache-binary = ${apache:location}/bin/httpd
[ca-apache-zope-backend]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
key-file = $${apache-zope-backend-instance:key-file}
cert-file = $${apache-zope-backend-instance:cert-file}
executable = $${apache-zope-backend-instance:wrapper}
wrapper = $${basedirectory:services}/apache
[zope-instance]
recipe = slapos.cookbook:generic.zope
# XXX: products will no longer be needed once all ERP5 products (and their
# dependencies) are eggified; they will then be available directly through the
# software release's eggs
products = ${products:list}
# Options
user = zope
port = 12001
ip = $${slap-network-information:local-ipv4}
zodb-cache-size = 5000
thread-amount = 4
zodb-path = $${directory:zodb}/main.fs
deadlock-path = /manage_debug_threads
# Paths
wrapper = $${basedirectory:services}/zope-development
instance-path = $${directory:instance}
instance-etc = $${directory:instance-etc}
instance-products = $${directory:instance-products}
bt5-repository = $${rootdirectory:var}/bt5_repository
tmp-path = $${rootdirectory:tmp}
bin-path = $${rootdirectory:bin}
site-zcml = $${:instance-etc}/site.zcml
logrotate-post = $${rootdirectory:bin}/zope-post-logrotate
pid-file = $${basedirectory:run}/zope.pid
lock-file = $${basedirectory:run}/zope.lock
event-log = $${basedirectory:log}/zope-event.log
z2-log = $${basedirectory:log}/zope-Z2.log
configuration-file = $${rootdirectory:etc}/zope.conf
inituser = $${:instance-path}/inituser
# Binary information
runzope-binary = ${buildout:bin-directory}/runzope
# Defaults
bt5-repository-list =
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-zope]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = zope
log = $${zope-instance:event-log} $${zope-instance:z2-log}
frequency = daily
rotate-num = 30
post = ${buildout:bin-directory}/killpidfromfile $${zope-instance:pid-file} SIGUSR2
sharedscripts = true
notifempty = true
create = true
[logrotate-entry-apache-zope-backend]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = zope
log = $${apache-zope-backend-instance:error-log} $${apache-zope-backend-instance:access-log}
frequency = daily
rotate-num = 30
post = ${buildout:bin-directory}/killpidfromfile $${apache-zope-backend-instance:pid-file} SIGUSR1
sharedscripts = true
notifempty = true
create = true
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:binary}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
binary = $${rootdirectory:bin}/cron_simplelogger
output = $${directory:cronoutput}
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[binary-link]
recipe = slapos.cookbook:symbolic.link
target-directory = $${rootdirectory:bin}
link-binary =
${coreutils:location}/bin/basename
${coreutils:location}/bin/cat
${coreutils:location}/bin/cp
${coreutils:location}/bin/ls
${coreutils:location}/bin/tr
${coreutils:location}/bin/uname
${git:location}/bin/git
${graphviz:location}/bin/dot
${grep:location}/bin/grep
${imagemagick:location}/bin/convert
${imagemagick:location}/bin/identify
${mariadb:location}/bin/mysql
${mariadb:location}/bin/mysqldump
${pdftk:location}/bin/pdftk
${sed:location}/bin/sed
${tesseract:location}/bin/tesseract
${w3m:location}/bin/w3m
${poppler:location}/bin/pdfinfo
${poppler:location}/bin/pdftotext
${poppler:location}/bin/pdftohtml
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[bt5-erp5-link]
recipe = slapos.cookbook:symbolic.link
target-directory = $${basedirectory:bt5-erp5}
link-binary =
${erp5:location}/product/ERP5/bootstrap
${erp5:location}/bt5
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests
private = $${directory:ca-dir}/private
certs = $${directory:ca-dir}/certs
newcerts = $${directory:ca-dir}/newcerts
crl = $${directory:ca-dir}/crl
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/run
run = $${rootdirectory:var}/run
backup = $${rootdirectory:srv}/backup
bt5-erp5 = $${rootdirectory:var}/bt5_erp5
[erp5-certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:erp5-ca-dir}
requests-directory = $${erp5-cadirectory:requests}
wrapper = $${basedirectory:services}/erp5-ca
ca-private = $${erp5-cadirectory:private}
ca-certs = $${erp5-cadirectory:certs}
ca-newcerts = $${erp5-cadirectory:newcerts}
ca-crl = $${erp5-cadirectory:crl}
[erp5-cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:erp5-ca-dir}/requests/
private = $${directory:erp5-ca-dir}/private/
certs = $${directory:erp5-ca-dir}/certs/
newcerts = $${directory:erp5-ca-dir}/newcerts/
crl = $${directory:erp5-ca-dir}/crl/
[directory]
recipe = slapos.cookbook:mkdirectory
zodb = $${rootdirectory:srv}/zodb
instance = $${rootdirectory:srv}/erp5shared
instance-etc = $${:instance}/etc
apache-conf = $${rootdirectory:etc}/apache
instance-etc-package-include = $${:instance}/etc/package-include
# Because of a bug in slapos.recipe.template, keys are lowercased
instance-document = $${:instance}/Document
instance-propertysheet = $${:instance}/PropertySheet
instance-products = $${:instance}/Products
instance-extensions = $${:instance}/Extensions
instance-constraint = $${:instance}/Constraint
instance-import = $${:instance}/import
instance-lib = $${:instance}/lib
instance-tests = $${:instance}/tests
erp5-ca-dir = $${rootdirectory:srv}/erp5-ca/
ca-dir = $${rootdirectory:srv}/ssl
cron-entries = $${rootdirectory:etc}/cron.d
crontabs = $${rootdirectory:etc}/crontabs
cronstamps = $${rootdirectory:etc}/cronstamps
cronoutput = $${basedirectory:log}/cron
logrotate-backup = $${basedirectory:backup}/logrotate
logrotate-entries = $${rootdirectory:etc}/logrotate.d
[instance]
rdiff_backup_binary = ${buildout:bin-directory}/rdiff-backup
[buildout]
parts =
instance
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
dcrond_binary = ${dcron:location}/sbin/crond
haproxy_binary = ${haproxy:location}/sbin/haproxy
gzip_binary = ${gzip:location}/bin/gzip
httpd_binary = ${apache:location}/bin/httpd
innobackupex_binary = ${xtrabackup:location}/bin/innobackupex
kumo_gateway_binary = ${kumo:location}/bin/kumo-gateway
kumo_manager_binary = ${kumo:location}/bin/kumo-manager
kumo_server_binary = ${kumo:location}/bin/kumo-server
logrotate_binary = ${logrotate:location}/usr/sbin/logrotate
memcached_binary = ${memcached:location}/bin/memcached
mysql_binary = ${mariadb:location}/bin/mysql
mysql_install_binary = ${mariadb:location}/bin/mysql_install_db
mysql_upgrade_binary = ${mariadb:location}/bin/mysql_upgrade
mysqld_binary = ${mariadb:location}/libexec/mysqld
openssl_binary = ${openssl:location}/bin/openssl
perl_binary = ${perl:location}/bin/perl
rdiff_backup_binary = ${buildout:bin-directory}/rdiff-backup
repozo_binary = ${buildout:bin-directory}/repozo
runTestSuite_binary = ${buildout:bin-directory}/runTestSuite
runUnitTest_binary = ${buildout:bin-directory}/runUnitTest
runzeo_binary = ${buildout:bin-directory}/runzeo
runzope_binary = ${buildout:bin-directory}/runzope
sphinx_searchd_binary = ${sphinx:location}/bin/searchd
tidstorage_repozo_binary = ${buildout:bin-directory}/tidstorage_repozo
tidstoraged_binary = ${buildout:bin-directory}/tidstoraged
xtrabackup_binary = ${xtrabackup:location}/bin/xtrabackup_51
zabbix_agentd_binary = ${zabbix-agent:location}/sbin/zabbix_agentd
pt-archiver_binary = ${perl:siteprefix}/bin/pt-archiver
pt-config-diff_binary = ${perl:siteprefix}/bin/pt-config-diff
pt-deadlock-logger_binary = ${perl:siteprefix}/bin/pt-deadlock-logger
pt-duplicate-key-checker_binary = ${perl:siteprefix}/bin/pt-duplicate-key-checker
pt-fifo-split_binary = ${perl:siteprefix}/bin/pt-fifo-split
pt-find_binary = ${perl:siteprefix}/bin/pt-find
pt-fk-error-logger_binary = ${perl:siteprefix}/bin/pt-fk-error-logger
pt-heartbeat_binary = ${perl:siteprefix}/bin/pt-heartbeat
pt-index-usage_binary = ${perl:siteprefix}/bin/pt-index-usage
pt-kill_binary = ${perl:siteprefix}/bin/pt-kill
pt-log-player_binary = ${perl:siteprefix}/bin/pt-log-player
pt-online-schema-change_binary = ${perl:siteprefix}/bin/pt-online-schema-change
pt-query-advisor_binary = ${perl:siteprefix}/bin/pt-query-advisor
pt-query-digest_binary = ${perl:siteprefix}/bin/pt-query-digest
pt-show-grants_binary = ${perl:siteprefix}/bin/pt-show-grants
pt-slave-delay_binary = ${perl:siteprefix}/bin/pt-slave-delay
pt-slave-find_binary = ${perl:siteprefix}/bin/pt-slave-find
pt-slave-restart_binary = ${perl:siteprefix}/bin/pt-slave-restart
pt-table-checksum_binary = ${perl:siteprefix}/bin/pt-table-checksum
pt-table-sync_binary = ${perl:siteprefix}/bin/pt-table-sync
pt-tcp-model_binary = ${perl:siteprefix}/bin/pt-tcp-model
pt-trend_binary = ${perl:siteprefix}/bin/pt-trend
pt-upgrade_binary = ${perl:siteprefix}/bin/pt-upgrade
pt-variable-advisor_binary = ${perl:siteprefix}/bin/pt-variable-advisor
pt-visual-explain_binary = ${perl:siteprefix}/bin/pt-visual-explain
# cloudooo specific configuration
ooo_binary_path = ${libreoffice-bin:location}/program
ooo_paster = ${buildout:bin-directory}/cloudooo_paster
ooo_uno_path = ${libreoffice-bin:location}/basis-link/program
link_binary_list =
${coreutils:location}/bin/basename
${coreutils:location}/bin/cat
${coreutils:location}/bin/cp
${coreutils:location}/bin/ls
${coreutils:location}/bin/tr
${coreutils:location}/bin/uname
${gettext:location}/lib/gettext/hostname
${git:location}/bin/git
${graphviz:location}/bin/dot
${grep:location}/bin/grep
${imagemagick:location}/bin/convert
${imagemagick:location}/bin/identify
${mariadb:location}/bin/mysql
${mariadb:location}/bin/mysqldump
${pdftk:location}/bin/pdftk
${sed:location}/bin/sed
${tesseract:location}/bin/tesseract
${w3m:location}/bin/w3m
${poppler:location}/bin/pdfinfo
${poppler:location}/bin/pdftotext
${poppler:location}/bin/pdftohtml
[dynamic-template-tidstorage]
recipe = slapos.cookbook:generate.erp5.tidstorage
snippet-zeo = ${template-snippet-zeo:output}
snippet-zope = ${template-snippet-zope:output}
snippet-master = ${template-snippet-master:output}
snippet-backend = ${template-snippet-backend:output}
# XXX: products will no longer be needed once all ERP5 products (and their
# dependencies) are eggified; they will then be available directly through the
# software release's eggs
products = ${products:list}
environment =
LD_LIBRARY_PATH = ${file:location}/lib:${fontconfig:location}/lib:${freetype:location}/lib:${libICE:location}/lib:${libSM:location}/lib:${libX11:location}/lib:${libXau:location}/lib:${libXdmcp:location}/lib:${libXext:location}/lib:${libXinerama:location}/lib:${libxcb:location}/lib:${zlib:location}/lib
[dynamic-template-cloudooo]
recipe = slapos.cookbook:generate.cloudooo
template = ${template-cloudooo:output}
snippet-fontconfig = ${template-snippet-fontconfig:output}
bt5_repository_list = ${bt5-repository:list}
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-erp5-development:output}
production = ${template-erp5-production:output}
kumofs = ${template-kumofs:output}
memcached = ${template-memcached:output}
cloudooo = $${dynamic-template-cloudooo:output}
zope = ${template-zope:output}
mariadb = ${template-mariadb:output}
sphinx = ${template-sphinx:output}
tidstorage = $${dynamic-template-tidstorage:output}
configurator_bt5_list = ${instance-recipe:configurator_bt5_list}
[slap-connection]
# Part to migrate to the new convention: dash-separated option names
computer-id = $${slap_connection:computer_id}
partition-id = $${slap_connection:partition_id}
server-url = $${slap_connection:server_url}
software-release-url = $${slap_connection:software_release_url}
key-file = $${slap_connection:key_file}
cert-file = $${slap_connection:cert_file}
[apache-%(backend_name)s]
recipe = slapos.cookbook:apache.zope.backend
backend = http://$${haproxy-%(backend_name)s:ip}:$${haproxy-%(backend_name)s:port}/
ip = $${slap-network-information:global-ipv6}
port = %(apache_port)s
wrapper = $${rootdirectory:bin}/apache-%(backend_name)s
key-file = $${directory:apache-conf}/apache-%(backend_name)s.key
cert-file = $${directory:apache-conf}/apache-%(backend_name)s.crt
configuration-file = $${directory:apache-conf}/apache-%(backend_name)s.conf
access-control-string = %(access_control_string)s
pid-file = $${basedirectory:run}/apache-%(backend_name)s.pid
lock-file = $${basedirectory:run}/apache-%(backend_name)s.lock
error-log = $${basedirectory:log}/apache-%(backend_name)s-error.log
access-log = $${basedirectory:log}/apache-%(backend_name)s-access.log
apache-binary = ${apache:location}/bin/httpd
[ca-apache-%(backend_name)s]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
key-file = $${apache-%(backend_name)s:key-file}
cert-file = $${apache-%(backend_name)s:cert-file}
executable = $${apache-%(backend_name)s:wrapper}
wrapper = $${basedirectory:services}/apache-%(backend_name)s
[logrotate-entry-apache-%(backend_name)s]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = apache-%(backend_name)s
log = $${apache-%(backend_name)s:error-log} $${apache-%(backend_name)s:access-log}
post = ${buildout:bin-directory}/killpidfromfile $${apache-%(backend_name)s:pid-file} SIGUSR1
[haproxy-%(backend_name)s]
recipe = slapos.cookbook:haproxy
name = %(backend_name)s
conf-path = $${rootdirectory:etc}/haproxy-%(backend_name)s.cfg
ip = $${slap-network-information:local-ipv4}
port = %(haproxy_port)s
maxconn = %(maxconn)s
server-check-path = %(server_check_path)s
wrapper-path = $${basedirectory:services}/haproxy-%(backend_name)s
binary-path = ${haproxy:location}/sbin/haproxy
backend-list = %(haproxy_backend_list)s
[dynamic-fontconfig-instance]
url-list = %(font_url_list)s
#############################
#
# Instantiate zeo
#
# zeo-id -- local id of the requested zeo (1, 2, 3, ...)
#
# zeo-port -- IP port on which the process listens
#
# storage_list -- string listing all requested storages
#   Example: event_module person_module
#
#############################
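# The snippet that follows is consumed by the slapos.cookbook:generate.erp5.tidstorage
# recipe: its %(...)s placeholders are filled by Python %-substitution, a doubled
# %% survives as a literal %, and the $${...} escape is intended to reach the
# generated instance profile as ${...} for buildout to resolve there. As an
# illustrative aside (Python, not part of this profile), here is a minimal sketch
# of that substitution step with an inline template and made-up values; nothing
# below is copied from the recipe's actual code.
zeo_snippet = (
    "[zeo-instance-%(zeo_id)s]\n"
    "port = %(zeo_port)s\n"
    "storage =\n"
    "  %(storage_list)s\n"
    "serialize-url = http://[host]:%(zeo_port)s/%%s/serialize\n"
)
rendered = zeo_snippet % {
    'zeo_id': '1',                                 # local id of the requested zeo
    'zeo_port': '8100',                            # example port, not a default
    'storage_list': 'event_module person_module',  # as documented above
}
print(rendered)   # '%%s' comes out as '%s', ready for a later formatting pass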
[buildout]
parts =
logrotate
cron
cron-entry-logrotate
certificate-authority
erp5-certificate-authority
tidstorage
cron-entry-tidstorage-backup
logrotate-entry-tidstorage
binary-link
erp5-update
%(part_list)s
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[tidstorage]
recipe = slapos.cookbook:tidstorage
known-tid-storage-identifier-dict = %(known_tid_storage_identifier_dict)s
base-url = http://$${%(haproxy_section)s:ip}:$${%(haproxy_section)s:port}/%%s/serialize
configuration-path = $${rootdirectory:etc}/tidstorage.py
ip = $${slap-network-information:local-ipv4}
port = 6001
timestamp-file-path = $${directory:tidstorage}/repozo_tidstorage_timestamp.log
logfile-name = $${basedirectory:log}/tidstorage.log
pidfile-name = $${basedirectory:run}/tidstorage.pid
status-file = $${directory:tidstorage}/tidstorage.tid
tidstorage-repozo-binary = ${buildout:bin-directory}/tidstorage_repozo
tidstoraged-binary = ${buildout:bin-directory}/tidstoraged
repozo-binary = ${buildout:bin-directory}/repozo
tidstorage-wrapper = $${basedirectory:services}/tidstoraged
repozo-wrapper = $${buildout:bin-directory}/tidstorage-repozo
[cron-entry-tidstorage-backup]
<= cron
recipe = slapos.cookbook:cron.d
name = tidstorage
frequency = 0 0 * * *
command = $${tidstorage:repozo-wrapper}
[logrotate-entry-tidstorage]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = tidstorage
log = $${tidstorage:logfile-name}
post = ${buildout:bin-directory}/killpidfromfile $${tidstorage:pidfile-name} SIGHUP
[directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
cronstamps = $${rootdirectory:etc}/cronstamps/
cronoutput = $${basedirectory:log}/cron/
logrotate-backup = $${basedirectory:backup}/logrotate/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:binary}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
binary = $${rootdirectory:bin}/cron_simplelogger
output = $${directory:cronoutput}
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
# The remaining parts are candidates for generic, reusable sections
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[zeo-instance-entry-common]
recipe = slapos.cookbook:zeo
ip = $${slap-network-information:local-ipv4}
binary-path = ${buildout:bin-directory}/runzeo
[erp5-certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:erp5-ca-dir}
requests-directory = $${erp5-cadirectory:requests}
wrapper = $${basedirectory:services}/erp5-ca
ca-private = $${erp5-cadirectory:private}
ca-certs = $${erp5-cadirectory:certs}
ca-newcerts = $${erp5-cadirectory:newcerts}
ca-crl = $${erp5-cadirectory:crl}
[erp5-cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:erp5-ca-dir}/requests/
private = $${directory:erp5-ca-dir}/private/
certs = $${directory:erp5-ca-dir}/certs/
newcerts = $${directory:erp5-ca-dir}/newcerts/
crl = $${directory:erp5-ca-dir}/crl/
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs/
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
tmp = $${buildout:directory}/tmp/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
[directory]
recipe = slapos.cookbook:mkdirectory
tidstorage = $${rootdirectory:srv}/tidstorage/
zodb = $${rootdirectory:srv}/zodb/
zodb-backup = $${basedirectory:backup}/zodb
instance = $${rootdirectory:srv}/erp5shared/
instance-etc = $${:instance}/etc
apache-conf = $${rootdirectory:etc}/apache
instance-etc-package-include = $${:instance}/etc/package-include
# Because of a bug in slapos.recipe.template, keys are lowercased
instance-document = $${:instance}/Document
instance-propertysheet = $${:instance}/PropertySheet
instance-products = $${:instance}/Products
instance-extensions = $${:instance}/Extensions
instance-constraint = $${:instance}/Constraint
instance-import = $${:instance}/import
instance-lib = $${:instance}/lib
instance-tests = $${:instance}/tests
erp5-ca-dir = $${rootdirectory:srv}/erp5-ssl/
ca-dir = $${rootdirectory:srv}/ssl/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
cronstamps = $${rootdirectory:etc}/cronstamps/
cronoutput = $${basedirectory:log}/cron/
logrotate-backup = $${basedirectory:backup}/logrotate/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
[binary-link]
recipe = slapos.cookbook:symbolic.link
target-directory = $${rootdirectory:bin}
link-binary =
${coreutils:location}/bin/basename
${coreutils:location}/bin/cat
${coreutils:location}/bin/cp
${coreutils:location}/bin/ls
${coreutils:location}/bin/tr
${coreutils:location}/bin/uname
${git:location}/bin/git
${graphviz:location}/bin/dot
${grep:location}/bin/grep
${imagemagick:location}/bin/convert
${imagemagick:location}/bin/identify
${mariadb:location}/bin/mysql
${mariadb:location}/bin/mysqldump
${pdftk:location}/bin/pdftk
${sed:location}/bin/sed
${tesseract:location}/bin/tesseract
${w3m:location}/bin/w3m
${poppler:location}/bin/pdfinfo
${poppler:location}/bin/pdftotext
${poppler:location}/bin/pdftohtml
[erp5-update]
recipe = slapos.cookbook:erp5.update
# Configuration
url = http://$${%(zope_section)s:user}:$${%(zope_section)s:password}@$${%(zope_section)s:ip}:$${%(zope_section)s:port}/
mysql-url = %(mysql-url)s
kumofs-url = %(kumofs-url)s
memcached-url = %(memcached-url)s
cloudooo-url = %(cloudooo-url)s
site-id = %(site_id)s
openssl-binary = ${openssl:location}/bin/openssl
cadir-path = $${erp5-certificate-authority:ca-dir}
# Paths
update-wrapper = $${basedirectory:services}/erp5-update
# Defaults
configurator-bt5-list = erp5_full_text_myisam_catalog erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_ung erp5_configurator_run_my_doc
bt5-repository-list = $${%(zope_section)s:bt5-repository-list}
[zeo-instance-%(zeo_id)s]
<= zeo-instance-entry-common
log-path = $${basedirectory:log}/zeo-%(zeo_id)s.log
pid-path = $${basedirectory:run}/zeo-%(zeo_id)s.pid
conf-path = $${rootdirectory:etc}/zeo-%(zeo_id)s.conf
port = %(zeo_port)s
storage =
%(storage_list)s
wrapper-path = $${basedirectory:services}/zeo-%(zeo_id)s
[logrotate-entry-zeo-%(zeo_id)s]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = zeo-%(zeo_id)s
log = $${zeo-instance-%(zeo_id)s:log-path}
post = ${buildout:bin-directory}/killpidfromfile $${zeo-instance-%(zeo_id)s:pid-path} SIGUSR2
[%(zope_id)s]
recipe = slapos.cookbook:generic.zope.zeo.client
# XXX: products will no longer be needed once all ERP5 products (and their
# dependencies) are eggified; they will then be available directly through the
# software release's eggs
products = ${products:list}
# Options
user = zope
port = %(zope_port)s
ip = $${slap-network-information:local-ipv4}
thread-amount = %(zope_thread_amount)s
deadlock-path = /manage_debug_threads
timezone = %(timezone)s
zeo-connection-string =
%(zeo_connection_string)s
timeserver = %(zope_timeserver)s
# long request
longrequest-logger-file = %(longrequest_logger_file)s
longrequest-logger-timeout = %(longrequest_logger_timeout)s
longrequest-logger-interval = %(longrequest_logger_interval)s
# Paths
wrapper = $${basedirectory:services}/%(zope_id)s
instance-path = $${directory:instance}
instance-etc = $${directory:instance-etc}
instance-products = $${directory:instance-products}
bt5-repository = $${rootdirectory:var}/bt5_repository
tmp-path = $${rootdirectory:tmp}
bin-path = $${rootdirectory:bin}
site-zcml = $${:instance-etc}/site.zcml
pid-file = $${basedirectory:run}/%(zope_id)s.pid
lock-file = $${basedirectory:run}/%(zope_id)s.lock
event-log = $${basedirectory:log}/%(zope_id)s-event.log
z2-log = $${basedirectory:log}/%(zope_id)s-Z2.log
configuration-file = $${rootdirectory:etc}/%(zope_id)s.conf
inituser = $${:instance-path}/inituser
# Binary information
runzope-binary = ${buildout:bin-directory}/runzope
# BT5 Configuration
bt5-repository-list =
[logrotate-entry-%(zope_id)s]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = %(zope_id)s
log = $${%(zope_id)s:event-log} $${%(zope_id)s:z2-log}
post = ${buildout:bin-directory}/killpidfromfile $${%(zope_id)s:pid-file} SIGUSR2
......@@ -5,7 +5,14 @@ extends =
versions = versions
# Local development
develop =
${:parts-directory}/slapos.cookbook-repository
parts +=
# Local development
slapos.cookbook-repository
check-recipe
slapos.cookbook-python2.6
# Create instance template
template
validator
......@@ -15,23 +22,124 @@ parts +=
# Development / fast-switching environment for the whole software release
unzip = true
[instance-recipe]
# Note: if a specific instantiation recipe is used, this is the place to
# put its name
egg = slapos.cookbook
module = erp5
# Local development
[slapos.cookbook-repository]
recipe = plone.recipe.command
stop-on-error = true
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = ${git:location}/bin/git clone --branch erp5 --quiet http://git.erp5.org/repos/slapos.git ${:location}
update-command = cd ${:location} && ${git:location}/bin/git pull --quiet
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[slapos.cookbook-python2.6]
recipe = zc.recipe.egg
eggs = slapos.cookbook
scripts =
python = python2.6
[template-mariadb]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-mariadb.cfg
md5sum = 3225a2ba7337505bdeb95867f5028891
output = ${buildout:directory}/template-mariadb.cfg
mode = 0644
[template-sphinx]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-sphinx.cfg
md5sum = b6d196fb9014222dacef13a0d12a445d
output = ${buildout:directory}/template-sphinx.cfg
mode = 0644
# Additional Configuration
configurator_bt5_list = erp5_core_proxy_field_legacy erp5_full_text_myisam_catalog erp5_base erp5_workflow erp5_configurator erp5_configurator_standard erp5_configurator_maxma_demo erp5_configurator_ung
[template-zope]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-zope.cfg
md5sum = 769a7ba8e8c5865637e374376be8cbcd
output = ${buildout:directory}/template-zope.cfg
mode = 0644
[template-cloudooo]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-cloudooo.cfg
md5sum = 8ea2839e951f26af2bc74b9a8c0fa5c2
output = ${buildout:directory}/template-cloudooo.cfg
mode = 0644
[template-snippet-fontconfig]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/snippet-fontconfig.cfg
md5sum = 76c5d4cd1c8d48648684d9873f1ffed3
output = ${buildout:directory}/template-snippet-fontconfig.cfg
mode = 0644
[template-kumofs]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-kumofs.cfg
md5sum = c12d8dc1925d16a75247971086734aa8
output = ${buildout:directory}/template-kumofs.cfg
mode = 0644
[template]
# Default template for erp5 instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 07f09cca8ad4d7858bb40d723998a889
md5sum = 961d404f5726fce7c4d8b34d7e120077
output = ${buildout:directory}/template.cfg
mode = 0644
[template-memcached]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-memcached.cfg
md5sum = 9a1462f3e34b99f384ae47b48a3a733c
output = ${buildout:directory}/template-memcached.cfg
mode = 0644
[template-erp5-development]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-erp5-development.cfg
md5sum = 9af9db6f1220ffe42e030c81f9072669
output = ${buildout:directory}/template-erp5-development.cfg
mode = 0644
[template-erp5-production]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-erp5-production.cfg
md5sum = 32f6537110bbc29e7cc5613695863096
output = ${buildout:directory}/template-erp5-production.cfg
mode = 0644
[template-snippet-zeo]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/snippet-zeo.cfg
md5sum = ca9ee3825ff90c5c894ff1da8c601552
output = ${buildout:directory}/template-snippet-zeo.cfg
mode = 0644
[template-snippet-master]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/snippet-master.cfg
md5sum = c9bea484c2827954b9f9f728ce0342fb
output = ${buildout:directory}/template-snippet-master.cfg
mode = 0644
[template-snippet-zope]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/snippet-zope.cfg
md5sum = 99fcce6aa6120787870522898650800d
output = ${buildout:directory}/template-snippet-zope.cfg
mode = 0644
[template-snippet-backend]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/snippet-backend.cfg
md5sum = e2b26547ba1435ec1b8e8cd1de89e2c6
output = ${buildout:directory}/template-snippet-backend.cfg
mode = 0644
[validator]
# Default json schema for instance parameters.
recipe = slapos.recipe.template
......@@ -85,139 +193,3 @@ signature-certificate-list =
KrDm39slYD/1KoE5kB4l/p6KVOdeJ4I6xcgu9rnkqqHzDwI4v7e8/D3WZbpiFUsY
vaZhjNYKWQf79l6zXfOvphzJ
-----END CERTIFICATE-----
[erp5]
revision = 7d57428a5cfd0fceed70acb9e86cf274558d606c
[versions]
MySQL-python = 1.2.3
Paste = 1.7.5.1
PasteScript = 1.7.5
Products.CMFActionIcons = 2.1.3
Products.CMFCalendar = 2.2.2
Products.CMFCore = 2.2.5
Products.CMFDefault = 2.2.2
Products.CMFTopic = 2.2.1
Products.CMFUid = 2.2.1
Products.DCWorkflowGraph = 0.4.1
Products.ExternalEditor = 1.1.0
Products.GenericSetup = 1.6.4
Products.MimetypesRegistry = 2.0.3
Products.PluggableAuthService = 1.7.6
Products.PluginRegistry = 1.3b1
Products.TIDStorage = 5.4.7.dev-r45842
Products.Zelenium = 1.0.3
StructuredText = 2.11.1
Werkzeug = 0.8.1
cElementTree = 1.0.5-20051216
chardet = 1.0.1
cloudooo = 1.2.3
cloudooo.handler.ffmpeg = 0.1
cloudooo.handler.imagemagick = 0.1
cloudooo.handler.ooo = 0.2
cloudooo.handler.pdf = 0.1
csp-eventlet = 0.7.0
elementtree = 1.2.7-20070827-preview
erp5.recipe.cmmiforcei686 = 0.1.3
erp5diff = 0.8.1.5
eventlet = 0.9.16
feedparser = 5.1
five.localsitemanager = 2.0.5
greenlet = 0.3.1
hexagonit.recipe.cmmi = 1.5.0
hexagonit.recipe.download = 1.5.0
http-parser = 0.7.1
ipdb = 0.6.1
ipython = 0.11
meld3 = 0.6.7
ordereddict = 1.1
paramiko = 1.7.7.1
plone.recipe.command = 1.1
ply = 3.4
psutil = 0.4.0
python-ldap = 2.4.6
python-memcached = 1.47
restkit = 3.3.2
rtjp-eventlet = 0.3.2
slapos.cookbook = 0.38
slapos.recipe.build = 0.7
slapos.recipe.template = 2.2
threadframe = 0.2
timerserver = 2.0.2
urlnorm = 1.1.2
uuid = 1.30
validictory = 0.7.2
xupdate-processor = 0.4
# Required by:
# slapos.core==0.20
Flask = 0.8
# Required by:
# PasteScript==1.7.5
# cloudooo==1.2.3
PasteDeploy = 1.5.0
# Required by:
# cloudooo==1.2.3
WSGIUtils = 0.7
# Required by:
# cloudooo==1.2.3
# slapos.core==0.20
argparse = 1.1
# Required by:
# SOAPpy==0.12.0nxd001
fpconst = 0.7.2
# Required by:
# slapos.cookbook==0.38
netaddr = 0.7.6
# Required by:
# slapos.core==0.20
netifaces = 0.6
# Required by:
# cloudooo==1.2.3
python-magic = 0.4.0.1
# Required by:
# Products.CMFActionIcons==2.1.3
# Products.CMFCalendar==2.2.2
# Products.CMFCore==2.2.5
# Products.CMFDefault==2.2.2
# Products.CMFTopic==2.2.1
# Products.CMFUid==2.2.1
# Products.DCWorkflow==2.2.3nxd002
# Products.DCWorkflowGraph==0.4.1
# Products.ExternalEditor==1.1.0
# Products.GenericSetup==1.6.4
# Products.MimetypesRegistry==2.0.3
# Products.PluggableAuthService==1.7.6
# Products.PluginRegistry==1.3b1
# Products.TIDStorage==5.4.7.dev-r45842
# Products.Zelenium==1.0.3
# Zope2==2.12.20
# five.localsitemanager==2.0.5
# python-ldap==2.4.6
# slapos.cookbook==0.38
# slapos.core==0.20
# zc.buildout==1.6.0-dev-SlapOS-003
# zc.recipe.egg==1.2.2
# zope.deprecation==3.4.0
# zope.structuredtext==3.4.0
setuptools = 0.6c12dev-r88846
# Required by:
# slapos.cookbook==0.38
slapos.core = 0.20
# Required by:
# slapos.core==0.20
supervisor = 3.0a10
# Required by:
# slapos.cookbook==0.38
xml-marshaller = 0.9.7
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
parts =
connection-dict
testnode
pwgen
shell
shellinabox
certificate-authority
ca-shellinabox
[connection-dict]
recipe = slapos.cookbook:publish
url = http://[$${shellinabox:ipv6}]:$${shellinabox:port}/
password = $${pwgen:password}
[pwgen]
recipe = slapos.cookbook:pwgen
file = $${buildout:directory}/.password
pwgen-binary = ${pwgen:location}/bin/pwgen
[testnode]
recipe = slapos.cookbook:erp5testnode
slapos-directory = $${directory:slapos}
working-directory = $${directory:testnode}
test-suite-directory = $${directory:test-suite}
proxy-host = $${slap-network-information:global-ipv6}
proxy-port = 5000
log-directory = $${directory:log}
run-directory = $${directory:run}
test-suite-title = $${slap-parameter:test-suite-title}
test-node-title = $${slap-parameter:test-node-title}
test-suite = $${slap-parameter:test-suite}
node-quantity = $${slap-parameter:node-quantity}
project-title = $${slap-parameter:project-title}
ipv4-address = $${slap-network-information:local-ipv4}
ipv6-address = $${slap-network-information:global-ipv6}
test-suite-master-url = $${slap-parameter:test-suite-master-url}
bt5-path = $${slap-parameter:additional-bt5-repository-id}
instance-dict = $${slap-parameter:instance-dict}
repository-list = $${slap-parameter:vcs-repository-list}
git-binary = ${git:location}/bin/git
slapgrid-partition-binary = ${buildout:bin-directory}/slapgrid-cp
slapgrid-software-binary = ${buildout:bin-directory}/slapgrid-sr
slapproxy-binary = ${buildout:bin-directory}/slapproxy
svn-binary = ${subversion:location}/bin/svn
svnversion-binary = ${subversion:location}/bin/svnversion
testnode = ${buildout:bin-directory}/testnode
zip-binary = ${zip:location}/bin/zip
configuration-file = $${rootdirectory:etc}/erp5testnode.cfg
log-file = $${basedirectory:log}/erp5testnode.log
wrapper = $${basedirectory:services}/erp5testnode
[shell]
recipe = slapos.cookbook:shell
wrapper = $${rootdirectory:bin}/sh
shell = ${busybox:location}/bin/sh
home = $${buildout:directory}
ps1 = "\\w> "
path =
${busybox:location}/bin/
${busybox:location}/usr/bin/
${git:location}/bin/
${subversion:location}/bin/
${perl:location}/bin/
${python2.7:location}/bin/
${buildout:bin-directory}/
${busybox:location}/sbin/
${busybox:location}/usr/sbin/
[shellinabox]
recipe = slapos.cookbook:shellinabox
ipv6 = $${slap-network-information:global-ipv6}
port = 8080
shell = $${shell:wrapper}
wrapper = $${rootdirectory:bin}/shellinaboxd
shellinabox-binary = ${shellinabox:location}/bin/shellinaboxd
password = $${pwgen:password}
directory = $${buildout:directory}/
login-shell = $${rootdirectory:bin}/login
certificate-directory = $${directory:shellinabox}
cert-file = $${directory:shellinabox}/public.crt
key-file = $${directory:shellinabox}/private.key
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs/
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[ca-shellinabox]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = $${shellinabox:wrapper}
wrapper = $${basedirectory:services}/shellinaboxd
key-file = $${shellinabox:key-file}
cert-file = $${shellinabox:cert-file}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
tmp = $${buildout:directory}/tmp/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
slapos = $${rootdirectory:srv}/slapos/
testnode = $${rootdirectory:srv}/testnode/
test-suite = $${rootdirectory:srv}/test_suite/
log = $${basedirectory:log}/testnode/
run = $${basedirectory:run}/testnode/
shellinabox = $${rootdirectory:srv}/shellinabox/
ca-dir = $${rootdirectory:srv}/ca/
[slap-parameter]
node-quantity = 1
test-suite-master-url =
additional-bt5-repository-id =
instance-dict =
vcs-repository-list = []
test-suite-title = $${:test-suite}
[buildout]
parts =
testnode
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[testnode]
recipe = slapos.cookbook:erp5testnode
git_binary = ${git:location}/bin/git
slapgrid_partition_binary = ${buildout:bin-directory}/slapgrid-cp
slapgrid_software_binary = ${buildout:bin-directory}/slapgrid-sr
slapproxy_binary = ${buildout:bin-directory}/slapproxy
svn_binary = ${subversion:location}/bin/svn
svnversion_binary = ${subversion:location}/bin/svnversion
testnode = ${buildout:bin-directory}/testnode
zip_binary = ${zip:location}/bin/zip
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-default:output}
......@@ -18,8 +18,17 @@ extends =
../../component/git/buildout.cfg
../../component/lxml-python/buildout.cfg
../../component/zip/buildout.cfg
../../component/busybox/buildout.cfg
../../component/shellinabox/buildout.cfg
../../component/pwgen/buildout.cfg
# Local development
develop =
${:parts-directory}/slapos.cookbook-repository
parts =
# Local development
slapos.cookbook-repository
check-recipe
template
lxml-python
eggs
......@@ -43,6 +52,27 @@ allow-hosts =
peak.telecommunity.com
psutil.googlecode.com
www.dabeaz.com
launchpad.net
# XXX: Workaround for a SlapOS limitation
# Unzipping of eggs is required, as SlapOS does not yet provide a nicely
# working development / fast-switching environment for the whole software
unzip = true
# Local development
[slapos.cookbook-repository]
recipe = plone.recipe.command
stop-on-error = true
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = ${git:location}/bin/git clone --quiet http://git.erp5.org/repos/slapos.git ${:location}
update-command = cd ${:location} && ${git:location}/bin/git pull --quiet
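# Sanity check: the develop line above must take precedence over the released
# egg, i.e. the slapos.cookbook.egg-link generated in develop-eggs/ has to point
# at the checkout under parts/ (hence the grep for "parts" below).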
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link
[eggs]
recipe = zc.recipe.egg
......@@ -51,6 +81,7 @@ eggs =
zc.buildout
slapos.libnetworkcache
slapos.core
inotifyx
slapos.cookbook
erp5.util[testnode]
......@@ -70,10 +101,17 @@ recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
mode = 0644
md5sum = 08e3f92bce41efc5bfe044bb9d354786
md5sum = 307663d73ef3ef94b02567ecd322252e
[template-default]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-default.cfg
output = ${buildout:directory}/template-default.cfg
mode = 0644
md5sum = 2349e62d4fa46fc88e05d855f49b7c38
[networkcache]
# Romain Courteaud + Sebastien Robin signature certificate
# Romain Courteaud + Sebastien Robin + Antoine Catton signature certificate
signature-certificate-list =
-----BEGIN CERTIFICATE-----
MIIB4DCCAUkCADANBgkqhkiG9w0BAQsFADA5MQswCQYDVQQGEwJGUjEZMBcGA1UE
......@@ -101,6 +139,19 @@ signature-certificate-list =
R/KCo6D0sw0ZgeQv1aUXbl/xJ9k4jlTxmWbPeiiPZEqU1W9wN5lkGuLxV4CEGTKU
hJA/yXa1wbwIPGvX3tVKdOEWPRXZLg==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB8DCCAVmgAwIBAgIJAJ5iKK7w6AQVMA0GCSqGSIb3DQEBBQUAMBExDzANBgNV
BAMMBkNPTVAtMTAeFw0xMTExMjkxNjU1NDZaFw0xMjExMjgxNjU1NDZaMBExDzAN
BgNVBAMMBkNPTVAtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6IBOEnQP
qT1KPBT8xQnYe6ECxXkfV9zhuVwolB/2wHM1Qk0TRoOPRRHfOvzkLQUVkD9on6Dj
mJsYhrukqaidv0Y9n7c5JKM8DfFh41UP3wG6rJpPJ9VZ0ZzFjk+yzCSYrI6rf3Yd
vRhNbKHfBO+23DJJqeEiobxsOQc6eE9Gt78CAwEAAaNQME4wHQYDVR0OBBYEFDLf
TAc8fEZjGWNA62Tld7iSrgKgMB8GA1UdIwQYMBaAFDLfTAc8fEZjGWNA62Tld7iS
rgKgMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA28CQAN/jC3X+5lOh
wYO0+HvXSa6TNy6k+xAXQHEezOSgA7QMjXGVPHNzZRxhI4k6mdGImmPlKfV/GgcA
T3XATu8g033m2pCtNXW6Awp4V2Mft+w5Hx4Y3iT0Uccxc/699PU/MGroBKVCphwB
/SvdKJ6l9YsKUKhJlMRQxc9QHdU=
-----END CERTIFICATE-----
[versions]
# Use SlapOS patched zc.buildout
......
[buildout]
parts =
java
versions = versions
[java]
recipe = slapos.recipe.java
......@@ -12,7 +12,6 @@ parts =
extends =
../../stack/lamp.cfg
../../stack/shacache-client.cfg
[application]
recipe = hexagonit.recipe.download
......
......@@ -7,8 +7,8 @@ parts = instance
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
source = ${application:location}
#template = ${application-template:location}/${application-template:filename}
#configuration = ${application-configuration:location}
template = ${application-template:location}/${application-template:filename}
configuration = ${application-configuration:location}
httpd_binary = ${apache:location}/bin/httpd
mysql_binary = ${mariadb:location}/bin/mysql
......
......@@ -10,8 +10,7 @@ parts =
downloadcache-workaround
extends =
http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.60:/stack/lamp.cfg
http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.60:/stack/shacache-client.cfg
../../stack/lamp.cfg
[application]
recipe = slapos.recipe.build:download-unpacked
......@@ -23,7 +22,7 @@ strip-top-level-dir = true
[application-template]
recipe = slapos.recipe.download
url = Student shall put here url to template file for application
url = ${:_profile_base_location_}/template/REPLACE_ME_BY_APPLICATION_TEMPLATE_NAME
#md5sum = Student may put here the md5sum of this file; this is a good idea
filename = template.in
mode = 0644
......@@ -54,7 +53,71 @@ eggs =
[versions]
# Use SlapOS patched zc.buildout
zc.buildout = 1.5.3-dev-SlapOS-010
zc.buildout = 1.6.0-dev-SlapOS-003
# Generated by buildout-versions
Jinja2 = 2.6
Werkzeug = 0.8.2
buildout-versions = 1.7
hexagonit.recipe.cmmi = 1.5.0
meld3 = 0.6.8
plone.recipe.command = 1.1
slapos.cookbook = 0.20
slapos.recipe.build = 0.7
slapos.recipe.download = 1.0.dev-r4053
slapos.recipe.template = 2.2
# Required by:
# slapos.core==0.21
Flask = 0.8
# Required by:
# slapos.cookbook==0.38
PyXML = 0.8.4
# Required by:
# hexagonit.recipe.cmmi==1.5.0
hexagonit.recipe.download = 1.5.0
# Required by:
# slapos.cookbook==0.38
# slapos.core==0.21
# xml-marshaller==0.9.7
lxml = 2.3.3
# Required by:
# slapos.cookbook==0.38
netaddr = 0.7.6
# Required by:
# slapos.core==0.21
netifaces = 0.6
# Required by:
# slapos.cookbook==0.38
# slapos.core==0.21
# zc.buildout==1.6.0-dev-SlapOS-003
# zc.recipe.egg==1.3.2
setuptools = 0.6c12dev-r88846
# Required by:
# slapos.cookbook==0.38
slapos.core = 0.21
# Required by:
# slapos.core==0.21
supervisor = 3.0a12
# Required by:
# slapos.cookbook==0.38
xml-marshaller = 0.9.7
# Required by:
# slapos.cookbook==0.38
zc.recipe.egg = 1.3.2
# Required by:
# slapos.core==0.21
zope.interface = 3.8.0
[downloadcache-workaround]
# workaround for an irritating problem of hexagonit.recipe.cmmi, which automatically
......
<?php
$cfg['blowfish_secret'] = ''; /* YOU MUST FILL IN THIS FOR COOKIE AUTH! */
$i = 0;
$i++;
$i = 1;
/* Server parameters */
$cfg['Servers'][$i]['host'] = 'Put here mysql host template key';
$cfg['Servers'][$i]['port'] = 'Put here mysql port template key';
......
......@@ -6,7 +6,7 @@ parts +=
vifib
check-slapos.core
develop =
develop +=
${:parts-directory}/vifib
[check-slapos.core]
......
[buildout]
parts =
apache-php
request-mariadb
mkdir
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[apache-php]
recipe = slapos.cookbook:lamp.request
source = ${application:location}
template = ${application-template:location}/${application-template:filename}
configuration = ${application-configuration:location}
httpd_binary = ${apache:location}/bin/httpd
stunnel_binary = ${stunnel:location}/bin/stunnel
mysql-url = $${request-mariadb:connection-url}
[request-mariadb]
recipe = slapos.cookbook:request
name = MariaDB DataBase
software-url = $${slap_connection:software_release_url}
software-type = mariadb
return = url
[buildout]
versions = versions
parts =
apache-php
template
eggs
instance-recipe-egg
instance_wordpress
instance_mariadb
extends =
../../component/gzip/buildout.cfg
../../stack/apache-php.cfg
../../stack/shacache-client.cfg
../../stack/lamp/buildout.cfg
# XXX: This is dirty; a recipe-based checkout should be used instead.
develop =
/opt/slapdev
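# A recipe-based alternative (a plone.recipe.command section cloning the
# repository into parts/ plus "develop +=" pointing at it, as done for the
# testnode software release earlier in this changeset) would avoid the
# hard-coded path.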
[application]
url = http://wordpress.org/latest.tar.gz
......@@ -33,51 +21,8 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
[application-configuration]
location = wp-config.php
md5sum = 608dd9003a8edeb59c3aabc6cf43bbf9
[instance-recipe]
egg = slapos.cookbook
module = lamp.request
[instance-recipe-egg]
recipe = zc.recipe.egg
eggs = ${instance-recipe:egg}
[template]
# Default template for the instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 817407b6bb7af1dce7588e259ead0d26
output = ${buildout:directory}/template.cfg
mode = 0644
[instance_mariadb]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/../mariadb/instance-mariadb.cfg
md5sum = 38aefa725d21988485c20ae9d58f9455
output = ${buildout:directory}/template-mariadb.cfg
mode = 0644
[instance_wordpress]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-wordpress.cfg
md5sum = 220e14b7a46742b1fd83d699c352b888
output = ${buildout:directory}/template-wordpress.cfg
mode = 0644
md5sum = 3859841a4ad6ed23744d6bc5a01321e0
[versions]
#slapos.cookbook = 0.12
# Required by slapos.cookbook==0.12
slapos.core = 0.8
collective.recipe.template = 1.8
netaddr = 0.7.5
xml-marshaller = 0.9.7
setuptools = 0.6c12dev-r88795
hexagonit.recipe.cmmi = 1.5.0
hexagonit.recipe.download = 1.5.0
plone.recipe.command = 1.1
# Use SlapOS patched zc.buildout
zc.buildout = 1.6.0-dev-SlapOS-002
zc.buildout = 1.6.0-dev-SlapOS-003
......@@ -42,14 +42,14 @@ define('DB_COLLATE', '');
*
* @since 2.6.0
*/
define('AUTH_KEY', 'put your unique phrase here');
define('SECURE_AUTH_KEY', 'put your unique phrase here');
define('LOGGED_IN_KEY', 'put your unique phrase here');
define('NONCE_KEY', 'put your unique phrase here');
define('AUTH_SALT', 'put your unique phrase here');
define('SECURE_AUTH_SALT', 'put your unique phrase here');
define('LOGGED_IN_SALT', 'put your unique phrase here');
define('NONCE_SALT', 'put your unique phrase here');
define('AUTH_KEY', '%(secret_key)s');
define('SECURE_AUTH_KEY', '%(secret_key)s');
define('LOGGED_IN_KEY', '%(secret_key)s');
define('NONCE_KEY', '%(secret_key)s');
define('AUTH_SALT', '%(secret_key)s');
define('SECURE_AUTH_SALT', '%(secret_key)s');
define('LOGGED_IN_SALT', '%(secret_key)s');
define('NONCE_SALT', '%(secret_key)s');
/**#@-*/
......
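The %(secret_key)s placeholders above use Python %-style formatting, so the
instantiation recipe can fill every key and salt from a single generated secret.
A minimal illustrative sketch of that kind of substitution (not the recipe's
actual code; the function name and paths are assumptions):

# Illustration only: render a wp-config template whose placeholders use
# Python %-style formatting, as in the template above.
import os

def render_wp_config(template_path, output_path, secret_key):
    # Read the template containing %(secret_key)s placeholders.
    with open(template_path) as template_file:
        template = template_file.read()
    # Substitute every placeholder with the generated secret.
    rendered = template % {'secret_key': secret_key}
    with open(output_path, 'w') as output_file:
        output_file.write(rendered)
    # The rendered file now contains secrets, so restrict its permissions.
    os.chmod(output_path, 0o600)

# Example (hypothetical paths):
# render_wp_config('wp-config.php.in', 'htdocs/wp-config.php', 'generated-secret')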
[buildout]
find-links +=
http://www.nexedi.org/static/packages/source/slapos.buildout/
# Use only sites that are known to work well.
allow-hosts =
*.nexedi.org
*.python.org
*.sourceforge.net
dist.repoze.org
effbot.org
github.com
peak.telecommunity.com
psutil.googlecode.com
www.dabeaz.com
parts =
template
apache-php
mariadb
eggs
instance-recipe-egg
stunnel
extends =
../component/apache/buildout.cfg
../component/apache-php/buildout.cfg
../component/dcron/buildout.cfg
../component/git/buildout.cfg
../component/glib/buildout.cfg
../component/logrotate/buildout.cfg
../component/python-2.7/buildout.cfg
../component/lxml-python/buildout.cfg
../component/zlib/buildout.cfg
../component/stunnel/buildout.cfg
[application]
recipe = hexagonit.recipe.download
# If the provided tarball does not contain a top-level directory, this option shall be changed to false
strip-top-level-dir = true
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
......@@ -9,7 +9,7 @@ find-links = http://www.nexedi.org/static/packages/source/slapos.buildout/
http://www.nexedi.org/static/packages/source/
extends =
../component/libreoffice-bin/buildout.cfg
../component/openoffice-bin/buildout.cfg
../component/lxml-python/buildout.cfg
../component/python-2.6/buildout.cfg
../component/python-2.7/buildout.cfg
......@@ -24,7 +24,7 @@ extends =
versions = versions
parts =
libreoffice-bin
openoffice-bin
# basic Xorg
libXdmcp
......
......@@ -31,7 +31,7 @@ allow-hosts =
extends =
# Exact version of Zope
http://svn.zope.org/repos/main/Zope/tags/2.12.20/versions.cfg
http://svn.zope.org/repos/main/Zope/tags/2.12.21/versions.cfg
../component/logrotate/buildout.cfg
../component/dcron/buildout.cfg
../component/file/buildout.cfg
......@@ -110,7 +110,6 @@ parts =
# Buildoutish
eggs
instance-recipe-egg
testrunner
test_suite_runner
......@@ -138,6 +137,8 @@ parts =
# fix products path (Zeo needs ERP5 Products to be available for
# conflict resolution)
fix-products-paths
# some additional utils
slapos-toolbox
[bt5-repository]
# Format:
......@@ -225,8 +226,8 @@ stop-on-error = true
repository = http://git.erp5.org/repos/erp5.git
branch = master
revision =
command = ${git:location}/bin/git clone --quiet -b ${:branch} ${:repository} ${:location} && if [ -n ${:revision} ]; then cd ${:location} && ${git:location}/bin/git reset --quiet --hard ${:revision} ; fi
update-command = cd ${:location} && ${git:location}/bin/git pull --quiet && if [ -n ${:revision} ]; then cd ${:location} && ${git:location}/bin/git reset --quiet --hard ${:revision} ; fi
command = ${git:location}/bin/git clone --quiet -b ${:branch} ${:repository} ${:location} && if [ -n "${:revision}" ]; then cd ${:location} && ${git:location}/bin/git reset --quiet --hard ${:revision} ; fi
update-command = cd ${:location} && ${git:location}/bin/git pull --quiet && if [ -n "${:revision}" ]; then cd ${:location} && ${git:location}/bin/git reset --quiet --hard ${:revision} ; fi
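# Note: the quotes around ${:revision} matter. With an empty revision the
# unquoted test collapses to [ -n ], which is true (single-argument form), so
# git reset would run without a revision; [ -n "" ] is correctly false and the
# reset is skipped.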
[products]
# XXX: ERP5 related products are not defined as python distributions, so it is
......@@ -295,13 +296,6 @@ initialization =
repository_id_list = list(reversed('''${erp5_repository_list:repository_id_list}'''.split()))
sys.path[0:0] = ['/'.join(['''${buildout:parts-directory}''', x]) for x in repository_id_list]
[instance-recipe-egg]
recipe = zc.recipe.egg
python = python2.6
eggs =
# instantiation egg
${instance-recipe:egg}
[eggs]
recipe = zc.recipe.egg
python = python2.6
......@@ -315,6 +309,7 @@ eggs =
SOAPpy
cElementTree
chardet
coverage
elementtree
erp5diff
ipdb
......@@ -396,6 +391,17 @@ python = python2.6
[pysvn-python]
python = python2.6
[slapos-toolbox]
recipe = zc.recipe.egg
python = ${eggs:python}
eggs =
${lxml-python:egg}
slapos.toolbox
scripts =
killpidfromfile = slapos.systool:killpidfromfile
onetimedownload = slapos.toolbox:onetimedownload
[cloudooo]
recipe = zc.recipe.egg
python = python2.6
......
[buildout]
extensions = buildout-versions
find-links +=
http://www.nexedi.org/static/packages/source/slapos.buildout/
......@@ -35,6 +37,7 @@ extends =
../component/stunnel/buildout.cfg
../component/pycrypto-python/buildout.cfg
../component/mysql-python/buildout.cfg
shacache-client.cfg
../component/python-2.7/buildout.cfg
# python-2.7 component is here only for compatibility with old software.cfg.
# It is not needed and should not be used in LAMP-based software.cfg
......
[buildout]
find-links +=
http://www.nexedi.org/static/packages/source/slapos.buildout/
parts =
apache-php
mariadb
mydumper
mysql-python
rdiff-backup
dropbear
eggs
instance-recipe-egg
template
template-apache-php
template-mariadb
template-mariadb-pbsready
template-mariadb-pbsready-import
template-mariadb-pbsready-export
allow-hosts =
*.nexedi.org
*.python.org
*.sourceforge.net
alastairs-place.net
dist.repoze.org
effbot.org
github.com
peak.telecommunity.com
psutil.googlecode.com
www.dabeaz.com
launchpad.net
extends =
../shacache-client.cfg
../../component/mariadb/buildout.cfg
../../component/apache/buildout.cfg
../../component/apache-php/buildout.cfg
../../component/dcron/buildout.cfg
../../component/git/buildout.cfg
../../component/glib/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/python-2.7/buildout.cfg
../../component/perl/buildout.cfg
../../component/sqlite3/buildout.cfg
../../component/lxml-python/buildout.cfg
../../component/zlib/buildout.cfg
../../component/rdiff-backup/buildout.cfg
../../component/gzip/buildout.cfg
../../component/stunnel/buildout.cfg
../../component/mydumper/buildout.cfg
../../component/mysql-python/buildout.cfg
../../component/dropbear/buildout.cfg
versions = versions
[mariadb]
# The compile dir is only needed for plugins, and there are no plugins in LAMP
keep-compile-dir = false
[instance-recipe]
egg = slapos.cookbook
module = lamp.request
[instance-recipe-egg]
recipe = zc.recipe.egg
#python = python2.7
eggs = ${instance-recipe:egg}
[application]
recipe = hexagonit.recipe.download
# If the provided tarball does not contain a top-level directory, this option shall be changed to false
strip-top-level-dir = true
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = 8117f10e814a13c5376af4c01e6546d4
mode = 0644
[template-apache-php]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apache-php.cfg
output = ${buildout:directory}/template-apache-php.cfg
md5sum = 45bc82dc468e7f418d95c846d1a33d74
mode = 0644
[template-apache-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apache-backup.cfg
output = ${buildout:directory}/template-apache-backup.cfg
md5sum = cfb77ac8785e0d125a785f69a5339014
mode = 0644
[template-backuped]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-backuped.cfg
output = ${buildout:directory}/template-backuped.cfg
md5sum = f43d1c6412ea8dc83b75573dc00daf9e
mode = 0644
[template-mariadb]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-mariadb.cfg
output = ${buildout:directory}/template-mariadb.cfg
md5sum = 70ab558c2e925c3627c39fbc20a7501c
mode = 0644
[template-mariadb-pbsready]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-mariadb-pbsready.cfg
output = ${buildout:directory}/template-mariadb-pbsready.cfg
md5sum = d2a580dcd7efdd528be45c5ffadfe760
mode = 0644
[template-mariadb-pbsready-import]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-mariadb-pbsready-import.cfg
output = ${buildout:directory}/template-mariadb-pbsready-import.cfg
md5sum = 4a96ff02da3898fef7077fa8baec81ac
mode = 0644
[template-mariadb-pbsready-export]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-mariadb-pbsready-export.cfg
output = ${buildout:directory}/template-mariadb-pbsready-export.cfg
md5sum = 11a9e45e8bc590bc11bfdd304b07a4a5
mode = 0644
[template-pull-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg
output = ${buildout:directory}/template-pull-backup.cfg
md5sum = 9aab30ba5aa23a37d4b507e7c414be00
mode = 0644
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
slapos.toolbox
[buildout]
parts =
urls
apache-proxy
logrotate
logrotate-entry-apache
cron
cron-entry-logrotate
sshkeys-authority
sshkeys-dropbear
dropbear-server
dropbear-server-pbs-authorized-key
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[urls]
recipe = slapos.cookbook:publish
url = http://[$${apache-proxy:ip}]:$${apache-proxy:port}/
ssh-public-key = $${sshkeys-dropbear:public-key-value}
ssh-url = ssh://nobody@[$${dropbear-server:host}]:$${dropbear-server:port}/$${rdiff-backup-server:path}
[apache-proxy]
recipe = slapos.cookbook:apacheproxy
url = $${slap-parameter:proxy-url}
pid-file = $${basedirectory:run}/apache.pid
lock-file = $${basedirectory:run}/apache.lock
ip = $${slap-network-information:global-ipv6}
port = 8080
error-log = $${directory:httpd-log}/error.log
access-log = $${directory:httpd-log}/access.log
httpd-conf = $${rootdirectory:etc}/apache.conf
wrapper = $${basedirectory:services}/apache
promise = $${basedirectory:promises}/apache
httpd-binary = ${apache:location}/bin/httpd
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:sshkeys}/requests/
keys = $${directory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
wrapper = $${basedirectory:services}/sshkeys_authority
keygen-binary = ${dropbear:location}/bin/dropbearkey
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = dropbear
type = rsa
executable = $${dropbear-server:wrapper}
public-key = $${dropbear-server:rsa-keyfile}.pub
private-key = $${dropbear-server:rsa-keyfile}
wrapper = $${basedirectory:services}/sshd
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
home = $${directory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = $${rdiff-backup-server:wrapper}
rsa-keyfile = $${directory:ssh}/server_key.rsa
dropbear-binary = ${dropbear:location}/sbin/dropbear
[dropbear-server-pbs-authorized-key]
<= dropbear-server
recipe = slapos.cookbook:dropbear.add_authorized_key
key = $${slap-parameter:authorized-key}
[rdiff-backup-server]
recipe = slapos.cookbook:pbs
client = false
path = $${directory:htdocs}
wrapper = $${rootdirectory:bin}/rdiffbackup-server
rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-apache]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = apache
log = $${apache-proxy:error-log} $${apache-proxy:access-log}
frequency = daily
rotate-num = 30
sharedscripts = true
notifempty = true
create = true
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
tmp = $${buildout:directory}/tmp/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
htdocs = $${rootdirectory:srv}/www/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
logrotate-backup = $${basedirectory:backup}/logrotate/
cronstamps = $${rootdirectory:etc}/cronstamps/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
ssh = $${rootdirectory:etc}/ssh/
sshkeys = $${rootdirectory:srv}/sshkeys
httpd-log = $${basedirectory:log}/apache/
[buildout]
parts =
url
request-mariadb
apache-php
stunnel
certificate-authority
ca-stunnel
logrotate
logrotate-entry-apache
logrotate-entry-stunnel
cron
cron-entry-logrotate
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[url]
recipe = slapos.cookbook:publishurl
url = http://[$${apache-php:ip}]:$${apache-php:port}/
[mariadb-urlparse]
recipe = slapos.cookbook:urlparse
url = $${request-mariadb:connection-url}
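For reference, a rough Python 2.7 sketch of how such a mysqls:// connection URL
decomposes into the username, password, host, port and database values consumed
below (an illustration only, not the slapos.cookbook:urlparse recipe; the sample
URL is made up):

# Illustration only: split a mysqls:// URL of the form
# mysqls://user:password@[ipv6]:port/database into its parts.
from urlparse import urlsplit  # Python 2 standard library

def split_mysql_url(url):
    parts = urlsplit(url)
    return {
        'username': parts.username,
        'password': parts.password,
        'host': parts.hostname,          # brackets around IPv6 are stripped
        'port': parts.port,
        'path': parts.path.lstrip('/'),  # database name
    }

# split_mysql_url('mysqls://user:secret@[2001:db8::1]:6446/db')
# -> {'username': 'user', 'password': 'secret', 'host': '2001:db8::1',
#     'port': 6446, 'path': 'db'}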
[apache-php]
recipe = slapos.cookbook:apachephp
source = ${application:location}
template = ${application-template:location}/${application-template:filename}
configuration = ${application-configuration:location}
htdocs = $${directory:htdocs}
pid-file = $${basedirectory:run}/apache.pid
lock-file = $${basedirectory:run}/apache.lock
ip = $${slap-network-information:global-ipv6}
port = 8080
error-log = $${directory:httpd-log}/error.log
access-log = $${directory:httpd-log}/access.log
php-ini-dir = $${directory:php-ini-dir}
tmp-dir = $${directory:tmp-php}
httpd-conf = $${rootdirectory:etc}/apache.conf
wrapper = $${basedirectory:services}/apache
promise = $${basedirectory:promises}/apache
httpd-binary = ${apache:location}/bin/httpd
mysql-username = $${mariadb-urlparse:username}
mysql-password = $${mariadb-urlparse:password}
mysql-database = $${mariadb-urlparse:path}
mysql-host = $${stunnel:local-host}
mysql-port = $${stunnel:local-port}
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs/
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[ca-stunnel]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = $${stunnel:wrapper}
wrapper = $${basedirectory:services}/stunnel
key-file = $${stunnel:key-file}
cert-file = $${stunnel:cert-file}
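# stunnel runs here in client mode: PHP connects to the local IPv4 endpoint
# below (used as mysql-host/mysql-port in [apache-php]) and stunnel forwards
# the connection over TLS to the stunnel server published by the MariaDB
# partition, whose address comes from the parsed connection URL.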
[stunnel]
recipe = slapos.cookbook:stunnel
client = true
stunnel-binary = ${stunnel:location}/bin/stunnel
remote-host = $${mariadb-urlparse:host}
remote-port = $${mariadb-urlparse:port}
local-host = $${slap-network-information:local-ipv4}
local-port = 3306
log-file = $${basedirectory:log}/stunnel.log
config-file = $${directory:stunnel-conf}/stunnel.conf
key-file = $${directory:stunnel-conf}/stunnel.key
cert-file = $${directory:stunnel-conf}/stunnel.crt
pid-file = $${basedirectory:run}/stunnel.pid
wrapper = $${rootdirectory:bin}/raw_stunnel
post-rotate-script = $${rootdirectory:bin}/stunnel_post_rotate
[request-mariadb]
<= slap-connection
recipe = slapos.cookbook:request
name = MariaDB
software-url = $${slap-connection:software-release-url}
software-type = mariadb
return = url
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-apache]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = apache
log = $${apache-php:error-log} $${apache-php:access-log}
frequency = daily
rotate-num = 30
sharedscripts = true
notifempty = true
create = true
[logrotate-entry-stunnel]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = stunnel
log = $${stunnel:log-file}
frequency = daily
rotate-num = 30
notifempty = true
create = true
post = $${stunnel:post-rotate-script}
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
tmp = $${buildout:directory}/tmp/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
ca-dir = $${rootdirectory:srv}/ssl/
htdocs = $${rootdirectory:srv}/www/
httpd-log = $${basedirectory:log}/apache/
php-ini-dir = $${rootdirectory:etc}/php/
tmp-php = $${rootdirectory:tmp}/php/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
logrotate-backup = $${basedirectory:backup}/logrotate/
stunnel-conf = $${rootdirectory:etc}/stunnel/
cronstamps = $${rootdirectory:etc}/cronstamps/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
[buildout]
extends =
${template-apache-php:output}
parts =
request-mariadb
request-mariadb-pseudo-replicating
request-apache-backup
request-pull-backup-server
request-pull-backup-server-mariadb
request-pull-backup-server-mariadb-backup
request-pull-backup-server-apache
request-pull-backup-server-apache-backup
url
apache-php
stunnel
certificate-authority
ca-stunnel
logrotate
logrotate-entry-apache
logrotate-entry-stunnel
cron
cron-entry-logrotate
dropbear-server
sshkeys-authority
dropbear-server-pbs-authorized-key
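# Overview of the sections below: on top of the apache-php setup, this profile
# requests a MariaDB export instance, a pseudo-replicating MariaDB import
# instance, an Apache backup instance and a pull-backup server (PBS). The PBS
# pulls from the live instances and pushes to their backup counterparts, with
# notifications wiring the export, pull and push steps together.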
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:sshkeys}/requests/
keys = $${directory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
wrapper = $${basedirectory:services}/sshkeys_authority
keygen-binary = ${dropbear:location}/bin/dropbearkey
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = dropbear
type = rsa
executable = $${dropbear-server:wrapper}
public-key = $${dropbear-server:rsa-keyfile}.pub
private-key = $${dropbear-server:rsa-keyfile}
wrapper = $${basedirectory:services}/sshd
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
home = $${directory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = $${rdiff-backup-server:wrapper}
rsa-keyfile = $${directory:ssh}/server_key.rsa
dropbear-binary = ${dropbear:location}/sbin/dropbear
[dropbear-server-pbs-authorized-key]
<= dropbear-server
recipe = slapos.cookbook:dropbear.add_authorized_key
key = $${request-pull-backup-server:connection-ssh-key}
[rdiff-backup-server]
recipe = slapos.cookbook:pbs
client = false
path = $${directory:htdocs}
wrapper = $${rootdirectory:bin}/rdiffbackup-server
rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup
[request-pull-backup-server]
<= slap-connection
recipe = slapos.cookbook:request
name = PBS (Pull Backup Server)
software-url = $${slap-connection:software-release-url}
software-type = pull-backup
return = ssh-key notification-url feeds-url
slave = false
[request-mariadb]
software-type = mariadb-pbsready-export
config = authorized-key notify
config-authorized-key = $${request-pull-backup-server:connection-ssh-key}
config-notify = $${request-pull-backup-server:connection-notification-url}
return = url ssh-public-key ssh-url notification-id
[request-mariadb-pseudo-replicating]
<= slap-connection
recipe = slapos.cookbook:request
name = MariaDB Backup
software-url = $${slap-connection:software-release-url}
software-type = mariadb-pbsready-import
return = url ssh-public-key ssh-url notification-url
config = authorized-key on-notification
config-authorized-key = $${request-pull-backup-server:connection-ssh-key}
pbs-notification-id = $${slap-connection:computer-id}-$${slap-connection:partition-id}-mariadb-push
config-on-notification = $${request-pull-backup-server:connection-feeds-url}$${:pbs-notification-id}
[request-apache-backup]
<= slap-connection
recipe = slapos.cookbook:request
name = Apache Backup
software-url = $${slap-connection:software-release-url}
software-type = apache-backup
return = url ssh-url ssh-public-key
config = authorized-key proxy-url
config-authorized-key = $${request-pull-backup-server:connection-ssh-key}
config-proxy-url = $${url:url}
[request-pull-backup-server-apache]
<= request-pull-backup-server
slap-connection
recipe = slapos.cookbook:request
name = PBS pulling from Apache
software-url = $${slap-connection:software-release-url}
config = url name type server-key notify notification-id frequency
config-url = ssh://nobody@[$${dropbear-server:host}]:$${dropbear-server:port}/$${rdiff-backup-server:path}
config-name = $${slap-connection:computer-id}-$${slap-connection:partition-id}-apache
config-type = pull
config-server-key = $${sshkeys-dropbear:public-key-value}
config-notify = $${request-pull-backup-server:connection-notification-url}
config-notification-id = $${slap-connection:computer-id}-$${slap-connection:partition-id}-apache-pull
config-frequency = 30 * * * *
slave = true
[request-pull-backup-server-apache-backup]
<= request-pull-backup-server
slap-connection
recipe = slapos.cookbook:request
name = PBS pushing to $${request-apache-backup:name}
software-url = $${slap-connection:software-release-url}
config = url name type server-key on-notification
config-url = $${request-apache-backup:connection-ssh-url}
config-name = $${request-pull-backup-server-apache:config-name}
config-type = push
config-server-key = $${request-apache-backup:connection-ssh-public-key}
config-on-notification = $${request-pull-backup-server:connection-feeds-url}$${request-pull-backup-server-apache:config-notification-id}
slave = true
[request-pull-backup-server-mariadb]
<= request-pull-backup-server
slap-connection
name = PBS pulling from $${request-mariadb:name}
config = url name type server-key on-notification notify notification-id title
config-url = $${request-mariadb:connection-ssh-url}
config-name = $${slap-connection:computer-id}-$${slap-connection:partition-id}-mariadb
config-type = pull
config-server-key = $${request-mariadb:connection-ssh-public-key}
config-on-notification = $${request-mariadb:connection-notification-id}
config-notify = $${request-pull-backup-server:connection-notification-url}
config-notification-id = $${slap-connection:computer-id}-$${slap-connection:partition-id}-mariadb-pull
config-title = Pulling from MariaDB
slave = true
[request-pull-backup-server-mariadb-backup]
<= request-pull-backup-server
slap-connection
name = PBS pushing on $${request-mariadb-pseudo-replicating:name}
config = url name type server-key on-notification notify notification-id title
config-url = $${request-mariadb-pseudo-replicating:connection-ssh-url}
config-name = $${request-pull-backup-server-mariadb:config-name}
config-type = push
config-server-key = $${request-mariadb-pseudo-replicating:connection-ssh-public-key}
config-on-notification = $${request-pull-backup-server:connection-feeds-url}$${request-pull-backup-server-mariadb:config-notification-id}
config-notify = $${request-mariadb-pseudo-replicating:connection-notification-url}
config-notification-id = $${request-mariadb-pseudo-replicating:pbs-notification-id}
config-title = Pushing to MariaDB backup
slave = true
[directory]
ssh = $${rootdirectory:etc}/ssh/
sshkeys = $${rootdirectory:srv}/sshkeys
[buildout]
extends = ${template-mariadb-pbsready:output}
parts += cron-entry-mariadb-backup
[urls]
notification-id = http://[$${notifier:host}]:$${notifier:port}/get/$${notifier-mydumper:name}
[mydumper]
recipe = slapos.cookbook:mydumper
wrapper = $${rootdirectory:bin}/raw_mydumper
backup-directory = $${directory:mariadb-backup}
socket = $${mariadb:socket}
user = root
mydumper-binary = ${mydumper:location}/bin/mydumper
database = $${mariadb:database}
import = false
[notifier-mydumper]
<= notifier
recipe = slapos.cookbook:notifier.notify
name = mydumper
title = Dumping MariaDB Database
executable = $${mydumper:wrapper}
wrapper = $${rootdirectory:bin}/mydumper
notify = $${slap-parameter:notify}
[cron-entry-mariadb-backup]
<= cron
recipe = slapos.cookbook:cron.d
name = backup
frequency = 0 * * * *
command = $${notifier-mydumper:wrapper}
[buildout]
extends = ${template-mariadb-pbsready:output}
parts += mariadb-import-on-notification
[urls]
notification-url = http://[$${notifier:host}]:$${notifier:port}/notify
[mydumper-import]
recipe = slapos.cookbook:mydumper
wrapper = $${rootdirectory:bin}/myloader
backup-directory = $${directory:mariadb-backup}
socket = $${mariadb:socket}
user = root
myloader-binary = ${mydumper:location}/bin/myloader
database = $${mariadb:database}
import = true
[mariadb-import-on-notification]
<= notifier
recipe = slapos.cookbook:notifier.callback
on-notification-id = $${slap-parameter:on-notification}
callback = $${mydumper-import:wrapper}
[buildout]
extends = ${template-mariadb:output}
parts =
urls
mariadb
stunnel
certificate-authority
ca-stunnel
logrotate
logrotate-entry-mariadb
logrotate-entry-stunnel
logrotate-entry-cron
logrotate-entry-equeue
cron
cron-entry-logrotate
sshkeys-authority
dropbear-server
sshkeys-dropbear
dropbear-server-pbs-authorized-key
notifier
[urls]
ssh-public-key = $${sshkeys-dropbear:public-key-value}
ssh-url = ssh://nobody@[$${dropbear-server:host}]:$${dropbear-server:port}/$${rdiff-backup-server:path}
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:sshkeys}/requests/
keys = $${directory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
wrapper = $${basedirectory:services}/sshkeys_authority
keygen-binary = ${dropbear:location}/bin/dropbearkey
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = dropbear
type = rsa
executable = $${dropbear-server:wrapper}
public-key = $${dropbear-server:rsa-keyfile}.pub
private-key = $${dropbear-server:rsa-keyfile}
wrapper = $${basedirectory:services}/sshd
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
home = $${directory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = $${rdiff-backup-server:wrapper}
rsa-keyfile = $${directory:ssh}/server_key.rsa
dropbear-binary = ${dropbear:location}/sbin/dropbear
[dropbear-server-pbs-authorized-key]
<= dropbear-server
recipe = slapos.cookbook:dropbear.add_authorized_key
key = $${slap-parameter:authorized-key}
[rdiff-backup-server]
recipe = slapos.cookbook:pbs
client = false
path = $${directory:mariadb-backup}
wrapper = $${rootdirectory:bin}/rdiffbackup-server
rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup
[logrotate-entry-equeue]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = equeue
log = $${equeue:log}
frequency = daily
rotate-num = 30
[equeue]
recipe = slapos.cookbook:equeue
socket = $${basedirectory:run}/equeue.sock
log = $${basedirectory:log}/equeue.log
database = $${rootdirectory:srv}/equeue.db
wrapper = $${basedirectory:services}/equeue
equeue-binary = ${buildout:bin-directory}/equeue
[notifier]
recipe = slapos.cookbook:notifier
feeds = $${directory:notifier-feeds}
callbacks = $${directory:notifier-callbacks}
id-file = $${rootdirectory:etc}/notifier.id
equeue-socket = $${equeue:socket}
host = $${slap-network-information:global-ipv6}
port = 8080
wrapper = $${basedirectory:services}/notifier
server-binary = ${buildout:bin-directory}/pubsubserver
notifier-binary = ${buildout:bin-directory}/pubsubnotifier
[basedirectory]
cache = $${rootdirectory:var}/cache/
notifier = $${rootdirectory:etc}/notifier/
[directory]
mariadb-backup = $${basedirectory:backup}/mariadb/
ssh = $${rootdirectory:etc}/ssh/
sshkeys = $${rootdirectory:srv}/sshkeys
notifier-feeds = $${basedirectory:notifier}/feeds/
notifier-callbacks = $${basedirectory:notifier}/callbacks/
[buildout]
parts =
urls
mariadb
stunnel
certificate-authority
ca-stunnel
logrotate
logrotate-entry-mariadb
logrotate-entry-stunnel
logrotate-entry-cron
cron
cron-entry-logrotate
gzip-binary = ${gzip:location}/bin/gzip
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[urls]
recipe = slapos.cookbook:publish
url = mysqls://$${mariadb:user}:$${mariadb:password}@[$${stunnel:remote-host}]:$${stunnel:remote-port}/$${mariadb:database}
[mariadb]
recipe = slapos.cookbook:mysql
# Options
recovering = false
user = user
port = 3306
ip = $${slap-network-information:local-ipv4}
database = db
# Paths
wrapper = $${basedirectory:services}/mariadb
update-wrapper = $${basedirectory:services}/mariadb_update
logrotate-post = $${rootdirectory:bin}/mariadb_post_logrotate
data-directory = $${directory:mariadb-data}
pid-file = $${basedirectory:run}/mariadb.pid
socket = $${basedirectory:run}/mariadb.sock
error-log = $${basedirectory:log}/mariadb_error.log
conf-file = $${rootdirectory:etc}/mariadb.cnf
promise = $${basedirectory:promises}/mysql
# Binary information
mysql-binary = ${mariadb:location}/bin/mysql
mysql-install-binary = ${mariadb:location}/bin/mysql_install_db
mysql-upgrade-binary = ${mariadb:location}/bin/mysql_upgrade
mysqld-binary = ${mariadb:location}/libexec/mysqld
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests/
private = $${directory:ca-dir}/private/
certs = $${directory:ca-dir}/certs/
newcerts = $${directory:ca-dir}/newcerts/
crl = $${directory:ca-dir}/crl/
[ca-stunnel]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = $${stunnel:wrapper}
wrapper = $${basedirectory:services}/stunnel
key-file = $${stunnel:key-file}
cert-file = $${stunnel:cert-file}
[stunnel]
recipe = slapos.cookbook:stunnel
stunnel-binary = ${stunnel:location}/bin/stunnel
wrapper = $${rootdirectory:bin}/stunnel
log-file = $${basedirectory:log}/stunnel.log
config-file = $${directory:stunnel-conf}/stunnel.conf
key-file = $${directory:stunnel-conf}/stunnel.key
cert-file = $${directory:stunnel-conf}/stunnel.crt
pid-file = $${basedirectory:run}/stunnel.pid
local-host = $${mariadb:ip}
local-port = $${mariadb:port}
remote-host = $${slap-network-information:global-ipv6}
remote-port = 6446
client = false
post-rotate-script = $${rootdirectory:bin}/stunnel_post_rotate
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = $${buildout:gzip-binary}
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-mariadb]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = mariadb
log = $${mariadb:error-log}
frequency = daily
rotate-num = 30
post = $${mariadb:logrotate-post}
sharedscripts = true
notifempty = true
create = true
[logrotate-entry-stunnel]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = stunnel
log = $${stunnel:log-file}
frequency = daily
rotate-num = 30
notifempty = true
create = true
post = $${stunnel:post-rotate-script}
[logrotate-entry-cron]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = crond
log = $${cron-simplelogger:log}
frequency = daily
rotate-num = 30
notifempty = true
create = true
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
cronstamps = $${rootdirectory:etc}/cronstamps/
ca-dir = $${rootdirectory:srv}/ssl/
mariadb-data = $${rootdirectory:srv}/mariadb/
logrotate-backup = $${basedirectory:backup}/logrotate/
stunnel-conf = $${rootdirectory:etc}/stunnel/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
[buildout]
parts =
connection-dict
pbs
cron
cron-entry-logrotate
logrotate
sshkeys-authority
sshkeys-dropbear
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[connection-dict]
recipe = slapos.cookbook:publish
ssh-key = $${sshkeys-dropbear:public-key-value}
notification-url = http://[$${notifier:host}]:$${notifier:port}/notify
feeds-url = http://[$${notifier:host}]:$${notifier:port}/get/
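# equeue and notifier below provide the notification plumbing used by the PBS:
# the notifier exposes /notify (to receive events) and /get/<name> (feeds to
# subscribe to) and delegates the actual callback execution to the equeue
# service over its unix socket.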
[equeue]
recipe = slapos.cookbook:equeue
socket = $${basedirectory:run}/equeue.sock
log = $${basedirectory:log}/equeue.log
database = $${rootdirectory:srv}/equeue.db
wrapper = $${basedirectory:services}/equeue
equeue-binary = ${buildout:bin-directory}/equeue
[notifier]
recipe = slapos.cookbook:notifier
feeds = $${directory:notifier-feeds}
callbacks = $${directory:notifier-callbacks}
id-file = $${rootdirectory:etc}/notifier.id
equeue-socket = $${equeue:socket}
host = $${slap-network-information:global-ipv6}
port = 8080
wrapper = $${basedirectory:services}/notifier
server-binary = ${buildout:bin-directory}/pubsubserver
notifier-binary = ${buildout:bin-directory}/pubsubnotifier
[dropbear-client]
recipe = slapos.cookbook:dropbear.client
dbclient-binary = ${dropbear:location}/bin/dbclient
wrapper = $${rootdirectory:bin}/ssh
home = $${basedirectory:ssh-home}
identity-file = $${basedirectory:ssh-home}/id_rsa
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:sshkeys}/requests/
keys = $${directory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
keygen-binary = ${dropbear:location}/bin/dropbearkey
wrapper = $${basedirectory:services}/sshkeys_authority
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = pbs
type = rsa
executable = $${dropbear-client:wrapper}
public-key = $${dropbear-client:identity-file}.pub
private-key = $${dropbear-client:identity-file}
wrapper = $${rootdirectory:bin}/do_backup
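# The PBS section below drives rdiff-backup over the dropbear ssh client above:
# cron entries and wrapper scripts are generated for the instances listed in
# slave-instance-list, and results are reported to the notifier so that
# dependent transfers can react through its feeds.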
[pbs]
<= notifier
recipe = slapos.cookbook:pbs
client = true
rdiffbackup-binary = ${buildout:bin-directory}/rdiff-backup
sshclient-binary = $${dropbear-client:wrapper}
known-hosts = $${directory:dot-ssh}/known_hosts
promises-directory = $${basedirectory:promises}
directory = $${directory:pbs-backup}
cron-entries = $${cron:cron-entries}
wrappers-directory = $${directory:pbs-wrappers}
notifier-url = http://[$${notifier:host}]:$${notifier:port}/
slave-instance-list = $${slap-parameter:slave_instance_list}
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
[logrotate-entry-equeue]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = equeue
log = $${equeue:log}
frequency = daily
rotate-num = 30
[logrotate-entry-cron]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = cron
log = $${cron-simplelogger:log}
frequency = daily
rotate-num = 30
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc/
var = $${buildout:directory}/var/
srv = $${buildout:directory}/srv/
bin = $${buildout:directory}/bin/
tmp = $${buildout:directory}/tmp/
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log/
services = $${rootdirectory:etc}/run/
run = $${rootdirectory:var}/run/
backup = $${rootdirectory:srv}/backup/
promises = $${rootdirectory:etc}/promise/
[directory]
recipe = slapos.cookbook:mkdirectory
cronstamps = $${rootdirectory:etc}/cronstamps/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
cronoutput = $${basedirectory:log}/cron-output/
pbs-backup = $${basedirectory:backup}/pbs/
logrotate-entries = $${rootdirectory:etc}/logrotate.d/
logrotate-backup = $${basedirectory:backup}/logrotate/
sshkeys = $${rootdirectory:srv}/sshkeys
pbs-wrappers = $${rootdirectory:bin}/pbs/
dot-ssh = $${basedirectory:ssh-home}/.ssh/
notifier-feeds = $${basedirectory:notifier}/feeds/
notifier-callbacks = $${basedirectory:notifier}/callbacks/
[basedirectory]
ssh-home = $${rootdirectory:home}/ssh
notifier = $${rootdirectory:etc}/notifier/
[rootdirectory]
home = $${buildout:directory}/home/
......@@ -8,5 +8,10 @@ offline = true
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${instance_wordpress:output}
mariadb = ${instance_mariadb:output}
default = ${template-apache-php:output}
backuped = ${template-backuped:output}
mariadb = ${template-mariadb:output}
mariadb-pbsready-import = ${template-mariadb-pbsready-import:output}
mariadb-pbsready-export = ${template-mariadb-pbsready-export:output}
pull-backup = ${template-pull-backup:output}
apache-backup = ${template-apache-backup:output}