Commit d143cb62 authored by Jérome Perrin

Merge remote-tracking branch 'upstream/master' into zope4py2

parents e324de96 429c0f3d
@@ -14,8 +14,8 @@ extends =
[fish-shell]
recipe = slapos.recipe.cmmi
shared = true
-url = https://github.com/fish-shell/fish-shell/releases/download/3.5.1/fish-3.5.1.tar.xz
-md5sum = 59950f8ec9bf9e2fa02ff205dc5a9c57
+url = https://github.com/fish-shell/fish-shell/releases/download/3.6.0/fish-3.6.0.tar.xz
+md5sum = 2a1ae1070c6486093cbea507da8b30e0
configure-command = ${cmake:location}/bin/cmake
configure-options =
-DCMAKE_INSTALL_PREFIX=${:location}
@@ -18,8 +18,8 @@ parts =
[git]
recipe = slapos.recipe.cmmi
shared = true
-url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.38.1.tar.xz
-md5sum = abdafbfb85d205421903a2100c734b17
+url = https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.39.1.tar.xz
+md5sum = 74b450a513504fd7b3f5016f80de5e54
configure-options =
--with-curl=${curl:location}
--with-openssl=${openssl:location}
[buildout]
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
../libtool/buildout.cfg
../git/buildout.cfg
../openssl/buildout.cfg
../util-linux/buildout.cfg
parts = accords
[accords-source]
recipe = slapos.recipe.build:gitclone
repository = http://git.gitorious.ow2.org/ow2-compatibleone/accords-platform.git
[accords]
# Part of Compatible One project
# http://gitorious.ow2.org/ow2-compatibleone/accords-platform
recipe = hexagonit.recipe.cmmi
#url = http://gitorious.ow2.org/ow2-compatibleone/accords-platform/archive-tarball/master
path = ${accords-source:location}/
#md5sum =
# Replaces autogen.sh
configure-command =
./clean
libtoolize -c -f
aclocal -I ${libtool:location}/share/aclocal
automake -c -a -f --add-missing
autoreconf --install
./configure
configure-options =
--with-ssl=${openssl:location}
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
environment =
CPPFLAGS =-I${libuuid:location}/include -I${openssl:location}/include
LDFLAGS =-Wl,-rpath=${openssl:location}/lib -L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib
LIBTOOL=libtool
PATH=${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:%(PATH)s
# The Accords makefile does not install the necessary scripts. Let's copy them ourselves
[accords-scripts]
recipe = plone.recipe.command
requires = ${accords:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
origin = ${accords-source:location}/scripts
destination = ${:location}
command = ([ -e ${:origin}/co-broker ] && [ ! -e ${:destination}/co-broker ] && mkdir ${:destination} && cp -r ${:origin} ${:destination})
[buildout]
parts = apache-perl perl-Apache2-Request
extends =
../apache/buildout.cfg
../perl/buildout.cfg
../util-linux/buildout.cfg
[apache-perl]
# Note: should react to each build of apache and reinstall itself
recipe = slapos.recipe.cmmi
url = http://perl.apache.org/dist/mod_perl-2.0.5.tar.gz
md5sum = 03d01d135a122bd8cebd0cd5b185d674
configure-command =
${perl:location}/bin/perl Makefile.PL
configure-options =
MP_AP_PREFIX=${apache-2.2:location}
LIBS="-L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}"
# for mod_apreq2 and Apache2::Request
[perl-Apache2-Request]
recipe = slapos.recipe.build:cpan
modules =
I/IS/ISAAC/libapreq2-2.13.tar.gz
perl = perl
cpan-configuration =
makepl_arg = ('--with-apache2-apxs=${apache-2.2:location}/bin/apxs')
environment =
APR_LDFLAGS = -L${libuuid:location}/lib -Wl,-R${libuuid:location}/lib -L${libexpat:location}/lib -Wl,-R${libexpat:location}/lib -L${apache-2.2:location}/lib -Wl,-R${apache-2.2:location}/lib
LDFLAGS=-L${libuuid:location}/lib -Wl,-rpath=${libuuid:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib -L${apache-2.2:location}/lib -Wl,-rpath=${apache-2.2:location}/lib
--- a/client/scripts/Makefile.am 2012-08-24 12:11:10.400291229 +0200
+++ b/client/scripts/Makefile.am 2012-09-10 15:21:01.973773228 +0200
@@ -2,16 +2,16 @@
install-exec-hook:
chmod +x boinc-client
- $(INSTALL) -d $(DESTDIR)/etc/init.d
- $(INSTALL) -b boinc-client $(DESTDIR)/etc/init.d/boinc-client
+ $(INSTALL) -d $(DESTDIR)$(exec_prefix)/etc/init.d
+ $(INSTALL) -b boinc-client $(DESTDIR)$(exec_prefix)/etc/init.d/boinc-client
if [ -d /etc/sysconfig ] ; then \
- $(INSTALL) -d $(DESTDIR)/etc/sysconfig ; \
- $(INSTALL) boinc-client.conf $(DESTDIR)/etc/sysconfig/boinc-client ; \
+ $(INSTALL) -d $(DESTDIR)$(exec_prefix)/etc/sysconfig ; \
+ $(INSTALL) boinc-client.conf $(DESTDIR)$(exec_prefix)/etc/sysconfig/boinc-client ; \
elif [ -d /etc/default ] ; then \
- $(INSTALL) -d $(DESTDIR)/etc/default ; \
- $(INSTALL) boinc-client.conf $(DESTDIR)/etc/default/boinc-client ; \
+ $(INSTALL) -d $(DESTDIR)$(exec_prefix)/etc/default ; \
+ $(INSTALL) boinc-client.conf $(DESTDIR)$(exec_prefix)/etc/default/boinc-client ; \
else \
- $(INSTALL) -d $(DESTDIR)/etc ; \
- $(INSTALL) boinc-client.conf $(DESTDIR)/etc/boinc-client.conf ; \
+ $(INSTALL) -d $(DESTDIR)$(exec_prefix)/etc ; \
+ $(INSTALL) boinc-client.conf $(DESTDIR)$(exec_prefix)/etc/boinc-client.conf ; \
fi
\ No newline at end of file
[buildout]
parts =
boinc
extends =
../curl/buildout.cfg
../openssl/buildout.cfg
../zlib/buildout.cfg
../subversion/buildout.cfg
../m4/buildout.cfg
../automake/buildout.cfg
../libtool/buildout.cfg
../pkgconfig/buildout.cfg
../openldap/buildout.cfg
../gnutls/buildout.cfg
[download-boinc]
recipe = slapos.recipe.build:download
url = http://boinc.berkeley.edu/dl/boinc_7.0.28_x86_64-pc-linux-gnu.sh
md5sum = efdfd115ae69227ceb6118ca09777988
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = boinc_7.0.28_x86_64-pc-linux-gnu.sh
mode = 0744
[boinc-bin]
recipe = plone.recipe.command
on_install = true
on_update = true
command = cp ${download-boinc:location}/${download-boinc:filename} ${buildout:parts-directory} && \
cd ${buildout:parts-directory} && \
./${download-boinc:filename}
# boinc client is a prebuilt binary: write LD_LIBRARY_PATH (see the wrapper sketch below)
ld_library = ${curl:location}/lib:${openssl:location}/lib:${zlib:location}/lib
location = ${buildout:parts-directory}/BOINC
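Since the downloaded client is a prebuilt binary, the instance side has to export the libraries listed in ld_library before starting it. A minimal wrapper sketch, assuming hypothetical resolved paths (presumably the instance profile builds something similar from ${boinc-bin:ld_library} and ${boinc-bin:location}):

#!/bin/sh
# Hypothetical wrapper; the paths stand in for the resolved buildout locations.
export LD_LIBRARY_PATH="/srv/slapos/parts/curl/lib:/srv/slapos/parts/openssl/lib:/srv/slapos/parts/zlib/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
exec /srv/slapos/parts/BOINC/boinc "$@"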
[boinc-patch]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/boinc-abs-path.patch
md5sum = 412acedfbcdc8a9a7f196a02465da248
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = boinc-abs-path.patch
#checkout boinc package from svn repository
[boinc-package]
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
tarball = ${buildout:parts-directory}/boinc.tar.gz
svnversion = ${subversion:location}/bin/svnversion
command = ${subversion:location}/bin/svn co --revision 26162 http://boinc.berkeley.edu/svn/branches/server_stable ${:location} && \
sed -i 's#/usr/bin/svnversion#${:svnversion}#' ${:location}/tools/make_project && \
cd ${:location} && rm -f ${:tarball} && tar -cvzf ${:tarball} .
update-command =
# Now compile the boinc client
[boinc]
recipe = slapos.recipe.cmmi
url = ${boinc-package:tarball}
configure-command =
echo 'AC_PROG_OBJCXX' >> configure.ac
aclocal -I${pkgconfig:location}/share/aclocal -I${libtool:location}/share/aclocal
./_autosetup
./configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--disable-fcgi
--disable-shared
--enable-static
--with-pic
--disable-manager
--disable-server
--with-pkg-config
--with-ssl=${openssl:location}
--with-libldap=${openldap:location}/lib
--with-libgnutls=${gnutls:location}/lib
--with-libcurl=${curl:location}/lib
--with-x=no
patches =
${boinc-patch:location}/${boinc-patch:filename}
patch-options = -p1
environment =
PATH=${pkgconfig:location}/bin:${automake:location}/bin:${autoconf:location}/bin:${libtool:location}/bin:${subversion:location}/bin:${curl:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
M4=${m4:location}/bin/m4
CFLAGS=-fPIC
CPPFLAGS=-I${openssl:location}/include -I${zlib:location}/include
LDFLAGS = -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
ACLOCAL_PATH=${pkgconfig:location}/share/aclocal:${libtool:location}/share/aclocal
[buildout]
parts=
mariadb
boinc
extends =
../subversion/buildout.cfg
../m4/buildout.cfg
../automake/buildout.cfg
../libtool/buildout.cfg
../pkgconfig/buildout.cfg
../openssl/buildout.cfg
../cyrus-sasl/buildout.cfg
../openldap/buildout.cfg
../gnutls/buildout.cfg
../mariadb/buildout.cfg
../curl/buildout.cfg
#checkout boinc package from svn repository
[boinc-package]
recipe = plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
tarball = ${buildout:parts-directory}/boinc.tar.gz
svnversion = ${subversion:location}/bin/svnversion
command = ${subversion:location}/bin/svn co --revision 26162 http://boinc.berkeley.edu/svn/branches/server_stable ${:location} && \
sed -i 's#/usr/bin/svnversion#${:svnversion}#' ${:location}/tools/make_project && \
sed -i "20i\#include <unistd.h>" ${:location}/tools/process_input_template.cpp && \
sed -i "25i\#include <unistd.h>" ${:location}/sched/single_job_assimilator.cpp && \
sed -i "27i\#include <unistd.h>" ${:location}/tools/create_work.cpp && \
sed -i 's#--prefix=$(prefix)#--prefix=/$(prefix)#' ${:location}/py/Makefile.am && \
sed -i 's#parent::base_escape_string#@parent::base_escape_string#' ${:location}/html/inc/boinc_db.inc
cd ${:location} && rm -f ${:tarball} && tar -cvzf ${:tarball} .
update-command =
# Now compile boinc
# When installing the boinc python module, the path /srv/slapgrid/... gets stripped to srv/slapgrid... This is why
# we need to set --prefix="/${buildout:parts-directory}/${:_buildout_section_name_}"
[boinc]
recipe = slapos.recipe.cmmi
url = ${boinc-package:tarball}
keep-compile-dir = true
source = ${buildout:parts-directory}/${:_buildout_section_name_}__compile__
configure-command =
echo 'AC_PROG_OBJCXX' >> configure.ac
aclocal -I${pkgconfig:location}/share/aclocal -I${libtool:location}/share/aclocal
./_autosetup
./configure
configure-options =
--prefix="${buildout:parts-directory}/${:_buildout_section_name_}"
--disable-fcgi
--disable-shared
--enable-static
--disable-manager
--disable-client
--with-pkg-config
--with-ssl=${openssl:location}
--with-libsasl2=${cyrus-sasl:location}/lib
--with-libldap=${openldap:location}/lib
--with-libgnutls=${gnutls:location}/lib
--with-libcurl=${curl:location}/lib
--with-x=no
environment =
PATH=${pkgconfig:location}/bin:${automake:location}/bin:${autoconf:location}/bin:${libtool:location}/bin:${subversion:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
M4=${m4:location}/bin/m4
CPPFLAGS=-I${openssl:location}/include -I${mariadb:location}/include
LDFLAGS = -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${mariadb:location}/lib -Wl,-rpath=${mariadb:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
MYSQL_CONFIG=${mariadb:location}/bin/mysql_config
ACLOCAL_PATH=${pkgconfig:location}/share/aclocal:${libtool:location}/share/aclocal
\ No newline at end of file
[buildout]
extends =
../dcron/buildout.cfg
../libxml2/buildout.cfg
../logrotate/buildout.cfg
../rdiff-backup/buildout.cfg
../nodejs/buildout.cfg
parts =
nodejs
npm
cloud9
[cloud9]
<= cloud9-git
[node-sm]
recipe = slapos.recipe.build:npm
packages = sm@0.2.11
node = nodejs
environment =
PATH=${nodejs-0.6:location}/bin:%(PATH)s
[cloud9-stable]
# Online IDE written in javascript/node.js
# URL : c9.io
# You can run it with the following command:
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = plone.recipe.command
stop-on-error = true
commit = 5d18344936baf1d86b0fa5fc2c690051b4c77cb2
repository = https://github.com/ajaxorg/cloud9.git
location = ${buildout:parts-directory}/${:_buildout_section_name_}
git-binary = ${git:location}/bin/git
npm-binary = ${nodejs-0.4:location}/bin/node ${npm:location}/bin/npm
command = export GIT_SSL_NO_VERIFY=true; export HOME=${:location}; (${:git-binary} clone --quiet ${:repository} ${:location} && cd ${:location} && ${:git-binary} reset --hard ${:commit} && ${:git-binary} submodule update --init && cd support/jsdav && PATH=${nodejs-0.4:location}/bin:$PATH LDFLAGS=-L${libxml2:location}/lib ${:npm-binary} install) || (rm -fr ${:location}; exit 1)
update-command =
executable = ${:location}/bin/cloud9.js
[cloud9-session-directory.patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = cloud9-session-directory.patch
download-only = true
md5sum = 5dc8cc28447ed3747b8a53c768d872aa
[cloud9-socket.patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = cloud9-socket.patch
download-only = true
md5sum = c581456cb3a76841898f79f9600e3a1e
[cloud9-file-already-exist.patch]
# This patch prevents the error "File already exists"
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = cloud9-file_already_exist.patch
download-only = true
md5sum = 0bc104af8176388d60cbf884b72c8318
[cloud9-remove-all-listeners.patch]
# This patch prevents cloud9 from dying every
# time a tab is closed
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = cloud9-removeAllListeners.patch
download-only = true
md5sum = 357915330d677f4917140e02a55646b7
[cloud9-git-download]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/ajaxorg/cloud9.git
revision = f7d102bc225c922f116d2cea52a746d64343ea59
location = ${buildout:parts-directory}/cloud9
git-executable = ${git:location}/bin/git
develop = true
use-cache = true
ignore-ssl-certificate = true
[cloud9-git]
# Online IDE written in javascript/node.js
# URL : c9.io
# You can run it with the following command (see the resolved example after this section):
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = plone.recipe.command
stop-on-error = true
environment = export GIT_SSL_NO_VERIFY=true; export PATH=${git:location}/bin:${nodejs-0.6:location}/bin:${node-sm:location}/node_modules/.bin/:$PATH; export CPPFLAGS="-I${libxml2:location}/include -I${nodejs-0.6:location}/include"; export LDFLAGS="-L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib"; export HOME=${cloud9-git-download:location};
command = ${:environment} (cd ${cloud9-git-download:location} && ${node-sm:location}/node_modules/.bin/sm install && patch -p1 < ${cloud9-session-directory.patch:location}/${cloud9-session-directory.patch:filename} && patch -p1 < ${cloud9-socket.patch:location}/${cloud9-socket.patch:filename} && patch -p1 < ${cloud9-file-already-exist.patch:location}/${cloud9-file-already-exist.patch:filename} && patch -p1 < ${cloud9-remove-all-listeners.patch:location}/${cloud9-remove-all-listeners.patch:filename}) || (rm -fr ${cloud9-git-download:location}; exit 1)
update-command = true
executable = ${cloud9-git-download:location}/server.js
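To make the comment above concrete, a launch sketch with the buildout variables replaced by hypothetical resolved paths; the -l and -p flags match the argv.l / argv.p options read in configs/default.js, which the session-directory patch further down makes store sessions under $HOME:

#!/bin/sh
# Hypothetical invocation of the cloud9 server built by [cloud9-git].
export NODE_PATH=/srv/slapos/parts/cloud9/node_modules
export HOME=/srv/slapos/instance/cloud9   # session files end up in $HOME/.sessions
exec /srv/slapos/parts/nodejs-0.6/bin/node /srv/slapos/parts/cloud9/server.js -l "$LISTEN_IP" -p 3131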
[cloud9-npm]
# Online IDE written in javascript/node.js
# URL : c9.io
# You can run it with the following command:
# NODE_PATH=${:destination}/node_modules ${nodejs:node_location} ${:cloud9_js_location}
recipe = slapos.recipe.npm
# Node part has to be specified, otherwise system node is used.
node = nodejs
# List of packages to install
packages =
cloud9==0.7
# Specify environment: jsDAV (a dependency of cloud9) needs libxml2
environment =
LDFLAGS=-L${libxml2:location}/lib -Wl,-rpath=${libxml2:location}/lib
diff --git a/node_modules/vfs-local/localfs.js b/node_modules/vfs-local/localfs.js
index 7ce9981..1dd07b7 100644
--- a/node_modules/vfs-local/localfs.js
+++ b/node_modules/vfs-local/localfs.js
@@ -677,7 +677,7 @@ module.exports = function setup(fsOptions) {
else {
var err = new Error("File already exists.");
err.code = "EEXIST";
- callback(err);
+ //callback(err);
}
});
});
diff --git a/plugins-server/cloud9.ide.watcher/file_watcher.js b/plugins-server/cloud9.ide.watcher/file_watcher.
index b7ed7da..36dcd05 100644
--- a/plugins-server/cloud9.ide.watcher/file_watcher.js
+++ b/plugins-server/cloud9.ide.watcher/file_watcher.js
@@ -69,11 +69,11 @@ util.inherits(FileWatcher, EventEmitter);
this.close = function() {
if (this.watcher) {
- this.watcher.removeAllListeners();
+ //this.watcher.removeAllListeners();
this.watcher.close();
this.emit("close");
}
this.watcher = null;
};
-}).call(FileWatcher.prototype);
\ No newline at end of file
+}).call(FileWatcher.prototype);
diff --git a/configs/default.js b/configs/default.js
index 6d1c85f..be35b37 100644
--- a/configs/default.js
+++ b/configs/default.js
@@ -22,6 +22,8 @@ var vfsUrl = "/vfs";
var port = argv.p || process.env.PORT || 3131;
var host = argv.l || process.env.IP || "localhost";
+var home = process.env['HOME']
+
var config = [
{
packagePath: "connect-architect/connect",
@@ -167,7 +169,7 @@ var config = [
},
{
packagePath: "connect-architect/connect.session.file",
- sessionsPath: __dirname + "/../.sessions"
+ sessionsPath: home + "/.sessions"
},
"./cloud9.permissions",
{
\ No newline at end of file
diff --git a/node_modules/smith.io/node_modules/engine.io/node_modules/engine.io-client/dist/engine.io-dev.js b/node_modules/smith.io/node_modules/engine.io/node_modules/engine.io-client/dist/engine.io-dev.js
index fa7e54a..14b8e67 100644
--- a/node_modules/smith.io/node_modules/engine.io/node_modules/engine.io-client/dist/engine.io-dev.js
+++ b/node_modules/smith.io/node_modules/engine.io/node_modules/engine.io-client/dist/engine.io-dev.js
@@ -2126,7 +2126,7 @@ Polling.prototype.uri = function () {
query = '?' + query;
}
- return schema + '://' + this.host + port + this.path + query;
+ return this.path + query;
};
});require.register("transports/websocket.js", function(module, exports, require, global){
[buildout]
parts =
condor
extends =
../libexpat/buildout.cfg
../kerberos/buildout.cfg
../perl/buildout.cfg
../java/buildout.cfg
../openldap/buildout.cfg
../dash/buildout.cfg
../zlib/buildout.cfg
[condor]
recipe = slapos.recipe.build
slapos_promise =
directory:bin
directory:etc
directory:examples
directory:include
directory:lib
directory:libexec
directory:man
directory:src
directory:sbin
depends =
${libexpat:location}
${kerberos:location}
${perl:location}
${openldap:location}
${java:location}
${dash:location}
${zlib:location}
x86 = http://parrot.cs.wisc.edu//symlink/20130530031504/7/7.9/7.9.5/28b36a94ad1a405bac689a76b6c353a7/condor-7.9.5-x86_Debian6-unstripped.tar.gz 227059bb9bebc9033665bb246d90a7bb
x86-64 = http://parrot.cs.wisc.edu//symlink/20130530031504/7/7.9/7.9.5/7000a12f9a22765b457e2958762871d5/condor-7.9.5-x86_64_Debian6-stripped.tar.gz 30fa4cebabb8ff4971c769f4ef74ba68
install =
url, md5sum = self.options[guessPlatform()].split()
extract_dir = self.extract(self.download(url, md5sum))
workdir = guessworkdir(extract_dir)
self.copyTree(workdir, location)
[buildout]
extends =
../openssl/buildout.cfg
../curl/buildout.cfg
../erlang/buildout.cfg
../icu/buildout.cfg
../spidermonkey/buildout.cfg
parts = couchdb
[couchdb]
recipe = slapos.recipe.cmmi
url = http://mir2.ovh.net/ftp.apache.org/dist//couchdb/1.1.1/apache-couchdb-1.1.1.tar.gz
md5sum = cd126219b9cb69a4c521abd6960807a6
configure-options =
--with-erlang=${erlang:location}/lib/erlang/usr/include
--with-js-include=${spidermonkey-1.7:location}/include
--with-js-lib=${erlang:location}/lib
--enable-js-trunk
environment-section = couchdb-environ
[couchdb-environ]
PATH = ${erlang:location}/bin:${icu4c:location}/bin:${curl:location}/bin:%(PATH)s
# XXX: Gotta put everything on the same line. If not, it won't compile
CFLAGS = -I${icu4c:location}/include -I${spidermonkey-1.7:location}/include -I${curl:location}/include
LDFLAGS = -L${icu4c:location}/lib -Wl,-rpath=${icu4c:location}/lib -L${spidermonkey-1.7:location}/lib -Wl,-rpath=${spidermonkey-1.7:location}/lib -L${curl:location}/lib -Wl,-rpath=${curl:location}/lib
[buildout]
extends =
../librsync/buildout.cfg
../patch/buildout.cfg
parts =
duplicity
[duplicity]
recipe = slapos.recipe.build
url = http://code.launchpad.net/duplicity/0.6-series/0.6.15/+download/duplicity-0.6.15.tar.gz
md5sum = 88f3c990f41fde86cd7d5af5a1bc7b81
patch = ${:_profile_base_location_}/duplicity-ipv6.patch 32c44816a9a59401e233ef622bf50223
script =
import os
import tempfile
import sys
workingdir = guessworkdir(self.extract(self.download(%(url)r, %(md5sum)r)))
os.chdir(workingdir)
self.applyPatchList('${:patch}', patch_binary='${patch:location}/bin/patch', patch_options='-p0', cwd=workingdir)
call([sys.executable, os.path.join(workingdir, 'setup.py'),
'build_ext',
'--include-dirs', os.path.join('${librsync:location}', 'include'),
'--library-dirs', os.path.join('${librsync:location}', 'lib'),
'--rpath', os.path.join('${librsync:location}', 'lib')])
call([sys.executable, os.path.join(workingdir, 'setup.py'),
'install',
'--prefix', %(location)r])
os.chdir(os.path.join(%(location)r, 'bin'))
paths = [os.path.join(%(location)r, 'lib', 'python%%s.%%s' %% sys.version_info[:2], 'site-packages')]
from zc.buildout.easy_install import scripts
scripts([('duplicity', '__builtin__', 'execfile')], [], sys.executable, '${buildout:bin-directory}', arguments='%%r' %% os.path.abspath('duplicity'), extra_paths=paths)
scripts([('rdiffdir', '__builtin__', 'execfile')], [], sys.executable, '${buildout:bin-directory}', arguments='%%r' %% os.path.abspath('rdiffdir'), extra_paths=paths)
=== modified file 'src/urlparse_2_5.py'
--- src/urlparse_2_5.py 2011-03-06 15:12:33 +0000
+++ src/urlparse_2_5.py 2011-08-31 14:21:06 +0000
@@ -109,18 +109,19 @@
password = property(get_password)
def get_hostname(self):
- netloc = self.netloc
- if "@" in netloc:
- netloc = _rsplit(netloc, "@", 1)[1]
- if ":" in netloc:
- netloc = netloc.split(":", 1)[0]
- return netloc.lower() or None
+ netloc = self.netloc.split('@')[-1]
+ if '[' in netloc and ']' in netloc:
+ return netloc.split(']')[0][1:].lower()
+ elif ':' in netloc:
+ return netloc.split(':')[0].lower()
+ elif netloc == '':
+ return None
+ else:
+ return netloc.lower()
hostname = property(get_hostname)
def get_port(self):
- netloc = self.netloc
- if "@" in netloc:
- netloc = _rsplit(netloc, "@", 1)[1]
+ netloc = self.netloc.split('@')[-1].split(']')[-1]
if ":" in netloc:
port = netloc.split(":", 1)[1]
return int(port, 10)
[buildout]
extends =
../openssl/buildout.cfg
../ncurses/buildout.cfg
../zlib/buildout.cfg
../java/buildout.cfg
parts = erlang
[erlang]
recipe = slapos.recipe.cmmi
url = http://www.erlang.org/download/otp_src_R14B04.tar.gz
md5sum = 4b469729f103f52702bfb1fb24529dc0
configure-options =
--with-ssl=${openssl:location}
--with-java
--enable-dynamic-ssl-lib
--enable-shared-zlib
--enable-native-libs
environment=
PATH=${java-sdk-1.6.0:location}/bin:%(PATH)s
CFLAGS=-I${ncurses:location}/include -I${zlib:location}/include/
LDFLAGS=-L${ncurses:location}/lib/ -L${zlib:location}/lib/ -Wl,-rpath=${ncurses:location}/lib/ -Wl,-rpath=${zlib:location}/lib/
make-options =
-j1
[buildout]
extends =
../lxml-python/buildout.cfg
../python-kerberos/buildout.cfg
../git/buildout.cfg
develop =
gateone-repository
parts =
gateone-develop
gateone
[gateone-repository]
recipe = slapos.recipe.build:gitclone
repository = https://github.com/liftoff/GateOne.git
branch = master
git-executable = ${git:location}/bin/git
[gateone-develop]
recipe = zc.recipe.egg:develop
setup = ${gateone-repository:location}
[gateone]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
${python-kerberos:egg}
tornado
setuptools
pyOpenSSL
futures
PIL
gateone
[buildout]
parts =
hookbox
versions = versions
[versions]
# special version of hookbox supporting history_duration option
# https://github.com/fdiary/hookbox
hookbox = 0.3.4nxd001
[hookbox]
recipe = zc.recipe.egg
eggs =
hookbox
scripts =
hookbox
--- ATLAS/CONFIG/src/config.c.orig 2014-07-10 18:22:02.000000000 +0200
+++ ATLAS/CONFIG/src/config.c 2014-08-22 13:28:04.990976246 +0200
@@ -455,10 +455,10 @@
if (OSIsWin(OS))
OSextra = "-lgcc";
- i = strlen(path) + strlen(libnam) + strlen(OSextra) + 2 + 2 + 1 + 1 + 1;
+ i = strlen(path) * 2 + strlen(libnam) + strlen(OSextra) + 2 + 11 + 2 + 1 + 1 + 1 + 1;
sp = malloc(i*sizeof(char));
assert(sp);
- sprintf(sp, "-L%s -l%s %s", path, libnam, OSextra);
+ sprintf(sp, "-L%s -Wl,-rpath=%s -l%s %s", path, path, libnam, OSextra);
return(sp);
}
[buildout]
extends =
../patch/buildout.cfg
parts =
libatlas
[lapack-download]
recipe = hexagonit.recipe.download
ignore-existing = true
version = 3.5.0
filename = lapack-${:version}.tgz
url = http://www.netlib.org/lapack/${:filename}
md5sum = b1d3e3e425b2e44a06760ff173104bdf
download-only = true
[libatlas]
recipe = slapos.recipe.cmmi
version = 3.10.2
url = http://downloads.sourceforge.net/project/math-atlas/Stable/${:version}/atlas${:version}.tar.bz2
md5sum = a4e21f343dec8f22e7415e339f09f6da
# http://stackoverflow.com/questions/14592401/atlas-install-really-need-to-get-past-cpu-throttle-check
# http://sourceforge.net/p/math-atlas/support-requests/886/
patches =
${:_profile_base_location_}/skip-throttle-check.patch#17c8471d67c99fac80ace05273ce0817
${:_profile_base_location_}/add-rpath.patch#44672b68960a14b3fb4970cd1e76f8fe
patch-options = -p1
configure-command =
mkdir build
cd build
../configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-netlib-lapack-tarfile=${lapack-download:location}/${lapack-download:filename}
--shared
-b "$(uname -m | grep -q 64 && echo 64 || echo 32)"
-Fa alg '-fPIC'
make-options =
-C build
environment =
PATH=${patch:location}/bin:%(PATH)s
diff --git a/CONFIG/src/probe_arch.c b/CONFIG/src/probe_arch.c
index 75edef9..f440bf6 100644
--- a/CONFIG/src/probe_arch.c
+++ b/CONFIG/src/probe_arch.c
@@ -238,8 +238,7 @@ int main(int nargs, char **args)
printf("CPU MHZ=%d\n",
ProbeOneInt(OS, asmd, targ, "-m", "CPU MHZ=", &sure));
if (flags & Pthrottle)
- printf("CPU THROTTLE=%d\n",
- ProbeOneInt(OS, asmd, targ, "-t", "CPU THROTTLE=", &sure));
+ printf("CPU THROTTLE=0\n");
if (flags & P64)
{
if (asmd == gas_x86_64)
[buildout]
parts =
libmemcached
[libmemcached]
<= libmemcached-0.50
[libmemcached-0.50]
<= libmemcached-common
url = http://launchpad.net/libmemcached/1.0/0.50/+download/libmemcached-0.50.tar.gz
md5sum = c8627014a37cd821cf93317b8de6f9f8
[libmemcached-0.44]
<= libmemcached-common
url = http://launchpad.net/libmemcached/1.0/0.44/+download/libmemcached-0.44.tar.gz
md5sum = e6bd825c46fa080b550f90f9001cba8c
[libmemcached-common]
recipe = slapos.recipe.cmmi
configure-options =
--without-docs
--without-memcached
--without-libgtest-prefix
--without-libevent-prefix
--without-libinnodb-prefix
--without-libsasl-prefix
--without-libsasl2-prefix
[buildout]
extends =
../patch/buildout.cfg
../attr/buildout.cfg
../libcap/buildout.cfg
parts = lxc
[lxc]
<= lxc-0.8
[lxc-0.8.0-rc2-libexecdir-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = libexecdir-fix.patch
download-only = true
md5sum = d674463ccb3a7c205c2326fb4ab5436b
[lxc-0.8.0-rc2-lxc-ls-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = lxc-ls-fix.patch
download-only = true
md5sum = 28c155a554d4f4856351085494585c73
[lxc-0.8.0-rc2-cap_get_flag-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
filename = cap_get_flag-fix.patch
download-only = true
md5sum = 8d3706a9bd60b6ebfea33998644b1a99
[lxc-0.8]
recipe = slapos.recipe.cmmi
url = http://lxc.sourceforge.net/download/lxc/lxc-0.8.0-rc2.tar.gz
md5sum = 9bd6988542fd7dd198d056ef3a2db9f6
patch-binary = ${patch:location}/bin/patch
patch-options = -p1
patches =
${lxc-0.8.0-rc2-cap_get_flag-patch:location}/${lxc-0.8.0-rc2-cap_get_flag-patch:filename}
${lxc-0.8.0-rc2-libexecdir-patch:location}/${lxc-0.8.0-rc2-libexecdir-patch:filename}
${lxc-0.8.0-rc2-lxc-ls-patch:location}/${lxc-0.8.0-rc2-lxc-ls-patch:filename}
environment =
PATH=%(PATH)s:${attr:location}/bin/:${libcap:location}/sbin/
CFLAGS=-I${libcap:location}/include
LDFLAGS=-L${libcap:location}/lib/ -Wl,-rpath=${libcap:location}/lib/
commit 94767c5249b5802a894f6d84f6245ef86f50bff3
Author: Serge Hallyn <serge.hallyn@ubuntu.com>
Date: Fri Jun 29 10:37:07 2012 -0500
Fix lxc's handling of CAP_LAST_CAP
CAP_LAST_CAP in linux/capability.h doesn't always match what the kernel
actually supports. If the kernel supports fewer capabilities, then a
cap_get_flag for an unsupported capability returns -EINVAL.
Recognize that, and don't fail when initializing capabilities when this
happens, rather accept that we've reached the last capability.
Signed-off-by: Serge Hallyn <serge.hallyn@ubuntu.com>
diff --git a/src/lxc/caps.c b/src/lxc/caps.c
index 10a0b4a..c32e7e4 100644
--- a/src/lxc/caps.c
+++ b/src/lxc/caps.c
@@ -28,6 +28,7 @@
#include <limits.h>
#include <sys/prctl.h>
#include <sys/capability.h>
+#include <errno.h>
#include "log.h"
@@ -90,6 +91,7 @@ int lxc_caps_up(void)
cap_t caps;
cap_value_t cap;
int ret;
+ int lastcap = 0;
/* when we are run as root, we don't want to play
* with the capabilities */
@@ -108,9 +110,15 @@ int lxc_caps_up(void)
ret = cap_get_flag(caps, cap, CAP_PERMITTED, &flag);
if (ret) {
- ERROR("failed to cap_get_flag: %m");
- goto out;
+ if (errno == EINVAL) {
+ INFO("Last supported cap was %d\n", cap-1);
+ break;
+ } else {
+ ERROR("failed to cap_get_flag: %m");
+ goto out;
+ }
}
+ lastcap = cap;
ret = cap_set_flag(caps, CAP_EFFECTIVE, 1, &cap, flag);
if (ret) {
commit 00ad19d4dba5c05401125d4217dc8f4e7fd9403a
Author: David Ward <david.ward@ll.mit.edu>
Date: Fri May 4 00:50:15 2012 +0200
lxc-setcap/lxc-setuid: add autoconf expansion for $libexecdir
Support new default location for LXCINITDIR.
Signed-off-by: David Ward <david.ward@ll.mit.edu>
Signed-off-by: Daniel Lezcano <dlezcano@fr.ibm.com>
diff --git a/configure.ac b/configure.ac
index c2bf4b0..0c8aa69 100644
--- a/configure.ac
+++ b/configure.ac
@@ -40,6 +40,7 @@ AM_CONDITIONAL([ENABLE_EXAMPLES], [test "x$enable_examples" = "xyes"])
AS_AC_EXPAND(PREFIX, $prefix)
AS_AC_EXPAND(LIBDIR, $libdir)
AS_AC_EXPAND(BINDIR, $bindir)
+AS_AC_EXPAND(LIBEXECDIR, $libexecdir)
AS_AC_EXPAND(INCLUDEDIR, $includedir)
AS_AC_EXPAND(SYSCONFDIR, $sysconfdir)
AS_AC_EXPAND(DATADIR, $datadir)
diff --git a/src/lxc/lxc-setcap.in b/src/lxc/lxc-setcap.in
index 52d4b48..71e3710 100644
--- a/src/lxc/lxc-setcap.in
+++ b/src/lxc/lxc-setcap.in
@@ -84,6 +84,7 @@ lxc_dropcaps()
shortoptions='hd'
longoptions='help'
libdir=@LIBDIR@
+libexecdir=@LIBEXECDIR@
localstatedir=@LOCALSTATEDIR@
getopt=$(getopt -o $shortoptions --longoptions $longoptions -- "$@")
diff --git a/src/lxc/lxc-setuid.in b/src/lxc/lxc-setuid.in
index 0919eac..020dfae 100644
--- a/src/lxc/lxc-setuid.in
+++ b/src/lxc/lxc-setuid.in
@@ -81,6 +81,7 @@ lxc_dropuid()
shortoptions='hd'
longoptions='help'
libdir=@LIBDIR@
+libexecdir=@LIBEXECDIR@
localstatedir=@LOCALSTATEDIR@
getopt=$(getopt -o $shortoptions --longoptions $longoptions -- "$@")
commit 7ef0141356454503ab81460290b5dffa32c1f441
Author: David Ward <david.ward@ll.mit.edu>
Date: Fri May 4 00:50:15 2012 +0200
refresh lxc-ls
Add an '--active' option that lists active containers by searching
cgroups. (Otherwise, the directories in /var/lib/lxc are listed.)
Modify the cgroup search to only use hierarchies that contain one
or more subsystems. When searching, if a hierarchy contains the
'ns' subsystem, do not append '/lxc' to the parent cgroup.
Add a '--help' option that prints the command syntax.
Print error messages and help information to stderr.
Update the documentation.
Signed-off-by: David Ward <david.ward@ll.mit.edu>
Signed-off-by: Daniel Lezcano <dlezcano@fr.ibm.com>
diff --git a/doc/lxc-ls.sgml.in b/doc/lxc-ls.sgml.in
index 3ffd4f8..d33e9b3 100644
--- a/doc/lxc-ls.sgml.in
+++ b/doc/lxc-ls.sgml.in
@@ -48,7 +48,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
<refsynopsisdiv>
<cmdsynopsis>
- <command>lxc-ls <optional>ls option</optional>
+ <command>lxc-ls <optional>--active</optional> <optional>ls option</optional>
</command>
</cmdsynopsis>
</refsynopsisdiv>
@@ -67,6 +67,17 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
<varlistentry>
<term>
+ <option><optional>--active</optional></option>
+ </term>
+ <listitem>
+ <para>
+ List active containers.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>
<option><optional>ls options</optional></option>
</term>
<listitem>
@@ -94,10 +105,10 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
</varlistentry>
<varlistentry>
- <term>lxc-ls -1</term>
+ <term>lxc-ls --active -1</term>
<listitem>
<para>
- list all the containers and display the list in one column.
+ list active containers and display the list in one column.
</para>
</listitem>
</varlistentry>
diff --git a/src/lxc/lxc-ls.in b/src/lxc/lxc-ls.in
index a1ad642..11a3b45 100644
--- a/src/lxc/lxc-ls.in
+++ b/src/lxc/lxc-ls.in
@@ -1,43 +1,100 @@
#!/bin/bash
-localstatedir=@LOCALSTATEDIR@
-lxcpath=@LXCPATH@
+#
+# lxc: linux Container library
-if [ ! -r $lxcpath ]; then
- exit 0
-fi
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+localstatedir=@LOCALSTATEDIR@
+lxc_path=@LXCPATH@
-function get_cgroup()
+usage()
{
- local mount_string
- mount_string=$(mount -t cgroup |grep -E -e '^lxc ')
- if test -n "$mount_string"; then
- mount_point=$(echo $mount_string |cut -d' ' -f3)
- return
- fi
- mount_string=`grep -m1 -E '^[^ \t]+[ \t]+[^ \t]+[ \t]+cgroup' /proc/self/mounts`;
- if test -z "$mount_string"; then
- echo "failed to find mounted cgroup"
- exit 1
- fi
- mount_point=`echo "$mount_string" |cut -d' ' -f2`;
+ echo "usage: $(basename $0) [--active] [--] [LS_OPTIONS...]" >&2
}
-ls "$@" $lxcpath
+help() {
+ usage
+ echo >&2
+ echo "List containers existing on the system." >&2
+ echo >&2
+ echo " --active list active containers" >&2
+ echo " LS_OPTIONS ls command options (see \`ls --help')" >&2
+}
+
+get_parent_cgroup()
+{
+ local hierarchies hierarchy fields subsystems init_cgroup mountpoint
+
+ parent_cgroup=""
+
+ # Obtain a list of hierarchies that contain one or more subsystems
+ hierarchies=$(tail -n +2 /proc/cgroups | cut -f 2)
-active=$(netstat -xl 2>/dev/null | grep $lxcpath | \
- sed -e 's#.*'"$lxcpath/"'\(.*\)/command#\1#');
+ # Iterate through the list until a suitable hierarchy is found
+ for hierarchy in $hierarchies; do
+ # Obtain information about the init process in the hierarchy
+ fields=$(grep -E "^$hierarchy:" /proc/1/cgroup | head -n 1)
+ if [ -z "$fields" ]; then continue; fi
+ fields=${fields#*:}
-if test -n "$active"; then
- get_cgroup
- if test -n "$mount_point"; then
- # get cgroup for init
- init_cgroup=`cat /proc/1/cgroup | awk -F: '{ print $3 }' | head -1`
- if [ ! -d $mount_point/$init_cgroup/lxc ]; then
- cd $mount_point/$init_cgroup
+ # Get a comma-separated list of the hierarchy's subsystems
+ subsystems=${fields%:*}
+
+ # Get the cgroup of the init process in the hierarchy
+ init_cgroup=${fields#*:}
+
+ # Get the filesystem mountpoint of the hierarchy
+ mountpoint=$(grep -E "^cgroup [^ ]+ [^ ]+ ([^ ]+,)?$subsystems(,[^ ]+)? " /proc/self/mounts | cut -d ' ' -f 2)
+ if [ -z "$mountpoint" ]; then continue; fi
+
+ # Return the absolute path to the containers' parent cgroup
+ # (do not append '/lxc' if the hierarchy contains the 'ns' subsystem)
+ if [[ ",$subsystems," == *,ns,* ]]; then
+ parent_cgroup="${mountpoint}${init_cgroup%/}"
else
- cd $mount_point/$init_cgroup/lxc
+ parent_cgroup="${mountpoint}${init_cgroup%/}/lxc"
fi
- ls "$@" -d $active
- fi
+ break
+ done
+}
+
+directory="$lxc_path"
+
+for i in "$@"; do
+ case $i in
+ --help)
+ help; exit 1;;
+ --active)
+ get_parent_cgroup; directory="$parent_cgroup"; shift;;
+ --)
+ shift; break;;
+ *)
+ break;;
+ esac
+done
+
+containers=""
+if [ ! -z "$directory" ]; then
+ containers=$(find $directory -mindepth 1 -maxdepth 1 -type d -printf "%f\n" 2>/dev/null)
fi
+
+if [ -z "$containers" ]; then
+ echo "$(basename $0): no containers found" >&2
+ exit 1
+fi
+
+cd "$directory"
+ls -d $@ $containers
[buildout]
extends =
../curl/buildout.cfg
../libevent/buildout.cfg
../erlang/buildout.cfg
parts =
membase-source
membase-memcached
membase-libmemcached
membase-libconflate
membase-libvbucket
membase-moxi
membase-bucket_engine
membase-ep-engine
membase-membase-cli
membase-memcachetest
membase-ns_server
membase-vbucketmigrator
[membase]
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[membase-source]
<= membase-1.7.0-source
[membase-1.7.0-source]
<= membase-source-common
url = http://files.couchbase.com/source/membase-server_src-1.7.0.tar.gz
md5sum = c933fffea299d00e43b002cb65738663
[membase-source-common]
recipe = hexagonit.recipe.download
ignore-existing = true
strip-top-level-dir = true
[membase-component-common]
recipe = slapos.recipe.cmmi
path = ${membase-source:location}/${:component}
prefix = ${membase:location}
configure-options =
--prefix=${:prefix}
${:component-configure-options}
[membase-libmemcached]
<= membase-component-common
component = libmemcached
component-configure-options =
--disable-static
--enable-shared
--disable-dtrace
--without-docs
--disable-sasl
--with-memcached=${membase:location}/bin/memcached
[membase-memcached]
<= membase-component-common
component = memcached
patches =
configure-command =
./configure
component-configure-options =
--enable-isasl
[membase-libconflate]
<= membase-component-common
component = libconflate
component-configure-options =
--disable-static
--enable-shared
--without-check
--with-libcurl-prefix=${curl:location}
[membase-libvbucket]
<= membase-component-common
component = libvbucket
component-configure-options =
--disable-static
--enable-shared
--without-docs
--with-libhashkit-prefix=${membase:location}
[membase-moxi]
<= membase-component-common
component = moxi
component-configure-options =
--enable-moxi-libvbucket
--enable-moxi-libmemcached
--without-check
--with-libevent-prefix=${libevent:location}
--with-libmemcached-prefix=${membase:location}
--with-memcached=${membase:location}/bin/memcached
--with-libhashkit-prefix=${membase:location}
--with-libconflate-prefix=${membase:location}
--with-libvbucket-prefix=${membase:location}
[membase-bucket_engine]
<= membase-component-common
component = bucket_engine
component-configure-options =
--with-memcached=${membase-source:location}/memcached
[membase-ep-engine]
<= membase-component-common
component = ep-engine
component-configure-options =
--with-memcached=${membase-source:location}/memcached
[membase-membase-cli]
<= membase-component-common
component = membase-cli
patches =
configure-command =
./configure
component-configure-options =
[membase-memcachetest]
<= membase-component-common
component = memcachetest
component-configure-options =
--with-memcached=${membase:location}/bin/memcached
[membase-ns_server]
<= membase-component-common
component = ns_server
patches =
configure-command =
./configure
component-configure-options =
environment =
PATH=${erlang:location}/bin:%(PATH)s
[membase-vbucketmigrator]
<= membase-component-common
component = vbucketmigrator
component-configure-options =
--without-sasl
--with-isasl
[buildout]
extends =
../libmemcached/buildout.cfg
../git/buildout.cfg
../autoconf/buildout.cfg
../automake/buildout.cfg
../libtool/buildout.cfg
parts =
memstrike
[memstrike]
recipe = slapos.recipe.cmmi
path = ${memstrikesource:location}
configure-command =
./bootstrap
./configure
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-libmemcached=${libmemcached:location}
environment =
PATH =${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:%(PATH)s
LDFLAGS =-Wl,-rpath=${libmemcached:location}/lib
[memstrikesource]
recipe=plone.recipe.command
location = ${buildout:parts-directory}/${:_buildout_section_name_}
stop-on-error = true
revision = 39d7a99e8bb7eea6df8b
command =
rm -rf ${:location} &&
${git:location}/bin/git clone --quiet git://github.com/frsyuki/memstrike.git ${:location} &&
cd ${:location} &&
${git:location}/bin/git checkout --quiet ${:revision}
[buildout]
extends =
../cmake/buildout.cfg
../glib/buildout.cfg
../pkgconfig/buildout.cfg
../openssl/buildout.cfg
../pcre/buildout.cfg
../mariadb/buildout.cfg
parts = mydumper
# XXX-Antoine:
# This is really dirty, but it's the only way to install
# mydumper that works
[mydumper]
recipe = slapos.recipe.build
url = https://launchpad.net/mydumper/0.5/0.5.1/+download/mydumper-0.5.1.tar.gz
md5sum = b4df42dfe23f308ab13eb6ecb73a0d21
buildout-bin-dir = ${buildout:bin-directory}
cmake-command = ${cmake:location}/bin/cmake
mysql-config = ${mariadb:location}/bin/mysql_config
mysqllib = ${mariadb:location}/lib
path = ${pkgconfig:location}/bin
pkg-config-path = ${glib:location}/lib/pkgconfig/:${pcre:location}/lib/pkgconfig/:${openssl:location}/lib/pkgconfig/
libraries = ${zlib:location}/lib/:${glib:location}/lib/:${pcre:location}/lib/:${mariadb:location}/lib/:${openssl:location}/lib/
includes = ${zlib:location}/include/:${glib:location}/include/:${pcre:location}/include/:${mariadb:location}/include:${openssl:location}/include/
cflags = -I${zlib:location}/include/ -I${glib:location}/include/ -I${pcre:location}/include/ -I${mariadb:location}/include -I${openssl:location}/include/
mysql-include-dir = ${mariadb:location}/include
mydumper-patches =
${:_profile_base_location_}/mydumper-remove-warnings-errors.patch 917fea16b5ddea195cfa33fbd9827f57 -p1
slapos_promise =
directory:bin
file:bin/mydumper
file:bin/myloader
script =
import os
url = self.download(self.options['url'], self.options.get('md5sum'))
extract_dir = self.extract(url)
workdir = guessworkdir(extract_dir)
self.applyPatchList(self.options['mydumper-patches'], cwd=workdir)
env['PATH'] = self.options['path'] + ':' + self.options['buildout-bin-dir'] + ':' + env.get('PATH', '')
env['PKG_CONFIG_PATH'] = self.options['pkg-config-path'] + ':' + \
env.get('PKG_CONFIG_PATH', '')
env['CMAKE_INCLUDE_PATH'] = self.options['includes']
env['CMAKE_LIBRARY_PATH'] = self.options['libraries']
env['CFLAGS'] = self.options['cflags']
command_line = [self.options['cmake-command'],
'-DCMAKE_INSTALL_PREFIX=%%s' %% self.options['location'],
'-DMYSQL_CONFIG=%%s' %% self.options['mysql-config'],
'-DCMAKE_C_FLAGS=%%s' %% self.options['cflags'],
'-DCMAKE_INSTALL_RPATH=%%s' %% self.options['libraries'],
'-DMYSQL_INCLUDE_DIR=%%s' %% self.options['mysql-include-dir'],
'-DBUILD_DOCS=OFF',
'.']
call(command_line, cwd=workdir, env=env)
call(['make'], cwd=workdir, env=env)
call(['make', 'install'], cwd=workdir, env=env)
# XXX-Antoine: here's what I did using slapos.recipe.cmmi,
# and it wasn't working!
#[mydumper]
#recipe = slapos.recipe.cmmi
#url = http://launchpad.net/mydumper/0.2/0.2.3/+download/mydumper-0.2.3.tar.gz
#md5sum = 36e6a1c97a9634a6882ddaac5e2697d5
#strip-top-level-dir = true
#location = ${buildout:parts-directory}/${:_buildout_section_name_}
#configure-command =
# ${cmake:location}/bin/cmake \
# -DCMAKE_INSTALL_PREFIX=${:location} \
# -DMYSQL_CONFIG=${mariadb:location}/bin/mysql_config \
# -DCMAKE_INCLUDE_PATH=${zlib:location}/include \
# -DCMAKE_LIBRARY_PATH=${zlib:location}/lib \
# .
#environment=
# PATH=$PATH:${buildout:bin-directory}
# PKG_CONFIG_PATH=${glib:location}/lib/pkgconfig/:${pcre:location}/lib/pkgconfig/
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -12,7 +12,7 @@
add_subdirectory(docs)
-set(CMAKE_C_FLAGS "-Wall -Wunused -Wwrite-strings -Wno-strict-aliasing -Wextra -Wshadow -Werror -O3 -g ${MYSQL_CFLAGS}")
+set(CMAKE_C_FLAGS "-Wall -Wunused -Wwrite-strings -Wno-strict-aliasing -Wextra -Wshadow -O3 -g ${MYSQL_CFLAGS}")
include_directories(${MYDUMPER_SOURCE_DIR} ${MYSQL_INCLUDE_DIR} ${GLIB2_INCLUDE_DIR} ${PCRE_INCLUDE_DIR} ${ZLIB_INCLUDE_DIRS})
[buildout]
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
../zlib/buildout.cfg
../bison/buildout.cfg
../flex/buildout.cfg
../groonga/buildout.cfg
../libtool/buildout.cfg
../ncurses/buildout.cfg
../pkgconfig/buildout.cfg
../readline/buildout.cfg
parts =
mysql-5.1
[mysql-5.1-sphinx-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
md5sum = eefcd08c400c58d3e89542ab482a8429
filename = mysql-5.1-sphinx-2.0.1-beta.diff
download-only = true
[mysql-5.1-no_test-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
md5sum = 22b0ef8baec5efc182e10d233c6f14ca
filename = mysql_create_system_tables__no_test.patch
download-only = true
[mysql-5.1]
recipe = slapos.recipe.cmmi
version = 5.1.58
url = http://mysql.he.net/Downloads/MySQL-5.1/mysql-${:version}.tar.gz
md5sum = ae5aef506088e521e4b1cc4f668e96d2
# compile directory is required to build mysql plugins.
keep-compile-dir = true
# configure: how to avoid searching for my.cnf?
# - like in mysql part in http://svn.zope.org/zodbshootout/trunk/buildout.cfg?view=markup
configure-command =
libtoolize -c -f
aclocal -I ${libtool:location}/share/aclocal -I config/ac-macros
autoheader
automake -c -a -f
autoconf
touch sql/sql_yacc.yy
./configure
# we use the embedded yassl instead of openssl to avoid compilation errors in the sphinx search engine.
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--disable-static
--enable-thread-safe-client
--enable-local-infile
--enable-assembler
--with-pic
--with-fast-mutexes
--with-charset=utf8
--with-collation=utf8_unicode_ci
--with-server-suffix=mysql-5.1
--without-readline
--with-ssl
--with-zlib-dir=${zlib:location}
make-options =
LIBTOOL=libtool
patch-options = -p0
patches =
${mysql-5.1-sphinx-patch:location}/${mysql-5.1-sphinx-patch:filename}
${mysql-5.1-no_test-patch:location}/${mysql-5.1-no_test-patch:filename}
environment =
PATH =${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:${bison:location}/bin:${flex:location}/bin:%(PATH)s
CPPFLAGS =-I${ncurses:location}/include -I${readline:location}/include
LDFLAGS =-L${readline:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${readline:location}/lib
[groonga-storage-engine-mysql-5.1]
recipe = slapos.recipe.cmmi
url = http://github.com/downloads/mroonga/mroonga/groonga-storage-engine-1.0.0.tar.gz
md5sum = 289b8b7919e790599ea79b6fe9270e04
configure-options =
--with-mysql-source=${mysql-5.1:location}__compile__/mysql-${mysql-5.1:version}
--with-mysql-config=${mysql-5.1:location}/bin/mysql_config
environment =
PATH=${groonga:location}/bin:${pkgconfig:location}/bin:%(PATH)s
CPPFLAGS=-I${groonga:location}/include/groonga
LDFLAGS=-L${groonga:location}/lib
PKG_CONFIG_PATH=${groonga:location}/lib/pkgconfig
# 33_scripts__mysql_create_system_tables__no_test.dpatch by <ch@debian.org>
A user with no password prevents a normal user from logging in under certain
circumstances, as it is checked first (illustrated after the patch below).
See http://bugs.debian.org/301741
and http://bugs.mysql.com/bug.php?id=6901
--- scripts/mysql_system_tables_data.sql 2008-12-04 22:59:44.000000000 +0100
+++ scripts/mysql_system_tables_data.sql 2008-12-04 23:00:07.000000000 +0100
@@ -11,8 +11,6 @@
-- Fill "db" table with default grants for anyone to
-- access database 'test' and 'test_%' if "db" table didn't exist
CREATE TEMPORARY TABLE tmp_db LIKE db;
-INSERT INTO tmp_db VALUES ('%','test','','Y','Y','Y','Y','Y','Y','N','Y','Y','Y','Y','Y','Y','Y','Y','N','N','Y','Y');
-INSERT INTO tmp_db VALUES ('%','test\_%','','Y','Y','Y','Y','Y','Y','N','Y','Y','Y','Y','Y','Y','Y','Y','N','N','Y','Y');
INSERT INTO db SELECT * FROM tmp_db WHERE @had_db_table=0;
DROP TABLE tmp_db;
@@ -24,7 +22,5 @@
INSERT INTO tmp_user VALUES ('localhost','root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0,0);
REPLACE INTO tmp_user SELECT @current_hostname,'root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0,0 FROM dual WHERE LOWER( @current_hostname) != 'localhost';
REPLACE INTO tmp_user VALUES ('127.0.0.1','root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0,0);
-INSERT INTO tmp_user (host,user) VALUES ('localhost','');
-INSERT INTO tmp_user (host,user) SELECT @current_hostname,'' FROM dual WHERE LOWER(@current_hostname ) != 'localhost';
INSERT INTO user SELECT * FROM tmp_user WHERE @had_user_table=0;
DROP TABLE tmp_user;
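For context, a sketch of the failure mode those removed anonymous rows cause, assuming a stock MySQL 5.x server that still ships them (see the Debian and MySQL reports referenced above):

# 'alice' was created as 'alice'@'%'. Connecting from localhost, the server
# matches ''@'localhost' first because its host part is more specific, so
# alice's real password is refused:
mysql -h localhost -u alice -p     # Access denied, even with the correct password
# With the anonymous rows removed, as this patch does, the same login is
# matched against 'alice'@'%' and succeeds:
mysql -h localhost -u alice -p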
# This is the software part of the buildout for MySQL with Senna
# Originally made by Leonardo Rochael Almeida <leorochael@gmail.com> (thanks!)
# Original place: https://svn.erp5.org/repos/public/experimental/mysqlsenna.buildout/
[buildout]
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
../zlib/buildout.cfg
../bison/buildout.cfg
../flex/buildout.cfg
../libtool/buildout.cfg
../ncurses/buildout.cfg
../openssl/buildout.cfg
../readline/buildout.cfg
parts =
mysql-tritonn-5.0
[senna]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.nexedi.org/static/tarballs/senna/senna-r1311.tar.gz
md5sum = 1cc51554789f81a9e5208da04a5c2f4c
configure-command =
echo 'AC_CONFIG_MACRO_DIR([m4])' >> configure.ac
echo 'ACLOCAL_AMFLAGS = -I m4' >> Makefile.am
ACLOCAL_ARGS=-I${libtool:location}/share/aclocal ./autogen.sh
./configure
configure-options =
--prefix=@@LOCATION@@
--without-mecab
--disable-glibtest
make-options =
LIBTOOL=libtool
environment =
PATH=${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:%(PATH)s
[mysql-5.0-tritonn-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = http://dl.sourceforge.jp/tritonn/44472/${:filename}
md5sum = 257abe9c4afdc9b08033687fd486a595
filename = tritonn-1.0.12-mysql-5.0.87.diff
download-only = true
[mysql-5.0-sphinx-patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
md5sum = 4ca071cde319213a213ab3605e326d1c
filename = mysql-5.0.87-sphinx-1.10.diff
download-only = true
[mysql-tritonn-5.0]
recipe = slapos.recipe.cmmi
shared = true
url = http://www.nexedi.org/static/tarballs/mysql/mysql-5.0.87.tar.gz
md5sum = 65e6229cc98b6a8d4c5206d7fe16c7be
# configure: how to avoid searching for my.cnf?
# - like in mysql part in http://svn.zope.org/zodbshootout/trunk/buildout.cfg?view=markup
configure-command =
libtoolize -c -f
aclocal -I ${libtool:location}/share/aclocal -I config/ac-macros
autoheader
automake -c -a -f
autoconf
touch sql/sql_yacc.yy
./configure
configure-options =
--prefix=@@LOCATION@@
--with-senna
--with-openssl=${openssl:location}
--without-mecab
--enable-thread-safe-client
--with-charset=utf8
--with-collation=utf8_unicode_ci
--with-server-suffix=mysql-tritonn-5.0
--with-mysqlmanager
--enable-assembler
--without-readline
--with-sphinx-storage-engine
--with-zlib-dir=${zlib:location}
make-options =
LIBTOOL=libtool
LN_S='ln -s'
patch-options = -p1
patches =
${mysql-5.0-tritonn-patch:location}/${mysql-5.0-tritonn-patch:filename}
${mysql-5.0-sphinx-patch:location}/${mysql-5.0-sphinx-patch:filename}
environment =
PATH=${senna:location}/bin:${autoconf:location}/bin:${automake:location}/bin:${libtool:location}/bin:${bison:location}/bin:${flex:location}/bin:%(PATH)s
CPPFLAGS=-I${senna:location}/include/senna -I${ncurses:location}/include -I${readline5:location}/include
LDFLAGS=-L${senna:location}/lib -L${readline5:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib -Wl,-rpath=${ncurses:location}/lib -Wl,-rpath=${readline5:location}/lib
[buildout]
# Work in progress
extends =
../xorg/buildout.cfg
../fontconfig/buildout.cfg
../dbus/buildout.cfg
../gtk-2/buildout.cfg
../libpng/buildout.cfg
parts =
opera
[opera]
recipe = slapos.recipe.build
slapos_promise =
file:opera
depends =
${liberation-fonts:location}
${ipaex-fonts:location}
x86 = http://arc.opera.com/pub/opera/linux/1151/opera-11.51-1087.i386.linux.tar.bz2 91db21fc001e736a6432627fbf93062f
x86-64 = todohttp://arc.opera.com/pub/opera/linux/1151/opera-11.51-1087.i386.linux.tar.bz2 91db21fc001e736a6432627fbf93062f
script =
if not self.options.get('url'): self.options['url'], self.options['md5sum'] = self.options[guessPlatform()].split(' ')
extract_dir = self.extract(self.download(self.options['url'], self.options.get('md5sum')))
workdir = guessworkdir(extract_dir)
self.copyTree(workdir, "%(location)s")
wrapper_location = os.path.join("%(location)s", "opera-slapos")
wrapper = open(wrapper_location, 'w')
wrapper.write("""#!/bin/sh
cd %(location)s
export LD_LIBRARY_PATH=${libXrender:location}/lib/:${fontconfig:location}/lib/:${dbus:location}/lib/:${dbus-glib:location}/lib/:${pango:location}/lib:${cairo:location}/lib:${glib:location}/lib:${gtk-2:location}/lib:${atk:location}/lib:${gdk-pixbuf:location}/lib:${libXt:location}/lib:${libpng15:location}/lib:${libSM:location}/lib:${libICE:location}/lib:%(location)s
export PATH=${fontconfig:location}/bin:$PATH
%(location)s/opera $*""")
wrapper.close()
profile_directory = os.path.join("%(location)s", "profile")
os.mkdir(profile_directory)
opera_config = open(os.path.join(profile_directory, "opera6.ini"), "w")
opera_config.write("""[State]
Reading Plugins=0
Accept License=1
Run=0""")
opera_config.close()
os.chmod(wrapper_location, 0766)
[buildout]
extends =
# ../openssl/buildout.cfg
../kerberos/buildout.cfg
# ../pkgconfig/buildout.cfg
parts =
python-kerberos
[python-kerberos]
recipe = zc.recipe.egg:custom
egg = kerberos
environment = python-kerberos-env
#setup-eggs = ${python-cryptography-prep:eggs}
[python-kerberos-env]
PATH = ${kerberos:location}/bin:%(PATH)s
#PKG_CONFIG_PATH = ${openssl:location}/lib/pkgconfig
LD_LIBRARY_PATH = ${kerberos:location}/lib
CPATH = ${kerberos:location}/include
[python-cryptography-prep]
recipe = zc.recipe.egg
eggs =
${python-cffi:egg}
enum34
pycparser
six
This component is not used anymore - but might be revived within 4 months.
If not used within 6 months, feel free to delete this.
# Sphinx - Open Source Search Server
# http://sphinxsearch.com/
[buildout]
parts = sphinx
extends =
../libexpat/buildout.cfg
../mariadb/buildout.cfg
../zlib/buildout.cfg
[sphinx-1.10-beta-snowball.patch]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/${:filename}
path = ${:filename}
md5sum = 65a5ee78bf27c030734674c018006287
download-only = true
filename = sphinx-1.10-beta-snowball.patch
[sphinx]
recipe = slapos.recipe.cmmi
url = http://sphinxsearch.com/files/sphinx-2.0.5-release.tar.gz
md5sum = e71fdb5b0c2911247d48fb30550b9584
configure-options =
--with-mysql
--with-mysql-includes=${mariadb:location}/include/mysql
--with-mysql-libs=${mariadb:location}/lib
--with-libstemmer
--with-iconv
--without-pgsql
--without-unixodbc
patch-options = -p1
patches =
${sphinx-1.10-beta-snowball.patch:location}/${sphinx-1.10-beta-snowball.patch:filename}
environment =
CPPFLAGS=-I${zlib:location}/include -I${libexpat:location}/include -fpermissive
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${mariadb:location}/lib -L${libexpat:location}/lib -Wl,-rpath=${libexpat:location}/lib
# Note: -fpermissive added to allow compilation on recent gcc 4.7+
[buildout]
parts = spidermonkey
[spidermonkey-1.7]
recipe = slapos.recipe.build
url = http://ftp.mozilla.org/pub/mozilla.org/js/js-1.7.0.tar.gz
md5sum = 5571134c3863686b623ebe4e6b1f6fe6
install =
import os
url = self.download(options['url'], options['md5sum'])
extract_dir = self.extract(url)
workdir = os.path.join(guessworkdir(extract_dir), 'src')
env = self.environ
env['JS_DIST'] = location
call(['make', '-f', 'Makefile.ref', '-j1'], cwd=workdir, env=env)
call(['make', '-f', 'Makefile.ref', 'export'], cwd=workdir, env=env)
[spidermonkey]
<= spidermonkey-1.7
[buildout]
extends = ../openssl/buildout.cfg
parts =
stunnel
[stunnel]
recipe = slapos.recipe.cmmi
shared = true
url = https://www.stunnel.org/downloads/archive/5.x/stunnel-5.14.tar.gz
md5sum = e716501960dc6856d80f92547298f724
configure-options =
--enable-ipv6
--disable-libwrap
--disable-fips
--with-ssl=${openssl-1.0:location}
environment =
CPPFLAGS=-I${zlib:location}/include
LDFLAGS=-Wl,-rpath=${openssl-1.0:location}/lib -L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib
# xtrabackup: hot backup utility for MySQL
# http://www.percona.com/
# Depends on SlapOS patched buildout for _profile_base_location_ functionality
[buildout]
extends =
../autoconf/buildout.cfg
../automake/buildout.cfg
../bison/buildout.cfg
../flex/buildout.cfg
../libtool/buildout.cfg
../ncurses/buildout.cfg
../patch/buildout.cfg
../readline/buildout.cfg
../zlib/buildout.cfg
parts =
xtrabackup
[mysql-5.1-download]
recipe = hexagonit.recipe.download
ignore-existing = true
version = 5.1.56
url = http://s3.amazonaws.com/percona.com/downloads/community/mysql-${:version}.tar.gz
md5sum = 15161d67f4830aad3a8a89e083749d49
download-only = true
filename = mysql-${:version}.tar.gz
[libtar-download]
recipe = hexagonit.recipe.download
ignore-existing = true
version = 1.2.11
url = http://s3.amazonaws.com/percona.com/downloads/community/libtar-${:version}.tar.gz
md5sum = 604238e8734ce6e25347a58c4f1a1d7e
download-only = true
filename = libtar-${:version}.tar.gz
[xtrabackup]
recipe = slapos.recipe.cmmi
url = http://www.percona.com/downloads/XtraBackup/XtraBackup-1.6.3/source/xtrabackup-1.6.3.tar.gz
md5sum = d0b827fd18cd76416101eb7b7c56a311
make-binary = true
patches =
${:_profile_base_location_}/xtrabackup-1.6.2_build.patch#b1536fe65e32592e4a0a14bf3b159885
${:_profile_base_location_}/xtrabackup-allow_force_ibbackup.patch#d642ea7b30d1322a516fbece4ee100e0
patch-options = -p1
location = ${buildout:parts-directory}/${:_buildout_section_name_}
configure-command = ln -sf ${mysql-5.1-download:location}/${mysql-5.1-download:filename} ${libtar-download:location}/${libtar-download:filename} . && utils/build.sh innodb51_builtin ${:location} ${libtool:location}
environment =
CPPFLAGS =-I${zlib:location}/include -I${ncurses:location}/include -I${readline:location}/include
LDFLAGS =-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib -L${readline:location}/lib -Wl,-rpath=${readline:location}/lib
PATH=${autoconf:location}/bin:${automake:location}/bin:${bison:location}/bin:${libtool:location}/bin:${flex:location}/bin:${patch:location}/bin:%(PATH)s
make-options =
-j1
diff -ur xtrabackup-1.6.2.orig/Makefile xtrabackup-1.6.2/Makefile
--- xtrabackup-1.6.2.orig/Makefile 2011-07-19 05:16:22.000000000 +0900
+++ xtrabackup-1.6.2/Makefile 2011-07-27 17:58:38.108925111 +0900
@@ -137,5 +137,5 @@
clean:
rm -f *.o xtrabackup_*
install:
- install -m 755 innobackupex-1.5.1 $(BIN_DIR)
+ install -m 755 innobackupex-1.5.1 $(BIN_DIR)/innobackupex
install -m 755 xtrabackup_* $(BIN_DIR)
diff -ur xtrabackup-1.6.2.orig/utils/build.sh xtrabackup-1.6.2/utils/build.sh
--- xtrabackup-1.6.2.orig/utils/build.sh 2011-07-19 05:16:22.000000000 +0900
+++ xtrabackup-1.6.2/utils/build.sh 2011-07-27 18:01:53.809212142 +0900
@@ -21,12 +21,14 @@
{
echo "Build an xtrabackup binary against the specified InnoDB flavor."
echo
- echo "Usage: `basename $0` CODEBASE"
+ echo "Usage: `basename $0` CODEBASE PREFIX LIBTOOL_LOCATION"
echo "where CODEBASE can be one of the following values or aliases:"
echo " innodb51_builtin | 5.1 build against built-in InnoDB in MySQL 5.1"
echo " innodb55 | 5.5 build against InnoDB in MySQL 5.5"
echo " xtradb51 | xtradb build against Percona Server with XtraDB 5.1"
echo " xtradb55 | xtradb55 build against Percona Server with XtraDB 5.5"
+ echo "where PREFIX is the absolute path for the install location"
+ echo "where LIBTOOL_LOCATION is the absolute path of libtool"
exit -1
}
@@ -79,7 +81,11 @@
{
echo "Configuring the server"
cd $server_dir
- BUILD/autorun.sh
+ aclocal -I $libtool_location/share/aclocal -I config/ac-macros || die "Can't execute aclocal"
+ autoheader || die "Can't execute autoheader"
+ libtoolize --automake --force --copy || die "Can't execute libtoolize"
+ automake --add-missing --force --copy || die "Can't execute automake"
+ autoconf || die "Can't execute autoconf"
eval $configure_cmd
echo "Building the server"
@@ -92,12 +99,13 @@
echo "Building XtraBackup"
mkdir $build_dir
cp $top_dir/Makefile $top_dir/xtrabackup.c $build_dir
+ cp $top_dir/innobackupex $build_dir/innobackupex-1.5.1
# Read XTRABACKUP_VERSION from the VERSION file
. $top_dir/VERSION
cd $build_dir
- $MAKE_CMD $xtrabackup_target XTRABACKUP_VERSION=$XTRABACKUP_VERSION
+ $MAKE_CMD $xtrabackup_target XTRABACKUP_VERSION=$XTRABACKUP_VERSION PREFIX=$1
cd $top_dir
}
@@ -106,11 +114,36 @@
echo "Building tar4ibd"
unpack_and_patch libtar-1.2.11.tar.gz tar4ibd_libtar-1.2.11.patch
cd libtar-1.2.11
- ./configure
+ ./configure --prefix=$1
$MAKE_CMD
cd $topdir
}
+function install_server()
+{
+ echo "Installing the server"
+ cd $server_dir
+ $MAKE_CMD install
+ cd $top_dir
+}
+
+function install_xtrabackup()
+{
+ echo "Installing XtraBackup"
+ echo $build_dir
+ cd $build_dir
+ $MAKE_CMD PREFIX=$1 install
+ cd $top_dir
+}
+
+function install_tar4ibd()
+{
+ echo "Installing tar4ibd"
+ cd libtar-1.2.11
+ $MAKE_CMD install
+ cd $topdir
+}
+
################################################################################
# Do all steps to build the server, xtrabackup and tar4ibd
# Expects the following variables to be set before calling:
@@ -141,9 +174,15 @@
build_server
- build_xtrabackup
+ build_xtrabackup $1
+
+ build_tar4ibd $1
+
+ install_server
+
+ install_xtrabackup $1
- build_tar4ibd
+ install_tar4ibd
}
if ! test -f xtrabackup.c
@@ -153,6 +192,15 @@
fi
type=$1
+prefix=$2
+if [ "x$prefix" == "x" ] ; then
+ usage
+fi
+libtool_location=$3
+if [ "x$libtool_location" == "x" ] ; then
+ usage
+fi
+
top_dir=`pwd`
case "$type" in
@@ -166,9 +214,10 @@
--with-plugins=innobase \
--with-zlib-dir=bundled \
--enable-shared \
- --with-extra-charsets=all"
+ --with-extra-charsets=all \
+ --prefix=$2"
- build_all
+ build_all $2
;;
"innodb55" | "5.5")
commit c052380cf84e33b32268af97a7c1539c8a6771e5
Author: Łukasz Nowak <luke@nexedi.com>
Date: Mon Jun 6 13:29:32 2011 +0200
Really use option_ibbackup_binary.
diff --git a/innobackupex b/innobackupex
index 67d0d44..ed06294 100755
--- a/innobackupex
+++ b/innobackupex
@@ -214,6 +214,7 @@ check_args();
print_version();
# initialize global variables and perform some checks
+if ( $option_ibbackup_binary eq "autodetect" ){
if ($option_copy_back) {
$option_ibbackup_binary = 'xtrabackup_51';
} elsif ($option_apply_log) {
@@ -250,6 +251,7 @@ if ($option_copy_back) {
} else {
$option_ibbackup_binary = set_xtrabackup_version();
}
+}
init();
my $ibbackup_exit_code = 0;
......@@ -19,8 +19,8 @@ parts =
[metabase.jar]
recipe = slapos.recipe.build:download
url = https://downloads.metabase.com/v0.44.1/metabase.jar
md5sum = a97e8e67a0cc647f7c44ca787eb9a41a
url = https://downloads.metabase.com/v0.45.2/metabase.jar
md5sum = cca1f4d663ebfa60b3a6d93705b340a0
[instance-profile]
recipe = slapos.recipe.template
......
......@@ -14,11 +14,7 @@
# not need these here).
[template]
filename = instance.cfg
md5sum = 072b276e0b0dd4b4a96a50348a04c7a7
[template-monitor]
_update_hash_filename_ = instance-monitor.cfg.jinja2
md5sum = 5afba85a7aca9e716f61d59a524adc12
md5sum = e1dd16a6f50468959b5c4572b8c82f23
[json-test-template]
_update_hash_filename_ = json-test-template.json.in.jinja2
......@@ -26,7 +22,11 @@ md5sum = 2eb5596544d9c341acf653d4f7ce2680
[template-monitor-edgetest-basic]
_update_hash_filename_ = instance-monitor-edgetest-basic.cfg.jinja2
md5sum = efc528296ddf7fade335d5f4241c1828
md5sum = 6b933beb0744d97c7760e4601298e137
[template-node-monitoring]
_update_hash_filename_ = instance-node-monitoring.jinja2.cfg
md5sum = 2d8bd1224472983e54f36770d3e3f969
[network-bench-cfg]
filename = network_bench.cfg.in
......
{
"$schema": "http://json-schema.org/draft-04/schema#",
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Input Parameters",
"type": "object",
"properties": {
"dummy": {
"title": "dummy",
"description": "Dummy",
"type": "string"
"promise_cpu_temperature_frequency": {
"default": "2",
"title": "Frequency at which CPU temperature promise will be run (unit: minutes)",
"description": "Frequency at which CPU temperature promise will be run (unit: minutes)",
"type": "number"
},
"promise_cpu_temperature_threshold": {
"default": "90",
"title": "CPU temperature promise threshold (unit: Celsius)",
"description": "Temperature threshold above which CPU temperature promise will fail (unit: Celsius)",
"type": "number"
},
"promise_cpu_avg_temperature_threshold": {
"default": "80",
"title": "Average CPU temperature promise threshold (unit: Celsius)",
"description": "If average temperature over specified duration reaches this threshold, promise will fail (unit: Celsius)",
"type": "number"
},
"promise_cpu_avg_temperature_threshold_duration": {
"default": "600",
"title": "Average CPU temperature promise threshold duration (unit: secondes)",
"description": "Duration during which average temperature should not exceed specified threshold (unit: secondes)",
"type": "number"
},
"promise_ram_available_frequency": {
"default": "2",
"title": "Frequency at which RAM availalble promise will be run (unit: minutes)",
"description": "Frequency at which RAM availalble promise will be run (unit: minutes)",
"type": "number"
},
"promise_ram_available_threshold": {
"default": "500",
"title": "Minimum available RAM promise threshold (unit: MB)",
"description": "Minimum available RAM threshold below which RAM available promise will fail (unit: MB)",
"type": "number"
},
"promise_ram_avg_available_threshold": {
"default": "1e3",
"title": "Minimum average available RAM promise threshold (unit: MB)",
"description": "If average available ram over specified duration reaches this threshold, promise will fail (unit: MB)",
"type": "number"
},
"promise_ram_avg_available_threshold_duration": {
"default": "600",
"title": "Minimum average available RAM promise threshold duration (unit: secondes)",
"description": "Duration during which average available RAM shall not be below specified threshold (unit: secondes)",
"type": "number"
},
"promise_network_errors_frequency": {
"default": "5",
"title": "Frequency at which network errors promise will be run (unit: minutes)",
"description": "Frequency at which network errors promise will be run (unit: minutes)",
"type": "number"
},
"promise_network_errors_threshold": {
"default": "100",
"title": "Maximum errors on Network per MB (unit: N/A)",
"description": "Maximum network errors threshold(per MB) above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_network_lost_packets_threshold": {
"default": "100",
"title": "Maximum lost packets on Network per MB (unit: N/A)",
"description": "Maximum network lost packets threshold(per MB) above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_network_transit_frequency": {
"default": "1",
"title": "Frequency at which network transit promise will be run (unit: minutes)",
"description": "Frequency at which network transit promise will be run (unit: minutes)",
"type": "number"
},
"promise_network_transit_max_data_threshold": {
"default": "1e6",
"title": "Maximum data amount on network (unit: MB)",
"description": "Maximum data amount on network below which the promise will fail (unit: MB)",
"type": "number"
},
"promise_network_transit_min_data_threshold": {
"default": "0",
"title": "Minimum data amount on network (unit: MB)",
"description": "Minimum data amount on network below which the promise will fail (unit: MB)",
"type": "number"
},
"promise_network_transit_duration": {
"default": "600",
"title": "Duration to check thresholds (unit: secondes)",
"description": "Duration during which data amount on network shall not be above maximum and below minimum (unit: secondes)",
"type": "number"
},
"promise_cpu_load_frequency": {
"default": "3",
"title": "Frequency at which CPU load promise will be run (unit: minutes)",
"description": "Frequency at which CPU load promise will be run (unit: minutes)",
"type": "number"
},
"promise_cpu_load_threshold": {
"default": "1.5",
"title": "Maximum CPU load ratio threshold (unit: N/A)",
"description": "Maximum CPU load ratio threshold above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_monitor_space_frequency": {
"default": "60",
"title": "Frequency at which monitor space promise will be run (unit: minutes)",
"description": "Frequency at which monitor space promise will be run (unit: minutes)",
"type": "number"
},
"promise_partition_space_threshold": {
"default": "0.08",
"title": "Minimum partition space ratio threshold (unit: N/A)",
"description": "Minimum partition space ratio threshold below which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_free_disk_space_frequency": {
"default": "60",
"title": "Frequency at which free disk space promise will be run (unit: minutes)",
"description": "Frequency at which free disk space promise will be run (unit: minutes)",
"type": "number"
},
"promise_free_disk_space_threshold": {
"default": "0.08",
"title": "Minimum partition space ratio threshold (unit: N/A)",
"description": "Minimum partition space ratio threshold below which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_free_disk_space_nb_days_predicted": {
"default": "10",
"title": "Number of days for prediction (unit: Days)",
"description": "Number of days (unit: Days) that will be take into account for prediction calculation",
"type": "number"
},
"promise_free_disk_space_display_partition": {
"default": true,
"title": "Boolean to display partition (unit: N/A)",
"description": "Enable partition display by setting boolean to True (unit: N/A)",
"type": "boolean"
},
"promise_free_disk_space_display_prediction": {
"default": true,
"title": "Boolean to display prediction (unit: N/A)",
"description": "Enable prediction display by setting boolean to True (unit: N/A)",
"type": "boolean"
}
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by Re6st Master instanciation",
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Values returned by node-monitoring instanciation",
"type": "object",
"properties": {
"re6stry-url": {
"description": "ipv6 url to access your re6st registry service",
"type": "string"
"promise_cpu_temperature_frequency": {
"description": "Frequency at which CPU temperature promise will be run (unit: minutes)",
"type": "number"
},
"promise_cpu_temperature_threshold": {
"description": "Temperature threshold above which CPU temperature promise will fail (unit: Celsius)",
"type": "number"
},
"promise_cpu_avg_temperature_threshold": {
"description": "If average temperature over specified duration reaches this threshold, promise will fail (unit: Celsius)",
"type": "number"
},
"promise_cpu_avg_temperature_threshold_duration": {
"description": "Duration during which average temperature should not exceed specified threshold (unit: secondes)",
"type": "number"
},
"promise_ram_available_frequency": {
"description": "Frequency at which RAM availalble promise will be run (unit: minutes)",
"type": "number"
},
"promise_ram_available_threshold": {
"description": "Minimum available RAM threshold below which RAM available promise will fail (unit: MB)",
"type": "number"
},
"promise_ram_avg_available_threshold": {
"description": "If average available ram over specified duration reaches this threshold, promise will fail (unit: MB)",
"type": "number"
},
"promise_ram_avg_available_threshold_duration": {
"description": "Duration during which average available RAM shall not be below specified threshold (unit: secondes)",
"type": "number"
},
"promise_network_errors_frequency": {
"description": "Frequency at which network errors promise will be run (unit: minutes)",
"type": "number"
},
"promise_network_errors_threshold": {
"description": "Maximum network errors threshold(per MB) above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_network_lost_packets_threshold": {
"description": "Maximum network lost packets threshold(per MB) above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_network_transit_frequency": {
"description": "Frequency at which network transit promise will be run (unit: minutes)",
"type": "number"
},
"promise_network_transit_max_data_threshold": {
"description": "Maximum data amount on network below which the promise will fail (unit: MB)",
"type": "number"
},
"promise_network_transit_min_data_threshold": {
"description": "Minimum data amount on network below which the promise will fail (unit: MB)",
"type": "number"
},
"promise_network_transit_duration": {
"description": "Duration during which data amount on network shall not be above maximum and below minimum (unit: secondes)",
"type": "number"
},
"promise_cpu_load_frequency": {
"description": "Frequency at which CPU load promise will be run (unit: minutes)",
"type": "number"
},
"promise_cpu_load_threshold": {
"description": "Maximum CPU load ratio threshold above which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_monitor_space_frequency": {
"description": "Frequency at which monitor space promise will be run (unit: minutes)",
"type": "number"
},
"promise_partition_space_threshold": {
"description": "Minimum partition space ratio threshold below which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_free_disk_space_frequency": {
"description": "Frequency at which free disk space promise will be run (unit: minutes)",
"type": "number"
},
"promise_free_disk_space_threshold": {
"description": "Minimum partition space ratio threshold below which the promise will fail (unit: N/A)",
"type": "number"
},
"promise_free_disk_space_nb_days_predicted": {
"description": "Number of days (unit: Days) that will be take into account for prediction calculation",
"type": "number"
},
"promise_free_disk_space_display_partition": {
"description": "Enable partition display by setting boolean to True (unit: N/A)",
"type": "boolean"
},
"promise_free_disk_space_display_prediction": {
"description": "Enable prediction display by setting boolean to True (unit: N/A)",
"type": "boolean"
}
},
"type": "object"
}
}
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Values returned by Varnish instanciation",
"description": "Values returned by Re6st Master instanciation",
"properties": {
"url": {
"description": "Varnish HTTP service access information",
"re6stry-url": {
"description": "ipv6 url to access your re6st registry service",
"type": "string"
}
},
......
......@@ -135,8 +135,43 @@ command = ${surykatka-status-json-{{ class }}:output}
{%- endfor %} {#- for class, class_check_list in CHECK_DICT.items() #}
[buildout]
extends = {{ instance_base_monitor }}
parts +=
extends = {{ monitor_template }}
parts =
publish-connection-information
{% for part_id in sorted(PART_LIST) %}
{{ part_id }}
{% endfor %}
[publish-connection-information]
recipe = slapos.cookbook:publish
monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
server_log_url = ${monitor-publish-parameters:monitor-base-url}/${slap-configuration:private-hash}/
[monitor-directory]
service = ${buildout:directory}/etc/service
var = ${buildout:directory}/var
srv = ${buildout:directory}/srv
server-log = ${:private}/server-log
monitor-log = ${:private}/monitor-log
system-log = ${:private}/system-log
consumption = ${:log}/consumption
[pwgen]
recipe = slapos.cookbook:generate.password
user = admin
bytes = 16
[pwgen32]
recipe = slapos.cookbook:generate.password
user = admin
bytes = 16
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
private-hash = ${pwgen:passwd}${pwgen32:passwd}
frontend-domain =
[buildout]
extends = {{ monitor_template_output }}
parts =
cron
cron-network-bench
symlink-re6st-logs
symlink-collected-logs
python-symlink
monitor-collect-csv-wrapper
monitor-base
monitor-check-memory-usage
monitor-check-cpu-usage
publish-connection-information
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[cron-network-bench]
<= cron
recipe = slapos.cookbook:cron.d
name = network-bench-test
frequency = */10 * * * *
# skipped so it does not fill the cron log file.
# command = {{ buildout_bin }}/networkbench ${network-bench-configuration:output} ${monitor-directory:monitor-log}
command = true
[symlink-re6st-logs]
recipe = cns.recipe.symlink
symlink = /var/log/re6stnet = ${monitor-directory:system-log}/re6stnet
autocreate = true
[symlink-collected-logs]
recipe = cns.recipe.symlink
symlink = /srv/slapgrid/var/data-log = ${monitor-directory:server-log}/data-log
autocreate = true
[network-bench-configuration]
recipe = slapos.recipe.template:jinja2
url = {{ network_benck_cfg_output }}
output = ${monitor-directory:etc}/network_bench.cfg
context =
key slapparameter_dict slap-configuration:configuration
[pwgen]
recipe = slapos.cookbook:generate.password
user = admin
bytes = 16
[pwgen32]
recipe = slapos.cookbook:generate.password
user = admin
bytes = 16
[monitor-instance-parameter]
monitor-httpd-port = {{ slap_configuration['configuration.monitor-base-port'] }}
[monitor-directory]
service = ${buildout:directory}/etc/service
var = ${buildout:directory}/var
srv = ${buildout:directory}/srv
server-log = ${:private}/server-log
monitor-log = ${:private}/monitor-log
system-log = ${:private}/system-log
consumption = ${:log}/consumption
[python-symlink]
recipe = plone.recipe.command
command = ln -sf {{ buildout_bin }}/pythonwitheggs ${monitor-directory:bin}/python
update-command = ${:command}
[monitor-collect-csv-wrapper]
recipe = slapos.cookbook:wrapper
command-line =
${monitor-directory:bin}/python {{ monitor_collect_csv_dump }} --output_folder ${monitor-directory:consumption}
wrapper-path = ${monitor-directory:bin}/monitor-collect-csv-dump
[monitor-check-memory-usage]
<= monitor-promise-base
promise = check_command_execute
name = check-computer-memory-usage.py
config-command = {{ buildout_bin }}/check-computer-memory -db ${monitor-instance-parameter:collector-db} --threshold ${slap-parameter:memory-percent-threshold} --unit percent
[monitor-check-cpu-usage]
recipe = slapos.cookbook:promise.plugin
eggs =
slapos.toolbox
module = slapos.promise.plugin.check_server_cpu_load
output = ${directory:plugins}/system-CPU-load-check.py
config-cpu-load-threshold = ${slap-parameter:cpu-load-threshold}
[publish-connection-information]
recipe = slapos.cookbook:publish
monitor-setup-url = https://monitor.app.officejs.com/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
server_log_url = ${monitor-publish-parameters:monitor-base-url}/${slap-configuration:private-hash}/
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = ${slap-connection:computer-id}
partition = ${slap-connection:partition-id}
url = ${slap-connection:server-url}
key = ${slap-connection:key-file}
cert = ${slap-connection:cert-file}
private-hash = ${pwgen:passwd}${pwgen32:passwd}
frontend-domain =
[slap-parameter]
# Max cpu load for one core on server
cpu-load-threshold = 3.0
memory-percent-threshold = 96
[buildout]
parts =
directory
check-disk-space.py
check-partition-space.py
check-cpu-temperature.py
check-ram-usage.py
check-network-errors.py
check-network-transit.py
check-cpu-load.py
publish-connection-information
extends = {{ monitor_template }}
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = {{ slap_connection['computer-id'] }}
partition = {{ slap_connection['partition-id'] }}
url = {{ slap_connection['server-url'] }}
key = {{ slap_connection['key-file'] }}
cert = {{ slap_connection['cert-file'] }}
[directory]
recipe = slapos.cookbook:mkdirectory
software = {{ buildout_directory }}
home = ${buildout:directory}
etc = ${:home}/etc
var = ${:home}/var
etc = ${:home}/etc
bin = ${:home}/bin
run = ${:var}/run
script = ${:etc}/run
service = ${:etc}/service
promise = ${:etc}/promise
log = ${:var}/log
[publish-connection-information]
recipe = slapos.cookbook:publish.serialised
<= monitor-publish
[macro.promise]
<= monitor-promise-base
name = ${:_buildout_section_name_}
[check-disk-space.py]
<= macro.promise
eggs = slapos.toolbox[prediction]
promise = check_free_disk_space
config-collectordb = ${monitor-instance-parameter:collector-db}
config-frequency = {{ slapparameter_dict.get("promise_free_disk_space_frequency", 60) }}
config-threshold = {{ slapparameter_dict.get("promise_free_disk_space_threshold", 0.08) }}
config-nb-days-predicted = {{ slapparameter_dict.get("promise_free_disk_space_nb_days_predicted", 10) }}
config-display-partition = {{ int(slapparameter_dict.get("promise_free_disk_space_display_partition", 1)) }}
config-display-prediction = {{ int(slapparameter_dict.get("promise_free_disk_space_display_prediction", 1)) }}
[check-partition-space.py]
<= macro.promise
eggs = slapos.toolbox[pandas]
promise = monitor_partition_space
config-collectordb = ${monitor-instance-parameter:collector-db}
config-frequency = {{ slapparameter_dict.get("promise_monitor_space_frequency", 60) }}
config-threshold-ratio = {{ slapparameter_dict.get("promise_partition_space_threshold", 0.08) }}
[check-cpu-temperature.py]
<= macro.promise
promise = check_cpu_temperature
config-testing = false
config-frequency = {{ slapparameter_dict.get("promise_cpu_temperature_frequency", 5) }}
config-max-spot-temp = {{ slapparameter_dict.get("promise_cpu_temperature_threshold", 90) }}
config-max-avg-temp = {{ slapparameter_dict.get("promise_cpu_avg_temperature_threshold", 80) }}
config-avg-temp-duration = {{ slapparameter_dict.get("promise_cpu_avg_temperature_threshold_duration", 600) }}
config-last-avg-computation-file = ${directory:var}/promise_cpu_temperature_last_avg_file
[check-ram-usage.py]
<= macro.promise
promise = check_ram_usage
config-frequency = {{ slapparameter_dict.get("promise_ram_available_frequency", 2) }}
config-min-threshold-ram = {{ slapparameter_dict.get("promise_ram_available_threshold", 500) }}
config-min-avg-ram = {{ slapparameter_dict.get("promise_ram_avg_available_threshold", 1e3) }}
config-avg-ram-period = {{ slapparameter_dict.get("promise_ram_avg_available_threshold_duration", 600) }}
config-last-avg-ram-file = ${directory:var}/promise_ram_space_last_avg_file
[check-network-errors.py]
<= macro.promise
promise = check_network_errors_packets
config-frequency = {{ slapparameter_dict.get("promise_network_errors_frequency", 5) }}
config-max-error-messages-per-MB = {{ slapparameter_dict.get("promise_network_errors_threshold", 100) }}
config-max-lost-packets-per-MB = {{ slapparameter_dict.get("promise_network_lost_packets_threshold", 100) }}
[check-network-transit.py]
<= macro.promise
promise = check_network_transit
config-frequency = {{ slapparameter_dict.get("promise_network_transit_frequency", 5) }}
config-max-data-amount = {{ slapparameter_dict.get("promise_network_transit_max_data_threshold", 1e6) }}
config-min-data-amount = {{ slapparameter_dict.get("promise_network_transit_min_data_threshold", 0) }}
config-transit-period = {{ slapparameter_dict.get("promise_network_transit_duration", 600) }}
config-last-transit-file = ${directory:var}/promise_network_last_transit_file
[check-cpu-load.py]
<= macro.promise
promise = check_server_cpu_load
config-frequency = {{ slapparameter_dict.get("promise_cpu_load_frequency", 3) }}
config-cpu-load-threshold = {{ slapparameter_dict.get("promise_cpu_load_threshold", 1.5) }}
......@@ -7,24 +7,10 @@ develop-eggs-directory = ${buildout:develop-eggs-directory}
[switch_softwaretype]
recipe = slapos.cookbook:switch-softwaretype
default = instance-base-monitor:output
default = instance-node-monitoring:output
edgetest-basic = instance-edgetest-basic:output
RootSoftwareInstance = $${:default}
[instance-base-monitor]
recipe = slapos.recipe.template:jinja2
url = ${template-monitor:target}
output = $${buildout:directory}/template-base-monitor.cfg
extensions = jinja2.ext.do
context = key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
section slap_configuration slap-configuration
raw buildout_bin ${buildout:bin-directory}
raw monitor_template_output ${monitor-template:output}
raw network_benck_cfg_output ${network-bench-cfg:output}
raw monitor_collect_csv_dump ${monitor-collect-csv-dump:target}
[instance-template]
recipe = slapos.recipe.template:jinja2
extensions = jinja2.ext.do
......@@ -36,13 +22,16 @@ context =
key eggs_directory buildout:eggs-directory
key slapparameter_dict slap-configuration:configuration
key slap_software_type slap-configuration:slap-software-type
key instance_base_monitor instance-base-monitor:output
raw buildout_bin ${buildout:bin-directory}
raw monitor_template ${monitor2-template:output}
$${:extra-context}
[surykatka]
binary = ${buildout:bin-directory}/${surykatka:script-name}
ini = ${template-surykatka-ini:target}
[instance-node-monitoring]
<= instance-template
url = ${template-node-monitoring:target}
extra-context =
raw buildout_directory ${buildout:directory}
section slap_connection slap-connection
[instance-edgetest-basic]
<= instance-template
......@@ -52,6 +41,10 @@ extra-context =
key template_surykatka_ini surykatka:ini
key surykatka_binary surykatka:binary
[surykatka]
binary = ${buildout:bin-directory}/${surykatka:script-name}
ini = ${template-surykatka-ini:target}
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
......
......@@ -4,31 +4,35 @@ extends =
buildout.hash.cfg
../../component/pycurl/buildout.cfg
../../component/python-cryptography/buildout.cfg
../../component/python3/buildout.cfg
../../component/pandas/buildout.cfg
../../component/scipy/buildout.cfg
../../component/statsmodels/buildout.cfg
../../component/defaults.cfg
../../stack/monitor/buildout.cfg
../../stack/slapos.cfg
../../component/python3/buildout.cfg
parts =
slapos-cookbook
network-bench-cfg
json-test-template
template
template-monitor
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg
[template-monitor]
[template-monitor-edgetest-basic]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
[template-monitor-edgetest-basic]
[template-surykatka-ini]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
[template-surykatka-ini]
[template-node-monitoring]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${:_update_hash_filename_}
......@@ -68,3 +72,14 @@ miniupnpc = 2.0.2
peewee = 3.14.4
python-whois = 0.7.3
future = 0.18.2
# Build GCC with Fortran for OpenBLAS (scipy & numpy)
[gcc]
max_version = 0
[slapos-toolbox-dependencies]
eggs +=
${pandas:egg}
${statsmodels:egg}
${scipy:egg}
{
"name": "Monitor",
"description": "Software release for Monitoring purpose",
"serialisation": "xml",
"serialisation": "json-in-xml",
"software-type": {
"default": {
"title": "Default",
......@@ -14,7 +14,7 @@
"title": "Edge Test Basic",
"description": "Basic URL monitoring configuration",
"request": "instance-edgetest-basic-input-schema.json",
"response": "instance-default-output-schema.json",
"response": "instance-edgetest-basic-output-schema.json",
"serialisation": "json-in-xml",
"index": 1
}
......
......@@ -533,3 +533,41 @@ URL =
self.assertSurykatkaPromises()
self.assertSurykatkaCron()
self.assertConnectionParameterDict()
class TestNodeMonitoring(SlapOSInstanceTestCase):
"""Test class for node monitoring instanciation"""
@classmethod
def getInstanceParameterDict(cls):
return {'_': json.dumps({
'promise_cpu_temperature_frequency': 2,
'promise_cpu_temperature_threshold': 90,
'promise_cpu_avg_temperature_threshold': 80,
'promise_cpu_avg_temperature_threshold_duration': 600,
'promise_ram_available_frequency': 2,
'promise_ram_available_threshold': 500,
'promise_ram_avg_available_threshold': 1e3,
'promise_ram_avg_available_threshold_duration': 600,
'promise_network_errors_frequency': 5,
'promise_network_errors_threshold': 100,
'promise_network_lost_packets_threshold': 100,
'promise_network_transit_frequency': 1,
'promise_network_transit_max_data_threshold': 1e6,
'promise_network_transit_min_data_threshold': 0,
'promise_network_transit_duration': 600,
'promise_cpu_load_threshold': 1.5,
'promise_monitor_space_frequency': 5,
'promise_partition_space_threshold': 0.08,
'promise_free_disk_space_frequency': 3,
'promise_free_disk_space_threshold': 0.08,
'promise_free_disk_space_nb_days_predicted': 10,
'promise_free_disk_space_display_partition': True,
'promise_free_disk_space_display_prediction': True,
})}
@classmethod
def getInstanceSoftwareType(cls):
return 'default'
def test_node_monitoring_instance(self):
pass
......@@ -414,7 +414,7 @@ tests =
matomo ${slapos.test.matomo-setup:setup}
metabase ${slapos.test.metabase-setup:setup}
monitor ${slapos.test.monitor-setup:setup}
mosquitto ${slapos.test.mosquitto-setup:setup}
mosquitto ${slapos.test.mosquitto-setup:setup}
nextcloud ${slapos.test.nextcloud-setup:setup}
nginx-push-stream ${slapos.test.nginx-push-stream-setup:setup}
ors-amarisoft ${slapos.test.ors-amarisoft-setup:setup}
......
......@@ -24,6 +24,9 @@ extends =
../../component/openvpn/buildout.cfg
../../component/babeld/buildout.cfg
../../component/bridge-utils/buildout.cfg
../../component/pandas/buildout.cfg
../../component/statsmodels/buildout.cfg
../../component/scipy/buildout.cfg
../../stack/slapos.cfg
../../stack/caucase/buildout.cfg
../../stack/nxdtest.cfg
......@@ -35,6 +38,10 @@ parts =
phantomjs
template
[gcc]
# Always build GCC for Fortran (see openblas).
max_version = 0
[bootstrap-slapos.recipe.cmmi]
# install our develop version of slapos.recipe.cmmi before anything else,
# otherwise it will be installed from pypi by dependencies.
......@@ -153,6 +160,9 @@ eggs +=
${slapcache-setup:egg}
${slapos.rebootstrap-setup:egg}
${rubygemsrecipe-setup:egg}
${pandas:egg}
${statsmodels:egg}
${scipy:egg}
zope.testing
supervisor
${extra-eggs:eggs}
......
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
parts = instance
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
source = ${application:location}
httpd_binary = ${apache:location}/bin/httpd
mysql_base_directory = ${mariadb:location}
mysql_binary = ${mariadb:location}/bin/mysql
mysql_install_binary = ${mariadb:location}/scripts/mysql_install_db
mysql_upgrade_binary = ${mariadb:location}/bin/mysql_upgrade
mysqld_binary = ${mariadb:location}/bin/mysqld
--- old/includes/installer/WebInstaller.php 2011-10-11 11:36:29.173220293 +0100
+++ new/includes/installer/WebInstaller.php 2011-10-11 11:34:32.000000000 +0100
@@ -152,6 +152,7 @@
$ls->setGroupRights( $group, $rightsArr );
}
echo $ls->getText();
+ file_put_contents(realpath(".") . "/LocalSettings.php", $ls->getText());
return $this->session;
}
[buildout]
versions = versions
parts =
template
apache-php
mariadb
eggs
instance-recipe-egg
downloadcache-workaround
mediawiki-patch
patch
extends =
../../stack/lamp.cfg
[mediawiki-patch]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/patch/mediawiki-1.17.0.config.patch
md5sum = fd606666ac9fc54cb84cda8cf08edef4
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = mediawiki-1.17.0.config.patch
[application]
recipe = slapos.recipe.build:download-unpacked
url = http://download.wikimedia.org/mediawiki/1.17/mediawiki-1.17.0.tar.gz
md5sum = 871a00a8eb6dcae1b7b654ae635af7cd
[patch]
recipe = iw.recipe.cmd
on_install = true
on_update = true
cmds= patch -d ${application:location} -p1 < ${mediawiki-patch:location}/${mediawiki-patch:filename}
[instance-recipe]
egg = slapos.cookbook
module = lamp.simple
[template]
# Default template for the instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
#md5sum = Student shall put md5 of instance.cfg here
output = ${buildout:directory}/template.cfg
mode = 0644
[instance-recipe-egg]
recipe = zc.recipe.egg
eggs = ${instance-recipe:egg}
[versions]
# Use SlapOS patched zc.buildout
Jinja2 = 2.6
MySQL-python = 1.2.3
Werkzeug = 0.8.1
buildout-versions = 1.7
hexagonit.recipe.cmmi = 1.5.0
meld3 = 0.6.7
plone.recipe.command = 1.1
slapos.recipe.template = 2.2
[downloadcache-workaround]
# Workaround for an irritating problem of hexagonit.recipe.cmmi, which automatically
# creates a download cache, which in turn switches buildout to "semi-offline" mode
recipe = plone.recipe.command
# In hexagonit.recipe.cmmi, if no ${buildout:download-cache} is set, it resolves
# to ${buildout:directory}/downloads, but that variable only becomes available late,
# which is why it is hardcoded here for the required case
download-cache = ${buildout:directory}/downloads
command = [ -d ${:download-cache} ] && rm -fr ${:download-cache}/* || exit 0
update-command = ${:command}
stop-on-error = True
[buildout]
extends =
../../stack/accords/buildout.cfg
parts =
instance
instance-accords
accords
eggs
accords-manifest
[accords-manifest]
# Manifest for xwiki
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/xwiki.manifest.xml
#md5sum = 3e1ea477d48080e9bdb98579f7f28be6
<?xml version="1.0" encoding="UTF8"?>
<manifest name="test2" xmlns="http://www.compatibleone.fr/schemes/cords.xsd">
<description>test 1 : open stack small compute</description>
<node name="test2" type="simple" access="public" scope="normal" provider="openstack" >
<infrastructure name="test2">
<compute name="test2" architecture="x86" cores="1" memory="1G" speed="1G"/>
<storage name="test2" size="10G"/>
<network name="test2" label="ethernet" vlan="100M"/>
</infrastructure>
<image name="test2">
<system name="debian_with_cosacs"/>
<package
name="test2"
installation="mkdir /home/cosacs/vifib"
configuration="cp /home/cosacs/cosacs.xml /home/cosacs/vifib"
/>
</image>
</node>
<configuration name="test2">
<action name="test2_1" expression="test2.x='1';"/>
<action name="test2_2" expression="test2.y='2';"/>
<action name="test2_3" expression="test2.z='3';"/>
</configuration>
<interface name="test2"/>
<account name="slaposrecipe"/>
<security name="test2"/>
</manifest>
\ No newline at end of file
[agent]
timeout = ${slap-parameter:timeout}
node_title = ${slap-parameter:node_title}
project_title = ${slap-parameter:project_title}
task_count = ${slap-parameter:task_count}
report_url = ${slap-parameter:report_url}
working_directory = ${directory:testnode}
[buildout]
parts =
instance
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[directory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
service = $${buildout:directory}/etc/service
run = $${buildout:directory}/etc/run
agentlog = $${buildout:directory}/var/log/agent
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
testnode = $${buildout:directory}/srv/testnode
[instance]
recipe = slapos.cookbook:wrapper
command-line =
${buildout:bin-directory}/agent
--pidfile=$${buildout:directory}/srv/agent.pid
--log=$${buildout:directory}/var/log/agent/agent.log
$${agent-cfg:output}
wrapper-path = $${directory:service}/agent
output = $${:wrapper-path}
[agent-cfg]
recipe = slapos.recipe.template
url = ${agent.cfg.in:target}
output = $${directory:etc}/agent.cfg
[slap-parameter]
timeout = 3600
node_title =
test_title =
project_title =
task_count = 1
report_url =
[buildout]
parts =
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-agent:output}
[buildout]
extends =
../../component/lxml-python/buildout.cfg
../../component/git/buildout.cfg
../../component/pycurl/buildout.cfg
../../stack/slapos.cfg
parts =
agent.cfg.in
template
template-agent
slapos-cookbook
script
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
md5sum = bcd3b3cb8a305c83bb048d5ac1c583fe
mode = 0644
[template-agent]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-agent.cfg
output = ${buildout:directory}/template-agent.cfg
md5sum = 797e80e43bbf9c0d0e0c5db88a091667
mode = 0644
[agent.cfg.in]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/agent.cfg.in
md5sum = 056b1bb005080c4324cd9d755d254131
mode = 0644
[script]
recipe = zc.recipe.egg
eggs =
${pycurl:egg}
zc.buildout
slapos.core
slapos.toolbox
erp5.util
[versions]
ecdsa = 0.13
gitdb = 0.6.4
pycrypto = 2.6.1
pycurl = 7.43.0
slapos.recipe.download = 1.0
smmap = 0.9.0
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
parts = instance
[instance]
recipe = ${instance-recipe:egg}:${instance-recipe:module}
source = ${application:location}
lampconfigure_directory = ${buildout:bin-directory}/lampconfigure
httpd_binary = ${apache:location}/bin/httpd
mysql_binary = ${mariadb:location}/bin/mysql
mysql_install_binary = ${mariadb:location}/bin/mysql_install_db
mysql_upgrade_binary = ${mariadb:location}/bin/mysql_upgrade
mysqld_binary = ${mariadb:location}/libexec/mysqld
delete = install
table_name = utilisateur
constraint = `id_utilisateur`>0
[buildout]
versions = versions
parts =
template
apache-php
mariadb
eggs
instance-recipe-egg
extends =
../../stack/lamp.cfg
[application]
recipe = slapos.recipe.build:download-unpacked
url = http://www.agora-project.net/agora-project.zip
md5sum = 3fecb27ca5d3bb6c263dbd87113f3cce
[instance-recipe]
egg = slapos.cookbook
module = lamp.simple
[template]
# Default template for the instance.
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
#${:_profile_base_location_}/instance.cfg
#md5sum = Student shall put md5 of instance.cfg here
output = ${buildout:directory}/template.cfg
mode = 0644
[instance-recipe-egg]
recipe = zc.recipe.egg
eggs =
${mysql-python:egg}
${instance-recipe:egg}
slapos.toolbox[lampconfigure]
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit in one of the following categories.
# - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
# Copied verbatim.
# - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
# by the re-generation script.
# Re-generated.
# - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
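# For illustration only (hypothetical section and file names), a conforming
# entry is a section heading followed by a filename line and a hash line that
# the re-generation script maintains:
#   [example-template]
#   filename = templates/example.cfg.in
#   md5sum = <regenerated from the file contents>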
[instance]
filename = instance.cfg
md5sum = 1fbb72e4305ded9614ee80c0ee17b672
[instance-apache]
filename = instance-apachephp.cfg
md5sum = a9bc574f838985b8b8b165e2ce12fe61
[website1-template]
filename = templates/index.html.in
md5sum = c5695762361b801c284ee23a150cd1f1
[website2-template]
filename = templates/index.html.in
md5sum = c5695762361b801c284ee23a150cd1f1
[template-httpd-conf-hash]
filename = templates/apache.conf.in
md5sum = 4fc3f853abf702aeb6653942d4fb85d8
[buildout]
parts =
certificate-authority
ca-stunnel
logrotate
logrotate-entry-apache
logrotate-entry-stunnel
cron
cron-entry-logrotate
promise
frontend-ajaxupload-promise
frontend-website2-promise
frontend-website-promise
# content-promise
publish-connection-informations
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
#----------------
#--
#-- Creation of all needed directories.
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
etc = $${buildout:directory}/etc
var = $${buildout:directory}/var
srv = $${buildout:directory}/srv
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
[basedirectory]
recipe = slapos.cookbook:mkdirectory
log = $${rootdirectory:var}/log
services = $${rootdirectory:etc}/service
run = $${rootdirectory:var}/run
backup = $${rootdirectory:srv}/backup
promises = $${rootdirectory:etc}/promise
[directory]
recipe = slapos.cookbook:mkdirectory
cron-entries = $${rootdirectory:etc}/cron.d
crontabs = $${rootdirectory:etc}/crontabs
cronstamps = $${rootdirectory:etc}/cronstamps
ca-dir = $${rootdirectory:srv}/ssl
httpd-log = $${basedirectory:log}/apache
php-ini-dir = $${rootdirectory:etc}/php
tmp-php = $${rootdirectory:tmp}/php
logrotate-entries = $${rootdirectory:etc}/logrotate.d
logrotate-backup = $${basedirectory:backup}/logrotate
report = $${rootdirectory:etc}/report
stunnel-conf = $${rootdirectory:etc}/stunnel
xml-report = $${rootdirectory:var}/xml_report
www = $${rootdirectory:srv}/www/
[cadirectory]
recipe = slapos.cookbook:mkdirectory
requests = $${directory:ca-dir}/requests
private = $${directory:ca-dir}/private
certs = $${directory:ca-dir}/certs
newcerts = $${directory:ca-dir}/newcerts
crl = $${directory:ca-dir}/crl
#----------------
#--
#-- Deploy cron.
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cron-entries = $${directory:cron-entries}
crontabs = $${directory:crontabs}
cronstamps = $${directory:cronstamps}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${basedirectory:log}/crond.log
#----------------
#--
#-- Deploy logrotate.
[cron-entry-logrotate]
<= cron
recipe = slapos.cookbook:cron.d
name = logrotate
frequency = 0 0 * * *
command = $${logrotate:wrapper}
[logrotate]
recipe = slapos.cookbook:logrotate
# Binaries
logrotate-binary = ${logrotate:location}/usr/sbin/logrotate
gzip-binary = ${gzip:location}/bin/gzip
gunzip-binary = ${gzip:location}/bin/gunzip
# Directories
wrapper = $${rootdirectory:bin}/logrotate
conf = $${rootdirectory:etc}/logrotate.conf
logrotate-entries = $${directory:logrotate-entries}
backup = $${directory:logrotate-backup}
state-file = $${rootdirectory:srv}/logrotate.status
#----------------
#--
#-- Deploy stunnel.
[stunnel]
recipe = slapos.cookbook:stunnel
client = true
stunnel-binary = ${stunnel:location}/bin/stunnel
remote-host = $${mariadb-urlparse:host}
remote-port = $${mariadb-urlparse:port}
local-host = $${slap-network-information:local-ipv4}
local-port = 3306
log-file = $${basedirectory:log}/stunnel.log
config-file = $${directory:stunnel-conf}/stunnel.conf
key-file = $${directory:stunnel-conf}/stunnel.key
cert-file = $${directory:stunnel-conf}/stunnel.crt
pid-file = $${basedirectory:run}/stunnel.pid
wrapper = $${rootdirectory:bin}/raw_stunnel
post-rotate-script = $${rootdirectory:bin}/stunnel_post_rotate
[logrotate-entry-stunnel]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = stunnel
log = $${stunnel:log-file}
frequency = daily
rotate-num = 30
notifempty = true
create = true
post = $${stunnel:post-rotate-script}
#----------------
#--
#-- Certificate stuff.
[certificate-authority]
recipe = slapos.cookbook:certificate_authority
openssl-binary = ${openssl:location}/bin/openssl
ca-dir = $${directory:ca-dir}
requests-directory = $${cadirectory:requests}
wrapper = $${basedirectory:services}/ca
ca-private = $${cadirectory:private}
ca-certs = $${cadirectory:certs}
ca-newcerts = $${cadirectory:newcerts}
ca-crl = $${cadirectory:crl}
[ca-stunnel]
<= certificate-authority
recipe = slapos.cookbook:certificate_authority.request
executable = $${stunnel:wrapper}
wrapper = $${basedirectory:services}/stunnel
key-file = $${stunnel:key-file}
cert-file = $${stunnel:cert-file}
#----------------
#--
#-- Request MariaDB instance and parse its URL.
[request-mariadb]
<= slap-connection
recipe = slapos.cookbook:request
name = MariaDB
software-url = $${slap-connection:software-release-url}
software-type = mariadb
return = url
sla-computer_guid = $${slap-connection:computer-id}
[mariadb-urlparse]
recipe = slapos.cookbook:urlparse
url = $${request-mariadb:connection-url}
#----------------
#--
#-- Deploy Apache + PHP application.
[httpd-conf]
recipe = slapos.recipe.template
url = ${template-httpd-conf:location}/${template-httpd-conf:filename}
output = $${rootdirectory:etc}/apache.conf
mode = 0600
document_root = $${rootdirectory:srv}/www/
pid_file = $${basedirectory:run}/apache.pid
lock_file = $${basedirectory:run}/apache.lock
ip = $${slap-network-information:global-ipv6}
port = 8080
port2 = 8070
port3 = 8090
error_log = $${directory:httpd-log}/error.log
access_log = $${directory:httpd-log}/access.log
php_ini_dir = $${directory:php-ini-dir}
# Deploy Apache + PHP application
[apache-php]
recipe = slapos.cookbook:apachephp
source = ${application:location}
template =
configuration =
htdocs = $${directory:www}
pid-file = $${basedirectory:run}/apache.pid
lock-file = $${basedirectory:run}/apache.lock
ip = $${httpd-conf:ip}
port = $${httpd-conf:port}
url = http://[$${:ip}]:$${:port}/
error-log = $${directory:httpd-log}/error.log
access-log = $${directory:httpd-log}/access.log
php-ini-dir = $${directory:php-ini-dir}
tmp-dir = $${directory:tmp-php}
wrapper = $${basedirectory:services}/apache
httpd-binary = ${apache:location}/bin/httpd
default-conf = false
httpd-conf = $${httpd-conf:output}
mysql-username = $${mariadb-urlparse:username}
mysql-password = $${mariadb-urlparse:password}
mysql-database = $${mariadb-urlparse:path}
mysql-host = $${stunnel:local-host}
mysql-port = $${stunnel:local-port}
[logrotate-entry-apache]
<= logrotate
recipe = slapos.cookbook:logrotate.d
name = apache
log = $${apache-php:error-log} $${apache-php:access-log}
frequency = daily
rotate-num = 30
sharedscripts = true
notifempty = true
create = true
#----------------
#--
#-- Request frontends.
[request-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Frontend-Website
# XXX We have hardcoded SR URL here.
software-url = $${slap-parameter:frontend-software-url}
shared = true
config-url = http://[$${apache-php:ip}]:$${apache-php:port}/
return = site_url
config-custom_domain = $${slap-parameter:domain}
[request-frontend-ajaxupload]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Frontend-FileManager
# XXX We have hardcoded SR URL here.
software-url = $${slap-parameter:frontend-software-url}
shared = true
config-url = http://[$${httpd-conf:ip}]:$${httpd-conf:port2}/
return = site_url
config-custom_domain = $${slap-parameter:domain2}
[request-frontend2]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Frontend-Website2
# XXX We have hardcoded SR URL here.
software-url = $${slap-parameter:frontend-software-url}
shared = true
config-url = http://[$${httpd-conf:ip}]:$${httpd-conf:port3}/
return = site_url
config-custom_domain = $${slap-parameter:domain3}
#----------------
#--
#-- Publish instance parameters.
[publish-connection-informations]
recipe = slapos.cookbook:publish
website-backend-url = $${apache-php:url}
website-url = $${request-frontend:connection-site_url}
website2-backend-url = http://[$${apache-php:ip}]:$${httpd-conf:port3}
website2-url = $${request-frontend2:connection-site_url}
filemanager-backend-url = http://[$${apache-php:ip}]:$${httpd-conf:port2}
fileManager-url = $${request-frontend-ajaxupload:connection-site_url}
mysql-username = $${mariadb-urlparse:username}
mysql-password = $${mariadb-urlparse:password}
mysql-database = $${mariadb-urlparse:path}
mysql-host = $${stunnel:local-host}
mysql-port = $${stunnel:local-port}
#----------------
#--
#-- Deploy promises scripts.
[promise]
recipe = slapos.cookbook:check_port_listening
path = $${basedirectory:promises}/apache
hostname = $${apache-php:ip}
port = $${apache-php:port}
[frontend-website-promise]
recipe = slapos.cookbook:check_url_available
path = $${basedirectory:promises}/frontend-website
url = $${request-frontend:connection-site_url}
dash_path = ${dash:location}/bin/dash
curl_path = ${curl:location}/bin/curl
[frontend-website2-promise]
recipe = slapos.cookbook:check_url_available
path = $${basedirectory:promises}/frontend-website2
url = $${request-frontend2:connection-site_url}
dash_path = ${dash:location}/bin/dash
curl_path = ${curl:location}/bin/curl
[frontend-ajaxupload-promise]
recipe = slapos.cookbook:check_url_available
path = $${basedirectory:promises}/frontend-ajaxupload
url = $${request-frontend-ajaxupload:connection-site_url}
dash_path = ${dash:location}/bin/dash
curl_path = ${curl:location}/bin/curl
[content-promise]
recipe = slapos.cookbook:check_page_content
path = $${basedirectory:promises}/content
url = $${request-frontend-ajaxupload:connection-site_url}
match = AjaXplorer
dash_path = ${dash:location}/bin/dash
curl_path = ${curl:location}/bin/curl
[slap-parameter]
# Default value if no domain is specified
domain =
domain2 =
domain3 =
frontend-software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
# Default value if no ssh parameter is specified
logbox-ip =
logbox-port =
logbox-user =
logbox-passwd =
[buildout]
parts =
switch_softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch_softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${instance-apache:output}
mariadb = ${instance-mariadb:output}
[buildout]
parts =
slapos-cookbook
subversion
apache-php
instance
instance-apache
instance-mariadb
template-httpd-conf
application
website1-template
website2-template
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/apache/buildout.cfg
../../component/apache-php/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/subversion/buildout.cfg
../../component/gzip/buildout.cfg
../../component/dcron/buildout.cfg
../../component/dash/buildout.cfg
../../component/stunnel/buildout.cfg
../../component/lxml-python/buildout.cfg
[instance]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template.cfg
mode = 0644
[instance-apache]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template-apachephp.cfg
mode = 0644
[instance-mariadb]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/../../stack/lamp/mariadb/instance-mariadb.cfg.in
output = ${buildout:directory}/template-mariadb.cfg
mode = 0644
[application]
recipe = hexagonit.recipe.download
ignore-existing = true
url = http://garr.dl.sourceforge.net/project/ajaxplorer/ajaxplorer/dev-channel/4.3.4/ajaxplorer-core-4.3.4.tar.gz
md5sum = 2f2ff8bda7bbe841ef0e870c724eb74f
strip-top-level-dir = true
[website]
recipe = z3c.recipe.mkdir
path1 = ${application:location}/data/files/website
path2 = ${application:location}/data/files/website2
paths =
${:path1}
${:path2}
[website1-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${website:path1}/index.html
mode = 0644
[website2-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${website:path2}/index.html
mode = 0644
[template-httpd-conf]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/${template-httpd-conf-hash:filename}
mode = 0644
filename = apache.conf.in
md5sum = ${template-httpd-conf-hash:md5sum}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[versions]
# Numpy 1.7.0 doesn't install well
numpy = 1.16.4
# websockify 0.4.1 doesn't install well
websockify = 0.3.0
plone.recipe.command = 1.1
z3c.recipe.mkdir = 0.5
# Apache static configuration
# Automatically generated
# Basic server configuration
PidFile "${:pid_file}"
Listen ${:ip}:8070
Listen ${:ip}:8080
Listen ${:ip}:8090
PHPINIDir ${:php_ini_dir}
ServerAdmin someone@email
TypesConfig conf/mime.types
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
AddType application/x-httpd-php .php .phtml .php5 .php4
AddType application/x-httpd-php-source .phps
# Log configuration
ErrorLog "${:error_log}"
LogLevel warn
LogFormat "%h %{REMOTE_USER}i %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
LogFormat "%h %{REMOTE_USER}i %l %u %t \"%r\" %>s %b" common
CustomLog "${:access_log}" common
NameVirtualHost ${:ip}:8090
NameVirtualHost ${:ip}:8080
NameVirtualHost ${:ip}:8070
<VirtualHost ${:ip}:8070>
#ServerName www.example.com
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Require all denied
</Directory>
<Directory ${:document_root}>
Options FollowSymLinks
AllowOverride All
Require all granted
</Directory>
DocumentRoot ${:document_root}
DirectoryIndex index.html index.php
</VirtualHost>
<VirtualHost ${:ip}:8080>
#ServerName www.example.com
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Require all denied
</Directory>
<Directory ${:document_root}data/files/website/>
Options FollowSymLinks
AllowOverride All
Require all granted
</Directory>
DocumentRoot ${:document_root}data/files/website/
DirectoryIndex index.html index.php
</VirtualHost>
<VirtualHost ${:ip}:8090>
#ServerName www.example.com
# Directory protection
<Directory />
Options FollowSymLinks
AllowOverride None
Require all denied
</Directory>
<Directory ${:document_root}data/files/website2/>
Options FollowSymLinks
AllowOverride All
Require all granted
</Directory>
DocumentRoot ${:document_root}data/files/website2/
DirectoryIndex index.html index.php
</VirtualHost>
# List of modules
LoadModule unixd_module modules/mod_unixd.so
LoadModule access_compat_module modules/mod_access_compat.so
LoadModule authz_core_module modules/mod_authz_core.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule version_module modules/mod_version.so
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule mime_module modules/mod_mime.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule negotiation_module modules/mod_negotiation.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule headers_module modules/mod_headers.so
LoadModule dir_module modules/mod_dir.so
LoadModule php5_module modules/libphp5.so
LoadModule alias_module modules/mod_alias.so
<html>
<head>
<title>New Document - Created By AjaXplorer</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body bgcolor="#FFFFFF" text="#000000">
<h2>Hi, this website folder is still empty. Please go to your File Manager administration and replace this folder's contents with your new website files.</h2>
</body>
</html>
Apache:
=======
* (Minor Bug) Check that a slave is in slave_list before publishing information about it
# THIS IS NOT A BUILDOUT FILE, despite purposely using a compatible syntax.
# The only allowed lines here are (regexes):
# - "^#" comments, copied verbatim
# - "^[" section beginnings, copied verbatim
# - lines containing an "=" sign which must fit one of the following categories:
#   - "^\s*filename\s*=\s*path\s*$" where "path" is relative to this file
#     Copied verbatim.
#   - "^\s*hashtype\s*=.*" where "hashtype" is one of the values supported
#     by the re-generation script.
#     Re-generated.
#   - other lines are copied verbatim
# Substitution (${...:...}), extension ([buildout] extends = ...) and
# section inheritance (< = ...) are NOT supported (but you should really
# not need these here).
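# For instance, an allowed entry looks like the following (the section name
# and digest below are purely illustrative):
# [template-example]
# filename = templates/example.cfg.in
# md5sum = d41d8cd98f00b204e9800998ecf8427e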
[template]
filename = instance.cfg
md5sum = 4adae801babadd9aed6112c1ff209076
[template-apache-frontend]
filename = instance-apache-frontend.cfg
md5sum = ec825cba3ea775452716f1da7cd2e549
[template-apache-replicate]
filename = instance-apache-replicate.cfg.in
md5sum = 9e76028df7e93d3e32982884d5dc0913
[template-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
md5sum = 0fda1ba57fb0c4f1b51a6a94a2bd91d3
[template-slave-configuration]
filename = templates/custom-virtualhost.conf.in
md5sum = b3d5a3573fa381fec954ad5cbf1575a0
[template-replicate-publish-slave-information]
filename = templates/replicate-publish-slave-information.cfg.in
md5sum = 7c0025a35dc62eb3cbfd07a6b04d659e
[template-apache-frontend-configuration]
filename = templates/apache.conf.in
md5sum = b666d7c4a5c1fd8020713aa53b44a386
[template-custom-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
md5sum = 0fda1ba57fb0c4f1b51a6a94a2bd91d3
[template-not-found-html]
filename = templates/notfound.html
md5sum = f20d6c3d2d94fb685f8d26dfca1e822b
[template-default-virtualhost]
filename = templates/000.conf.in
md5sum = 5fcc11ea7f3eae38b0135b94dfc42dd5
[template-default-slave-virtualhost]
filename = templates/default-virtualhost.conf.in
md5sum = 7890469ecc5e7f46be5b2c3074a09b52
[template-cached-slave-virtualhost]
filename = templates/cached-virtualhost.conf.in
md5sum = f97dfcfae80527e242f285c9ebf11b27
[template-log-access]
filename = templates/template-log-access.conf.in
md5sum = a20683faba37b5b9c035783e811dd88d
[template-empty]
filename = templates/empty.in
md5sum = 7155b18edfe128825b8d1f48071454a6
[template-wrapper]
filename = templates/wrapper.in
md5sum = 975177dedf677d24e14cede5d13187ce
[template-trafficserver-records-config]
filename = templates/trafficserver/records.config.jinja2
md5sum = 84baef0a49c9a65e8f2d2ffdb8c1d39c
[template-trafficserver-storage-config]
filename = templates/trafficserver/storage.config.jinja2
md5sum = 117238225b3fc3c5b5be381815f44c67
[template-nginx-configuration]
filename = templates/nginx.cfg.in
md5sum = f334ef32234771aee69c06f843da1980
[template-nginx-eventsource-slave-virtualhost]
filename = templates/nginx-eventsource-slave.conf.in
md5sum = a5186f666acb2f040ede04c91e60408f
[template-nginx-notebook-slave-virtualhost]
filename = templates/nginx-notebook-slave.conf.in
md5sum = 82d74a7f2aceb2b4a7acc6259291b7f2
[template-apache-lazy-script-call]
filename = templates/apache-lazy-script-call.sh.in
md5sum = 44e235d0451932f1232f9d9e0ecc59f9
[template-apache-graceful-script]
filename = templates/apache-graceful-script.sh.in
md5sum = 41299cc64200e7b8217fb9dec20bb8b9
[buildout]
extends =
buildout.hash.cfg
../../stack/slapos.cfg
../../component/git/buildout.cfg
../../component/dash/buildout.cfg
../../component/lxml-python/buildout.cfg
../../component/apache/buildout.cfg
../../component/gzip/buildout.cfg
../../component/stunnel/buildout.cfg
../../component/dcron/buildout.cfg
../../component/logrotate/buildout.cfg
../../component/pycurl/buildout.cfg
../../component/python-cryptography/buildout.cfg
../../component/rdiff-backup/buildout.cfg
../../component/trafficserver/buildout.cfg
../../component/pycurl/buildout.cfg
../../component/nginx/buildout.cfg
../../stack/nodejs.cfg
# Monitoring stack (keep on bottom)
../../stack/monitor/buildout.cfg
parts +=
template
template-apache-frontend
template-apache-replicate
apache
apache-antiloris
stunnel
dcron
logrotate
rdiff-backup
nginx-push-stream-output
npm-modules
proxy-by-url
http-proxy
[apache]
# install apache not shared, so that we can install antiloris modules
shared = true
[extra-eggs]
recipe = zc.recipe.egg
eggs =
websockify
erp5.util
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
output = ${buildout:directory}/template.cfg
mode = 0644
[template-apache-frontend]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apache-frontend.cfg
output = ${buildout:directory}/template-apache-frontend.cfg
mode = 0644
[template-apache-replicate]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-apache-replicate.cfg.in
mode = 0644
[download-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/templates/${:filename}
mode = 640
[template-slave-list]
<=download-template
filename = apache-custom-slave-list.cfg.in
[template-slave-configuration]
<=download-template
filename = custom-virtualhost.conf.in
[template-replicate-publish-slave-information]
<=download-template
filename = replicate-publish-slave-information.cfg.in
[template-apache-frontend-configuration]
<=download-template
filename = apache.conf.in
[template-custom-slave-list]
<=download-template
filename = apache-default-slave-list.cfg.in
[template-not-found-html]
<=download-template
filename = notfound.html
[template-default-virtualhost]
<=download-template
filename = 000.conf.in
[template-default-slave-virtualhost]
<=download-template
filename = default-virtualhost.conf.in
[template-cached-slave-virtualhost]
<=download-template
filename = cached-virtualhost.conf.in
[template-log-access]
<=download-template
filename = template-log-access.conf.in
[template-empty]
<=download-template
filename = empty.in
[template-wrapper]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/wrapper.in
output = ${buildout:directory}/template-wrapper.cfg
mode = 0644
[template-trafficserver-records-config]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/templates/trafficserver/${:filename}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = records.config.jinja2
download-only = true
mode = 0644
[template-trafficserver-storage-config]
recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/templates/trafficserver/${:filename}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
filename = storage.config.jinja2
download-only = true
mode = 0644
# NGINX Configuration
[template-nginx-configuration]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/templates/nginx.cfg.in
output = ${buildout:directory}/template-nginx.cfg.in
mode = 0644
[template-apache-lazy-script-call]
<=download-template
filename = apache-lazy-script-call.sh.in
[template-apache-graceful-script]
<=download-template
filename = apache-graceful-script.sh.in
[template-nginx-eventsource-slave-virtualhost]
<=download-template
filename = nginx-eventsource-slave.conf.in
[template-nginx-notebook-slave-virtualhost]
<=download-template
filename = nginx-notebook-slave.conf.in
# Migrated from KVM recipe
[http-proxy]
# https://github.com/nodejitsu/node-http-proxy
recipe = slapos.recipe.build:download-unpacked
# use upstream when merged
url = https://lab.nexedi.com/nexedi/node-http-proxy/repository/archive.zip?ref=a5d3aff428ee8d840068b439f6ce121077f1144f
md5sum = 65602466066444c718215de41f546585
[proxy-by-url]
# https://github.com/dominictarr/proxy-by-url
recipe = slapos.recipe.build:download-unpacked
# use upstream when merged
url = https://lab.nexedi.com/nexedi/proxy-by-url/repository/archive.zip?ref=59fcb11a3e00c45b4b0362e76f29653abb313072
md5sum = c383e0c5ff31b56f7987466e8304c941
[npm-modules]
recipe = plone.recipe.command
destination = ${buildout:parts-directory}/${:_buildout_section_name_}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command =
export HOME=${:location};
rm -fr ${:destination} &&
mkdir -p ${:destination} &&
cd ${:destination} &&
${nodejs:location}/bin/node ${nodejs:location}/bin/npm install colors@0.6.0-1 &&
${nodejs:location}/bin/node ${nodejs:location}/bin/npm install socket.io@0.8.7 &&
${nodejs:location}/bin/node ${nodejs:location}/bin/npm install socket.io-client@0.8.7 &&
${nodejs:location}/bin/node ${nodejs:location}/bin/npm install optimist@0.3.1 &&
${nodejs:location}/bin/node ${nodejs:location}/bin/npm install pkginfo@0.2.3
# Development profile of apache-frontend.
# Exactly the same as software.cfg, but fetches slapos.cookbook
# from its git repository instead of the released stable version,
# allowing to play with a bleeding-edge environment.
# You'll need to run buildout twice for this profile.
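# A minimal sketch of the expected workflow (the profile file name below is
# an assumption; use whichever development profile you actually build):
#   bin/buildout -c software-dev.cfg   # first run: clones the git repositories
#   bin/buildout -c software-dev.cfg   # second run: builds against the checkouts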
[buildout]
extends =
# Extend in this order, otherwise "parts" will be taken from git profile
common.cfg
parts +=
slapos.toolbox-dev
[slapos.toolbox-dev]
recipe = zc.recipe.egg:develop
egg = slapos.toolbox
setup = ${slapos.toolbox-repository:location}
{
"type": "object",
"$schema": "http://json-schema.org/draft-04/schema",
"title": "Input Parameters",
"properties": {
"public-ipv4": {
"title": "Public IPv4",
"description": "Public ipv4 of the frontend (the one Apache will be indirectly listening to).",
"type": "string"
},
"ip-read-limit": {
"title": "IPReadLimit",
"description": "Value used to set IPReadLimit Parameter for antiloris.",
"type": "integer",
"default": 10
},
"mpm-server-limit": {
"title": "ServerLimit",
"description": "Value used to set ServerLimit on apache configuration.",
"type": "integer",
"default": 16
},
"mpm-max-clients": {
"title": "MaxClients",
"description": "Value used to set MaxClients on apache configuration.",
"type": "integer",
"default": 400
},
"mpm-start-servers": {
"title": "StartServers",
"description": "Value used to set StartServers on apache configuration.",
"type": "integer",
"default": 3
},
"mpm-thread-per-child": {
"title": "ThreadsPerChild",
"description": "Value used to set ThreadsPerChild on apache configuration.",
"type": "integer",
"default": 25
},
"mpm-graceful-shutdown-timeout": {
"title": "ThreadsPerChild",
"description": "Value used to set ThreadsPerChild on apache configuration.",
"type": "integer",
"default": 5
},
"enable-http2-by-default": {
"title": "Enable HTTP2 by Default",
"description": "Use HTTP2 as default Protocol",
"type": "string",
"default": "true",
"enum": [
"true",
"false"
]
},
"re6st-verification-url": {
"title": "Test Verification URL",
"description": "Url to verify if the internet and/or re6stnet is working.",
"type": "string"
},
"-frontend-authorized-slave-string": {
"title": "Authorized Slave String",
"description": "List of SOFTINST-XXX of Slaves, separated by space which is allowed to use custom configuration.",
"type": "string"
},
"apache-key": {
"title": "Apache Key",
"description": "Apache Key",
"textarea": true,
"type": "string"
},
"apache-certificate": {
"title": "Apache Certificate",
"description": "Apache Certificate",
"textarea": true,
"type": "string"
},
"apache-ca-certificate": {
"title": "Apache CA Certificate",
"description": "Apache CA Certificate",
"textarea": true,
"type": "string"
},
"domain": {
"title": "Domain",
"description": "Base Domain for create subdomains (ie.: example.com).",
"type": "string",
"pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$"
},
"nginx-domain": {
"title": "Nginx Domain",
"description": "Base Domain for create subdomains (ie.: example2.com) for websocket, notebook and eventsource.",
"type": "string",
"pattern": "^([a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}$"
},
"-frontend-quantity": {
"title": "Frontend Replication Quantity",
"description": "Quantity of Frontends Replicate.",
"type": "integer"
}
}
}
{% if slap_software_type in software_type -%}
[jinja2-template-base]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:directory}/${:filename}
extra-context =
context =
import json_module json
key eggs_directory buildout:eggs-directory
key develop_eggs_directory buildout:develop-eggs-directory
key slap_software_type slap-parameter:slap_software_type
key slave_instance_list slap-parameter:slave_instance_list
${:extra-context}
{% set part_list = [] -%}
{% set single_type_key = 'single-' %}
{% if slap_software_type == "replicate" %}
{% set frontend_type = slapparameter_dict.pop('-frontend-type', 'single-default') -%}
{% elif slap_software_type in ['default', 'RootSoftwareInstance'] -%}
{% set frontend_type = "%s%s" % (single_type_key, 'custom-personal') -%}
{% else -%}
{% set frontend_type = "%s%s" % (single_type_key, slap_software_type) -%}
{% endif -%}
{% set frontend_quantity = slapparameter_dict.pop('-frontend-quantity', '1') | int -%}
{% set slave_list_name = 'extra_slave_instance_list' -%}
{% set frontend_list = [] %}
{% set frontend_section_list = [] %}
{% set request_dict = {} %}
{% set namebase = 'apache-frontend' -%}
# XXX Dirty hack: it is not possible to define the default value earlier
{% set sla_computer_apache_1_key = '-sla-1-computer_guid' -%}
{% if not sla_computer_apache_1_key in slapparameter_dict -%}
{% do slapparameter_dict.__setitem__(sla_computer_apache_1_key, '${slap-connection:computer-id}') -%}
{% endif -%}
# Here we request each frontend individually.
# SLA parameters are checked for and added to the request if present.
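# For instance (the values below are purely illustrative), requesting two
# frontends pinned to specific computers would pass parameters such as:
#   -frontend-quantity = 2
#   -sla-1-computer_guid = COMP-1234
#   -sla-2-computer_guid = COMP-5678
# and any "-frontend-config-<i>-<key>" parameter is forwarded to frontend <i>.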
{% for i in range(1, frontend_quantity + 1) -%}
{% set frontend_name = "%s-%s" % (namebase, i) -%}
{% set request_section_title = 'request-%s' % frontend_name -%}
{% set sla_key = "-sla-%s-" % i -%}
{% set sla_key_length = sla_key | length %}
{% set sla_dict = {} %}
{% set config_key = "-frontend-config-%s-" % i %}
{% set config_key_length = config_key | length %}
{% set config_dict = {} %}
{% for key in slapparameter_dict.keys() %}
{% if key.startswith(sla_key) %}
{% do sla_dict.__setitem__(key[sla_key_length:], slapparameter_dict.pop(key)) %}
# We check for configuration specific to this frontend
{% elif key.startswith(config_key) -%}
{% do config_dict.__setitem__(key[config_key_length:], slapparameter_dict.pop(key)) %}
{% endif -%}
{% endfor -%}
{% do frontend_list.append(frontend_name) -%}
{% do frontend_section_list.append(request_section_title) -%}
{% do part_list.append(request_section_title) -%}
# Fill the request dict for this frontend
{% set state_key = "-frontend-%s-state" % i %}
{% do request_dict.__setitem__(request_section_title,
{
'config': config_dict,
'name': frontend_name,
'sla': sla_dict,
'state': slapparameter_dict.pop(state_key, None)
}) %}
{% endfor -%}
{% set authorized_slave_string = slapparameter_dict.pop('-frontend-authorized-slave-string', '') -%}
{% set authorized_slave_list = [] %}
{% set rejected_slave_list = [] %}
{% for slave in slave_instance_list %}
{% if not (slave.has_key('apache_custom_http') and not slave.get('slave_reference') in authorized_slave_string) %}
{% do authorized_slave_list.append(slave) %}
{% else %}
{% do rejected_slave_list.append(slave.get('slave_reference')) %}
{% endif %}
{% endfor -%}
[replicate]
<= slap-connection
recipe = slapos.cookbook:requestoptional
config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
config-monitor-username = ${monitor-instance-parameter:username}
config-monitor-password = ${monitor-htpasswd:passwd}
{% set frontend_software_url_key = "-frontend-software-release-url" %}
{% if slapparameter_dict.has_key(frontend_software_url_key) %}
software-url = {{ slapparameter_dict.pop(frontend_software_url_key) }}
{% else %}
software-url = ${slap-connection:software-release-url}
{% endif %}
software-type = {{frontend_type}}
return = private-ipv4 public-ipv4 slave-instance-information-list monitor-base-url
{% for section, frontend_request in request_dict.iteritems() %}
[{{section}}]
<= replicate
name = {{ frontend_request.get('name') }}
{% if frontend_request.get('state') %}
state = {{ frontend_request.get('state') }}
{% endif%}
{% set slave_configuration_dict = frontend_request.get('config') %}
{% do slave_configuration_dict.update(**slapparameter_dict) %}
{% do slave_configuration_dict.__setitem__(slave_list_name, json_module.dumps(authorized_slave_list)) %}
{% do slave_configuration_dict.__setitem__("frontend-name", frontend_request.get('name')) %}
config-_ = {{ json_module.dumps(slave_configuration_dict) }}
{% if frontend_request.get('sla') %}
{% for parameter, value in frontend_request.get('sla').iteritems() -%}
sla-{{ parameter }} = {{ value }}
{% endfor -%}
{% endif -%}
{% endfor -%}
[publish-information]
<= monitor-publish
recipe = slapos.cookbook:publish
domain = {{ slapparameter_dict.get('domain') }}
slave-amount = {{ slave_instance_list | length }}
accepted-slave-amount = {{ authorized_slave_list | length }}
rejected-slave-amount = {{ rejected_slave_list | length }}
rejected-slave-list = {{ json_module.dumps(rejected_slave_list) }}
#----------------------------
#--
#-- Publish slave information
[publish-slave-information]
recipe = slapos.cookbook:softwaretype
default = ${dynamic-publish-slave-information:rendered}
RootSoftwareInstance = ${dynamic-publish-slave-information:rendered}
replicate = ${dynamic-publish-slave-information:rendered}
custom-personal = ${dynamic-publish-slave-information:rendered}
custom-group = ${dynamic-publish-slave-information:rendered}
[slave-information]
{% for frontend_section in frontend_section_list -%}
{{ frontend_section }} = {{ "${%s:connection-slave-instance-information-list}" % frontend_section }}
{% endfor -%}
[dynamic-publish-slave-information]
< = jinja2-template-base
template = {{ template_publish_slave_information }}
filename = dynamic-publish-slave-information.cfg
extensions = jinja2.ext.do
extra-context =
section slave_information slave-information
[monitor-conf-parameters]
monitor-url-list +=
{% for frontend in frontend_section_list %}
{{ ' ${' + frontend + ':connection-monitor-base-url}' }}
{% endfor -%}
[buildout]
extends = {{ template_monitor }}
parts =
monitor-base
publish-slave-information
publish-information
{% for part in part_list -%}
{{ ' %s' % part }}
{% endfor -%}
# publish-information
eggs-directory = {{ eggs_directory }}
develop-eggs-directory = {{ develop_eggs_directory }}
offline = true
[slap_connection]
# Kept for backward compatibility
computer_id = ${slap-connection:computer-id}
partition_id = ${slap-connection:partition-id}
server_url = ${slap-connection:server-url}
software_release_url = ${slap-connection:software-release-url}
key_file = ${slap-connection:key-file}
cert_file = ${slap-connection:cert-file}
[slap-parameter]
slave_instance_list =
-frontend-quantity = 1
-frontend-type = single-default
{%- endif %}
[buildout]
extends = common.cfg
[versions]
PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3
ecdsa = 0.13
gitdb = 0.6.4
plone.recipe.command = 1.1
pycrypto = 2.6.1
rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 4.4
smmap = 0.9.0
numpy = 1.16.4
websockify = 0.8.0
<VirtualHost *:{{ https_port }}>
{{ slave_parameter.get('apache_custom_https', '') }}
</VirtualHost>
<VirtualHost *:{{ http_port }}>
{{ slave_parameter.get('apache_custom_http', '') }}
</VirtualHost>