Commit ef4c8b23 authored by Alain Takoudjou's avatar Alain Takoudjou

Merge branch 'master' into openstack

parents 3265b84f f48fea4a
Changes
=======
0.84 (2013-09-30)
-----------------
* Request.py: improve instance-state handling. [ba5f160]
* Resilient recipe: remove hashing of urls/names. [ee2aec8]
* Resilient pbs recipe: recover from rdiff-backup failures. [be7f2fc, 92ee0c3]
* Resilience: add pidfiles in PBS. [0b3ad5c]
* Resilient: don't hide exception, print it. [05b3d64, d2b0494]
* Resiliency: Only keep 10 increments of backup. [4e89e33]
* KVM SR: add fallback in case of download exception. [de8d796]
* slaprunner: don't check certificate for importer. [53dc772]
0.83.1 (2013-09-10)
-------------------
......
......@@ -22,12 +22,12 @@ depends =
${libpng:so_version}
configure-options =
--enable-tee=yes
--enable-xlib=no
--enable-xlib=yes
environment =
PATH=${freetype:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${fontconfig:location}/lib/pkgconfig:${freetype:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig:${libpng:location}/lib/pkgconfig:${pixman:location}/lib/pkgconfig:${libX11:location}/lib/pkgconfig:${libXrender:location}/lib/pkgconfig
CPPFLAGS=-I${libpng:location}/include/ -I${zlib:location}/include -I${libX11:location}/include/ -I${xproto:location}/include -I${kbproto:location}/include -I${libXrender:location}/include -I${render:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${libXrender:location}/lib -Wl,-rpath=${libXrender:location}/lib
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${libXrender:location}/lib -Wl,-rpath=${libXrender:location}/lib -L${libX11:location}/lib
LD_LIBRARY_PATH=${render:location}/lib:${libX11:location}/lib:${libXrender:location}/lib
[pango]
......
......@@ -25,10 +25,10 @@ download-only = true
[mariadb]
recipe = slapos.recipe.cmmi
version = 5.5.32
version = 5.5.33a
revision = 1
url = http://downloads.askmonty.org/f/mariadb-${:version}/kvm-tarbake-jaunty-x86/mariadb-${:version}.tar.gz/from/http://ftp.osuosl.org/pub/mariadb
md5sum = 565c2dce6a2fb027c9d0ffbae4934135
md5sum = 00449a034b88490f16bd679b800bb850
# compile directory is required to build mysql plugins.
keep-compile-dir = true
patch-options = -p0
......@@ -56,7 +56,7 @@ environment =
CMAKE_PROGRAM_PATH=${cmake:location}/bin
CMAKE_INCLUDE_PATH=${libaio:location}/include:${ncurses:location}/include:${openssl:location}/include:${readline5:location}/include:${zlib:location}/include
CMAKE_LIBRARY_PATH=${libaio:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${readline5:location}/lib:${zlib:location}/lib
LDFLAGS=-L${libaio:location}/lib
LDFLAGS=-L${libaio:location}/lib -L${zlib:location}/lib
[mroonga-mariadb]
# mroonga - a storage engine for MySQL. It provides a fast fulltext search feature to all MySQL users.
......
......@@ -28,7 +28,7 @@ from setuptools import setup, find_packages
import glob
import os
version = '0.83.1'
version = '0.84'
name = 'slapos.cookbook'
long_description = open("README.txt").read() + "\n" + \
open("CHANGES.txt").read() + "\n"
......
# -*- coding: utf-8 -*-
import logging
import time
import traceback
import slapos
from slapos.slap.slap import NotFoundError
......@@ -65,6 +66,7 @@ def takeover(server_url, key_file, cert_file, computer_guid,
cp_winner.rename(new_name=cp_exporter_ref)
break
except NotFoundError:
traceback.print_exc()
log.warning('Impossible to rename. Retrying in a few seconds...')
log.debug('Renamed.')
......
......@@ -60,7 +60,11 @@ def getSocketStatus(host, port):
# Download existing hard drive if needed at first boot
if not os.path.exists(disk_path) and virtual_hard_drive_url != '':
print('Downloading virtual hard drive...')
try:
urllib.urlretrieve(virtual_hard_drive_url, disk_path)
except:
# Clean up any partially downloaded image before re-raising; the file may
# not exist if the download failed before anything was written.
if os.path.exists(disk_path):
os.remove(disk_path)
raise
md5sum = virtual_hard_drive_md5sum.strip()
if md5sum:
print('Checking MD5 checksum...')
......
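The hunk above cleans up after a failed download and is truncated just before the actual checksum comparison. As a hedged sketch (the helper name and chunk size are illustrative, not from this commit), such a check usually reads the image in chunks so a multi-gigabyte file does not have to fit in memory:

    import hashlib

    def file_md5sum(path, chunk_size=1024 * 1024):
        # Hash the image chunk by chunk; disk images can be several GB.
        digest = hashlib.md5()
        with open(path, 'rb') as image:
            for chunk in iter(lambda: image.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # Hypothetical use, mirroring the variables of the snippet above:
    # if md5sum and file_md5sum(disk_path) != md5sum:
    #     raise Exception('MD5 mismatch for the downloaded virtual hard drive')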
......@@ -50,6 +50,9 @@ class Recipe(GenericBaseRecipe):
class Callback(GenericBaseRecipe):
def createCallback(self, notification_id, callback):
# XXX: hashing the name here and in
# slapos.toolbox/slapos/pubsub/__init__.py is completely messed up and
# prevents any debugging.
callback_id = sha512(notification_id).hexdigest()
filepath = os.path.join(self.options['callbacks'], callback_id)
......
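The XXX comment above, together with the 0.84 changelog entry "remove hashing of urls/names", points at the same problem: a sha512 of the identifier yields an opaque 128-character file name that cannot be traced back to a slave by eye. A tiny hedged illustration (the notification id is made up):

    import hashlib

    notification_id = 'computer-partition-kvm-1-pull'  # hypothetical slave notification id
    # 128 hex characters, impossible to relate to the slave when browsing the partition:
    print(hashlib.sha512(notification_id.encode('utf-8')).hexdigest())
    # The readable identifier the PBS recipe now uses for its files instead:
    print(notification_id)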
......@@ -25,12 +25,12 @@
#
##############################################################################
import hashlib
import json
import os
import signal
import subprocess
import sys
import textwrap
import urlparse
from slapos.recipe.librecipe import GenericSlapRecipe
......@@ -87,14 +87,17 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
url = entry.get('url')
if not url:
raise ValueError('Missing URL parameter for PBS recipe')
parsed_url = urlparse.urlparse(url)
# We assume that, thanks to sha512, there are no collisions
url_hash = hashlib.sha512(url).hexdigest()
name_hash = hashlib.sha512(entry['name']).hexdigest()
slave_type = entry['type']
if not slave_type in ['pull', 'push']:
raise ValueError('type parameter must be either pull or push.')
promise_path = os.path.join(self.options['promises-directory'],
url_hash)
parsed_url = urlparse.urlparse(url)
slave_id = entry['notification-id']
print 'Processing PBS slave %s with type %s' % (slave_id, slave_type)
promise_path = os.path.join(self.options['promises-directory'], slave_id)
promise_dict = self.promise_base_dict.copy()
promise_dict.update(user=parsed_url.username,
host=parsed_url.hostname,
......@@ -104,72 +107,109 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
promise_dict)
path_list.append(promise)
host = parsed_url.hostname
known_hosts_file[host] = entry['server-key']
notifier_wrapper_path = os.path.join(self.options['wrappers-directory'], slave_id)
rdiff_wrapper_path = notifier_wrapper_path + '_raw'
# Create the rdiff-backup wrapper
# It is useful to separate it from the notifier so that we can run it
# manually.
rdiffbackup_parameter_list = []
# XXX use -y because the host might not yet be in the
# trusted hosts file until the next time slapgrid is run.
remote_schema = '%(ssh)s -y -p %%s %(user)s@%(host)s' % \
{
rdiffbackup_remote_schema = '%(ssh)s -y -p %%s %(user)s@%(host)s' % {
'ssh': self.options['sshclient-binary'],
'user': parsed_url.username,
'host': parsed_url.hostname,
}
parameters = ['--remote-schema', remote_schema]
remote_directory = '%(port)s::%(path)s' % {'port': parsed_url.port,
'path': parsed_url.path}
local_directory = self.createDirectory(self.options['directory'],
name_hash)
if entry['type'] == 'push':
parameters.extend(['--restore-as-of', 'now'])
parameters.append('--force')
parameters.extend([local_directory, remote_directory])
comments = ['','Push data to a PBS *-import instance.','']
else:
parameters.extend([remote_directory, local_directory])
comments = ['','Pull data from a PBS *-export instance.','']
wrapper_basepath = os.path.join(self.options['wrappers-directory'],
url_hash)
if 'notify' in entry:
wrapper_path = wrapper_basepath + '_raw'
else:
wrapper_path = wrapper_basepath
wrapper = self.createWrapper(name=wrapper_path,
local_directory = self.createDirectory(self.options['directory'], entry['name'])
if slave_type == 'push':
# Create a simple rdiff-backup wrapper that will push
rdiffbackup_parameter_list.extend(['--remote-schema', rdiffbackup_remote_schema])
rdiffbackup_parameter_list.extend(['--restore-as-of', 'now'])
rdiffbackup_parameter_list.append('--force')
rdiffbackup_parameter_list.append(local_directory)
rdiffbackup_parameter_list.append(remote_directory)
comments = ['', 'Push data to a PBS *-import instance.', '']
rdiff_wrapper = self.createWrapper(
name=rdiff_wrapper_path,
command=self.options['rdiffbackup-binary'],
parameters=parameters,
comments = comments)
path_list.append(wrapper)
parameters=rdiffbackup_parameter_list,
comments=comments,
pidfile=os.path.join(self.options['run-directory'], '%s_raw.pid' % slave_id),
)
elif slave_type == 'pull':
# Wrap the rdiff-backup call in a script that checks the consistency of the backup
# We need to manually escape the remote schema
rdiffbackup_parameter_list.extend(['--remote-schema', '"%s"' % rdiffbackup_remote_schema])
rdiffbackup_parameter_list.append(remote_directory)
rdiffbackup_parameter_list.append(local_directory)
comments = ['', 'Pull data from a PBS *-export instance.', '']
rdiff_wrapper_template = textwrap.dedent("""\
#!/bin/sh
# %(comment)s
RDIFF_BACKUP="%(rdiffbackup_binary)s"
$RDIFF_BACKUP %(rdiffbackup_parameter)s
if [ ! $? -eq 0 ]; then
# Check the backup, go to the last consistent backup, so that next
# run will be okay.
echo "Checking backup directory..."
$RDIFF_BACKUP --check-destination-dir %(local_directory)s
if [ ! $? -eq 0 ]; then
# Here, two possibilities:
# * The first backup failed. It is safe to remove it since there is nothing valuable there.
# * The backup had completed, but is now in an inconsistent state. Not safe to remove it.
echo "Impossible to check backup: moving it to a safe place."
# XXX: bang
mv %(local_directory)s %(local_directory)s.$(date +%%s)
fi
else
# Everything's okay, cleaning up...
$RDIFF_BACKUP --remove-older-than %(remove_backup_older_than)s --force %(local_directory)s
fi
""")
rdiff_wrapper_content = rdiff_wrapper_template % {
'comment': comments,
'rdiffbackup_binary': self.options['rdiffbackup-binary'],
'local_directory': local_directory,
'rdiffbackup_parameter': ' \\\n '.join(rdiffbackup_parameter_list),
# XXX: only 10 increments is not enough by default.
'remove_backup_older_than': entry.get('remove-backup-older-than', '3B')
}
rdiff_wrapper = self.createFile(
name=rdiff_wrapper_path,
content=rdiff_wrapper_content,
mode=0700
)
if 'notify' in entry:
feed_url = '%s/get/%s' % (self.options['notifier-url'],
entry['notification-id'])
wrapper = self.createNotifier(notifier_binary=self.options['notifier-binary'],
wrapper=wrapper_basepath,
executable=wrapper_path,
path_list.append(rdiff_wrapper)
# Create notifier wrapper
notifier_wrapper = self.createNotifier(
notifier_binary=self.options['notifier-binary'],
wrapper=notifier_wrapper_path,
executable=rdiff_wrapper,
log=os.path.join(self.options['feeds'], entry['notification-id']),
title=entry.get('title', 'Untitled'),
title=entry.get('title', slave_id),
notification_url=entry['notify'],
feed_url=feed_url,
feed_url='%s/get/%s' % (self.options['notifier-url'], entry['notification-id']),
pidfile=os.path.join(self.options['run-directory'], '%s.pid' % slave_id)
)
path_list.append(wrapper)
#self.setConnectionDict(dict(feed_url=feed_url), entry['slave_reference'])
path_list.append(notifier_wrapper)
if 'on-notification' in entry:
path_list.append(self.createCallback(str(entry['on-notification']),
wrapper))
notifier_wrapper))
else:
cron_entry = os.path.join(self.options['cron-entries'], url_hash)
cron_entry = os.path.join(self.options['cron-entries'], slave_id)
with open(cron_entry, 'w') as cron_entry_file:
cron_entry_file.write('%s %s' % (entry['frequency'], wrapper))
cron_entry_file.write('%s %s' % (entry['frequency'], notifier_wrapper))
path_list.append(cron_entry)
return path_list
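In the pull branch above, the final cleanup relies on rdiff-backup's --remove-older-than syntax, where a value ending in "B" counts backup sessions rather than days, so the default of 3B keeps only the three most recent increments. A hedged sketch of the command the rendered wrapper ends up running (the backup directory path is illustrative):

    # Drop everything older than the 3rd newest backup session; --force is needed
    # because more than one increment may be removed in a single call.
    rdiff-backup --remove-older-than 3B --force /srv/backup/pbs/my-kvm-slave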
......@@ -194,7 +234,6 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
slaves = json.loads(self.options['slave-instance-list'])
known_hosts = KnownHostsFile(self.options['known-hosts'])
with known_hosts:
# XXX this API could be cleaner
for slave in slaves:
path_list.extend(self.add_slave(slave, known_hosts))
else:
......@@ -209,4 +248,3 @@ class Recipe(GenericSlapRecipe, Notify, Callback):
path_list.append(wrapper)
return path_list
......@@ -119,8 +119,11 @@ class Recipe(object):
))
slave = options.get('slave', 'false').lower() in \
librecipe.GenericBaseRecipe.TRUE_VALUES
# By default, fall back to the state requested for this partition. XXX: this way of doing it is ugly and dangerous.
requested_state = options.get('state', buildout['slap-connection'].get('requested','started'))
options['requested-state'] = requested_state
slap = slapmodule.slap()
slap.initializeConnection(
options['server-url'],
......@@ -134,6 +137,7 @@ class Recipe(object):
self._raise_request_exception = None
self._raise_request_exception_formatted = None
self.instance = None
# Try to do the request and fetch parameter dict...
try:
self.instance = request(software_url, software_type,
......@@ -141,14 +145,17 @@ class Recipe(object):
filter_kw=filter_kw, shared=slave, state=requested_state)
return_parameter_dict = self._getReturnParameterDict(self.instance,
return_parameters)
# Fetch the instance-guid and the instance-state
# Note: SlapOS Master does not support it for slave instances
if not slave:
try:
options['instance-guid'] = self.instance.getInstanceGuid()
# XXX: deprecated, to be removed
options['instance_guid'] = self.instance.getInstanceGuid()
options['instance-state'] = self.instance.getState()
except (slapmodule.ResourceNotReady, AttributeError):
# Backward compatibility. Old SlapOS master and core don't know this.
self.logger.warning("Impossible to fetch instance GUID.")
self.logger.warning("Impossible to fetch instance GUID nor state.")
except (slapmodule.NotFoundError, slapmodule.ServerError, slapmodule.ResourceNotReady) as exc:
self._raise_request_exception = exc
self._raise_request_exception_formatted = traceback.format_exc()
......@@ -162,13 +169,6 @@ class Recipe(object):
except KeyError:
if self.failed is None:
self.failed = param
options['requested-state'] = requested_state
try:
options['instance-state'] = self.instance.getState()
except slapmodule.ResourceNotReady:
# Odd case: SlapOS Master doesn't send the state of a slave partition.
# XXX Should be fixed in the SlapOS Master, we should not care here.
pass
def _filterForStorage(self, partition_parameter_kw):
return partition_parameter_kw
......
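The request-recipe changes above expose the requested state and, when the master supports it, the actual instance state as options of the requesting section. As a usage sketch (section names are illustrative, not part of this commit), a profile could publish both values back to the user:

    [request-my-kvm]
    recipe = slapos.cookbook:request
    # ... usual request options (software-url, software-type, name, ...) ...
    state = started

    [publish-state]
    recipe = slapos.cookbook:publish
    # instance-state is filled after the request; it stays absent for slave instances.
    my-kvm-requested-state = ${request-my-kvm:requested-state}
    my-kvm-state = ${request-my-kvm:instance-state}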
......@@ -124,8 +124,8 @@ class ImportRecipe(GenericBaseRecipe):
ifs=$IFS IFS=';'
read user pass remaining < %(etc-directory)s/.users
IFS=$ifs
%(curl-binary)s -vg6L -F clogin="$user" -F cpwd="$pass" --dump-header login_cookie %(backend-url)s/doLogin;
%(curl-binary)s -vg6LX POST --cookie login_cookie --max-time 5 %(backend-url)s/runSoftwareProfile;
%(curl-binary)s --insecure -vg6L -F clogin="$user" -F cpwd="$pass" --dump-header login_cookie %(backend-url)s/doLogin;
%(curl-binary)s --insecure -vg6LX POST --cookie login_cookie --max-time 5 %(backend-url)s/runSoftwareProfile;
rm -f login_cookie
""" % self.options)
self.createExecutable(wrapper, content=content)
......
......@@ -39,7 +39,7 @@ environment = environment
[environment]
CPPFLAGS = -I${python2.7:location}/include/python2.7 -I${libxml2:location}/include -I${libxslt:location}/include
LDFLAGS = -L${python2.7:location}/lib -L${libxml2:location}/lib -L${libxslt:location}/lib -L${zlib:location}/lib
PYTHONPATH = ${python-setuptools:location}
PYTHONPATH = ${buildout:eggs-directory}/setuptools-${versions:setuptools}-py2.7.egg
LD_LIBRARY_PATH = ${libxslt:location}/lib:${libxml2:location}/lib:${zlib:location}/lib
[sh-environment]
......
......@@ -45,7 +45,7 @@ branch = master
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
md5sum = 6b919dd280d6972afe0f50d389ba5fe6
md5sum = e8b87313667c944567029c61c9c0bd2e
output = ${buildout:directory}/template.cfg
mode = 640
......@@ -94,7 +94,7 @@ netifaces = 0.8
# slapos.core==0.28.5
# zc.buildout==1.6.0-dev-SlapOS-006
# zc.recipe.egg==1.3.2
setuptools = 0.6c12dev-r88846
setuptools = 0.9.8
# Required by:
# slapos.cookbook==0.65
......
......@@ -94,15 +94,17 @@ mode = 0644
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-kvm-resilient.cfg.jinja2
mode = 644
md5sum = 6753004b582c0470bd028253ce1964ad
#md5sum = 6753004b582c0470bd028253ce1964ad
download-only = true
on-update = true
[template-kvm-resilient-test]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/instance-kvm-resilient-test.cfg.jinja2
md5sum = 027d68d9decbc6aec59365fa723975d7
#md5sum = 027d68d9decbc6aec59365fa723975d7
mode = 0644
download-only = true
on-update = true
[template-kvm-import]
recipe = slapos.recipe.template
......@@ -115,7 +117,7 @@ mode = 0644
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/template/kvm-import.sh.in
filename = kvm-import.sh.in
md5sum = a731372420dc59c0b5ba7bc5f39a14ad
md5sum = e03ed049cddd8d157228b09e1ebc071a
download-only = true
mode = 0755
......
......@@ -42,15 +42,19 @@ recipe = slapos.cookbook:request
software-url = ${slap-connection:software-release-url}
software-type = kvm-resilient
name = Resilient KVM (Root Instance)
config = virtual-hard-drive-url virtual-hard-drive-md5sum resiliency-backup-periodicity
{% set cluster_parameter_dict = slapparameter_dict.get('cluster', {}) -%}
config = virtual-hard-drive-url virtual-hard-drive-md5sum resiliency-backup-periodicity {{ cluster_parameter_dict.keys() | join(' ') }}
{% for key, value in cluster_parameter_dict.items() -%}
config-{{ key }} = {{ dumps(value) }}
{% endfor -%}
config-virtual-hard-drive-url = ${slap-parameter:virtual-hard-drive-url}
config-virtual-hard-drive-md5sum = ${slap-parameter:virtual-hard-drive-md5sum}
config-resiliency-backup-periodicity = */5
# We don't use the url parameter, but we want it to be there to make sure the root instance is ready.
return = url
# XXX What to do?
#sla = instance_guid
#sla-instance_guid = ${slap-parameter:frontend-instance-guid}
sla = computer_guid
sla-computer_guid = ${slap-connection:computer-id}
[slap-parameter]
virtual-hard-drive-url = https://softinst43236.host.vifib.net/data/public/8e2138.php?dl=true
......
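The Jinja2 loop added above forwards every key of the optional "cluster" parameter dict to the requested instance. For illustration, assuming a user passes cluster = {"nbd-host": "nbd.example.org", "nbd-port": 1024} (hypothetical values), the rendered section would contain roughly the following, the exact quoting depending on the dumps() helper:

    config = virtual-hard-drive-url virtual-hard-drive-md5sum resiliency-backup-periodicity nbd-host nbd-port
    config-nbd-host = "nbd.example.org"
    config-nbd-port = 1024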
......@@ -13,7 +13,7 @@ parts +=
{{ parts.replicate("kvm", "3") }}
publish-connection-informations
{{ replicated.replicate("kvm", "3", "kvm-export", "kvm-import") }}
{{ replicated.replicate("kvm", "3", "kvm-export", "kvm-import", slapparameter_dict=slapparameter_dict) }}
# Bubble down the parameters of the requested instance to the user
[request-kvm]
......
......@@ -34,6 +34,7 @@ cert = $${slap-connection:cert-file}
recipe = slapos.recipe.template:jinja2
template = ${template-kvm-resilient:location}/instance-kvm-resilient.cfg.jinja2
rendered = $${buildout:directory}/template-kvm-resilient.cfg
extensions = jinja2.ext.do
context =
key develop_eggs_directory buildout:develop-eggs-directory
key eggs_directory buildout:eggs-directory
......
......@@ -48,53 +48,58 @@ signature-certificate-list =
-----END CERTIFICATE-----
[versions]
Werkzeug = 0.9.3
apache-libcloud = 0.13.0
Werkzeug = 0.9.4
apache-libcloud = 0.13.2
async = 0.6.1
buildout-versions = 1.7
erp5.util = 0.4.36
gitdb = 0.5.4
itsdangerous = 0.22
itsdangerous = 0.23
lxml = 3.2.3
meld3 = 0.6.10
plone.recipe.command = 1.1
psutil = 1.1.0
pycrypto = 2.6
rdiff-backup = 1.0.5
slapos.cookbook = 0.79
slapos.cookbook = 0.84
slapos.recipe.cmmi = 0.2
slapos.recipe.download = 1.0.dev-r4053
slapos.recipe.template = 2.4.2
slapos.toolbox = 0.35.0
slapos.toolbox = 0.37
smmap = 0.8.2
websockify = 0.5.1
z3c.recipe.scripts = 1.0.1
# Required by:
# slapos.core==0.35.1
# slapos.toolbox==0.35.0
# slapos.toolbox==0.37
Flask = 0.10.1
# Required by:
# slapos.toolbox==0.35.0
# slapos.toolbox==0.37
GitPython = 0.3.2.RC1
# Required by:
# slapos.toolbox==0.35.0
# slapos.toolbox==0.37
atomize = 0.1.1
# Required by:
# slapos.toolbox==0.35.0
# paramiko==1.12.0
ecdsa = 0.8
# Required by:
# slapos.toolbox==0.37
feedparser = 5.1.3
# Required by:
# slapos.cookbook==0.79
# slapos.cookbook==0.84
inotifyx = 0.2.0-1
# Required by:
# slapos.cookbook==0.79
# slapos.cookbook==0.84
lock-file = 2.0
# Required by:
# slapos.cookbook==0.79
# slapos.cookbook==0.84
netaddr = 0.7.10
# Required by:
......@@ -106,37 +111,33 @@ netifaces = 0.8-1
numpy = 1.7.1
# Required by:
# slapos.toolbox==0.35.0
paramiko = 1.11.0
# Required by:
# slapos.toolbox==0.35.0
psutil = 1.0.1
# slapos.toolbox==0.37
paramiko = 1.12.0
# Required by:
# slapos.core==0.35.1
pyflakes = 0.7.3
# Required by:
# slapos.cookbook==0.79
pytz = 2013b
# slapos.cookbook==0.84
pytz = 2013d
# Required by:
# slapos.cookbook==0.79
# slapos.toolbox==0.35.0
# slapos.cookbook==0.84
# slapos.toolbox==0.37
slapos.core = 0.35.1
# Required by:
# slapos.core==0.35.1
supervisor = 3.0b2
supervisor = 3.0
# Required by:
# slapos.core==0.35.1
unittest2 = 0.5.1
# Required by:
# slapos.cookbook==0.79
# slapos.toolbox==0.35.0
# slapos.cookbook==0.84
# slapos.toolbox==0.37
xml-marshaller = 0.9.7
# Required by:
......
......@@ -3,5 +3,5 @@ DISK_PATH=${:disk-path}
BACKUP_PATH=${:backup-disk-path}
# TODO: Use rdiff
rm $DISK_PATH && \
rm $DISK_PATH
cp $BACKUP_PATH $DISK_PATH
......@@ -72,3 +72,20 @@ recipe = zc.recipe.egg
eggs =
collective.recipe.template
# Add slapos.libnetworkcache to path of slapos.core.
[slapos-cookbook]
eggs =
${lxml-python:egg}
slapos.cookbook
cliff
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
slapos.libnetworkcache
......@@ -2,7 +2,8 @@
parts =
slapos-configuration-file
cron-entry-slapos
slapos-node-format-wrapper
slapos-node-status-wrapper
slapos-node-format-wrapper-script
httpd-wrapper
......@@ -15,7 +16,10 @@ parts =
dropbear-server-add-authorized-key
sshkeys-authority
publish-connection-informations
dropbear-promise
httpd-promise
slapos-promise
eggs-directory = {{ eggs_directory }}
......@@ -45,7 +49,7 @@ bin = ${buildout:directory}/bin/
sshkeys = ${:srv}/sshkeys
service = ${:etc}/service/
scripts = ${:etc}/run/
script = ${:etc}/run/
ssh = ${:etc}/ssh/
log = ${:var}/log/
run = ${:var}/run/
......@@ -107,37 +111,55 @@ instance-root = ${directory:instance-root}
master-url = ${instance-parameter:configuration.master-url}
computer-id = ${instance-parameter:configuration.computer-id}
# XXX should be a parameter
#partition-amount = 10
partition-amount = 10
computer-definition-file = ${computer-definition-file:output}
computer-xml = ${directory:var}/slapos.xml
computer-key-file = ${slapos-computer-key-file:output}
computer-certificate-file = ${slapos-computer-certificate-file:output}
certificate-repository-path = ${directory:slapos-partitions-certificate-repository}
[slapos-node-format-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node format --cfg ${slapos-configuration-file:output} --logfile=${directory:slapos-log}/slapos-node-format.log --now
wrapper-path = ${directory:scripts}/slapos-node-format
parameters-extra = true
[slapos-node-instance-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node instance --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-instance.pid --logfile ${directory:slapos-log}/slapos-instance.cfg
wrapper-path = ${buildout:bin-directory}/slapos-node-instance
command-line = {{ bin_directory }}/slapos node instance --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-instance.pid --logfile ${directory:slapos-log}/slapos-instance.log
wrapper-path = ${directory:bin}/slapos-node-instance
parameters-extra = true
[slapos-node-software-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node software --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-software.pid --logfile ${directory:slapos-log}/slapos-software.cfg
wrapper-path = ${buildout:bin-directory}/slapos-node-software
command-line = {{ bin_directory }}/slapos node software --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-software.pid --logfile ${directory:slapos-log}/slapos-software.log
wrapper-path = ${directory:bin}/slapos-node-software
parameters-extra = true
[slapos-node-report-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node report --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-report.pid --logfile ${directory:slapos-log}/slapos-report.cfg
wrapper-path = ${buildout:bin-directory}/slapos-node-report
command-line = {{ bin_directory }}/slapos node report --cfg ${slapos-configuration-file:output} --pidfile ${directory:run}/slapos-report.pid --logfile ${directory:slapos-log}/slapos-report.log
wrapper-path = ${directory:bin}/slapos-node-report
parameters-extra = true
[slapos-node-status-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node status --cfg ${slapos-configuration-file:output}
wrapper-path = ${directory:bin}/slapos-node-status
parameters-extra = true
[slapos-node-format-wrapper]
recipe = slapos.cookbook:wrapper
command-line = {{ bin_directory }}/slapos node format --cfg ${slapos-configuration-file:output} --logfile=${directory:slapos-log}/slapos-node-format.log --now
wrapper-path = ${directory:bin}/slapos-node-format
parameters-extra = true
[slapos-node-format-wrapper-script]
# Create a wrapper around the format wrapper in etc/run that retries until it succeeds
recipe = collective.recipe.template
input = inline:#!{{ bash_executable }}
false
while [ ! $? -eq 0 ]; do
${slapos-node-format-wrapper:wrapper-path}
done
output = ${directory:script}/slapos-node-format
mode = 700
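The inline template above seeds $? with "false" and loops until "slapos node format" exits successfully, with no pause between attempts. An equivalent, arguably clearer sketch (the sleep and the wrapper path are assumptions, not part of the profile):

    #!/bin/sh
    # Retry the format wrapper until it succeeds, pausing between attempts
    # so a persistent failure does not busy-loop.
    until "${HOME}/bin/slapos-node-format"; do
        sleep 60
    done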
#########
# Deploy an HTTP server to see logs online
#########
......@@ -165,23 +187,6 @@ wrapper-path = ${directory:service}/httpd
# generated parameter containing url to use for other sections
url = http://[${httpd-configuration-file:listening-ip}]/
#[httpd-wrapper]
#recipe = collective.recipe.template
#input = inline:
# #!${buildout:executable}
# import SimpleHTTPServer
# import SocketServer
# PORT = ${:listening-port}
# LISTENING_IP = '${:listening-ip}'
# Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
# httpd = SocketServer.TCPServer((LISTENING_IP, PORT), Handler)
# print "serving at port", PORT
# httpd.serve_forever()
#output = ${directory:service}/httpd
#listening-ip = ${instance-parameter:ipv6-random}
#listening-port = 8080
#mode = 755
#########
# Deploy logrotate
......@@ -303,15 +308,13 @@ recipe = slapos.cookbook:dropbear.add_authorized_key
key = ${instance-parameter:configuration.authorized-key}
# Deploy a frontend for log
# XXX TODO
#########
# Send information to SlapOS Master
#########
[publish-connection-informations]
recipe = slapos.cookbook:publish
log-viewer-url = http://[${httpd-configuration-file:listening-ip}]:${httpd-configuration-file:listening-port}
ssh_command = ssh ${dropbear-server:host} -p ${dropbear-server:port}
#########
# Deploy promises scripts
......@@ -322,3 +325,16 @@ path = ${directory:promises}/dropbear
hostname = ${dropbear-server:host}
port = ${dropbear-server:port}
[httpd-promise]
recipe = slapos.cookbook:check_port_listening
path = ${directory:promises}/httpd
hostname = ${httpd-configuration-file:listening-ip}
port = ${httpd-configuration-file:listening-port}
[slapos-promise]
recipe = collective.recipe.template
input = inline:#!/{{ bash_executable }}
{{ bin_directory }}/slapgrid-supervisorctl ${slapos-configuration-file:output} status watchdog | grep RUNNING
output = ${directory:promises}/slapos
mode = 0700
......@@ -9,9 +9,55 @@ extends = common.cfg
[networkcache]
# signature certificates of the following uploaders.
# Romain Courteaud
# Sebastien Robin
# Kazuhiko Shiozaki
# Cedric de Saint Martin
# Test Agent
# Yingjie Xu
# Gabriel Monnerat
# Łukasz Nowak
# Test Agent (Automatic update from tests)
# Aurélien Calonne
signature-certificate-list =
-----BEGIN CERTIFICATE-----
MIIB4DCCAUkCADANBgkqhkiG9w0BAQsFADA5MQswCQYDVQQGEwJGUjEZMBcGA1UE
CBMQRGVmYXVsdCBQcm92aW5jZTEPMA0GA1UEChMGTmV4ZWRpMB4XDTExMDkxNTA5
MDAwMloXDTEyMDkxNTA5MDAwMlowOTELMAkGA1UEBhMCRlIxGTAXBgNVBAgTEERl
ZmF1bHQgUHJvdmluY2UxDzANBgNVBAoTBk5leGVkaTCBnzANBgkqhkiG9w0BAQEF
AAOBjQAwgYkCgYEApYZv6OstoqNzxG1KI6iE5U4Ts2Xx9lgLeUGAMyfJLyMmRLhw
boKOyJ9Xke4dncoBAyNPokUR6iWOcnPHtMvNOsBFZ2f7VA28em3+E1JRYdeNUEtX
Z0s3HjcouaNAnPfjFTXHYj4um1wOw2cURSPuU5dpzKBbV+/QCb5DLheynisCAwEA
ATANBgkqhkiG9w0BAQsFAAOBgQBCZLbTVdrw3RZlVVMFezSHrhBYKAukTwZrNmJX
mHqi2tN8tNo6FX+wmxUUAf3e8R2Ymbdbn2bfbPpcKQ2fG7PuKGvhwMG3BlF9paEC
q7jdfWO18Zp/BG7tagz0jmmC4y/8akzHsVlruo2+2du2freE8dK746uoMlXlP93g
QUUGLQ==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB8jCCAVugAwIBAgIJAPu2zchZ2BxoMA0GCSqGSIb3DQEBBQUAMBIxEDAOBgNV
BAMMB3RzeGRldjMwHhcNMTExMDE0MTIxNjIzWhcNMTIxMDEzMTIxNjIzWjASMRAw
DgYDVQQDDAd0c3hkZXYzMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCrPbh+
YGmo6mWmhVb1vTqX0BbeU0jCTB8TK3i6ep3tzSw2rkUGSx3niXn9LNTFNcIn3MZN
XHqbb4AS2Zxyk/2tr3939qqOrS4YRCtXBwTCuFY6r+a7pZsjiTNddPsEhuj4lEnR
L8Ax5mmzoi9nE+hiPSwqjRwWRU1+182rzXmN4QIDAQABo1AwTjAdBgNVHQ4EFgQU
/4XXREzqBbBNJvX5gU8tLWxZaeQwHwYDVR0jBBgwFoAU/4XXREzqBbBNJvX5gU8t
LWxZaeQwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQA07q/rKoE7fAda
FED57/SR00OvY9wLlFEF2QJ5OLu+O33YUXDDbGpfUSF9R8l0g9dix1JbWK9nQ6Yd
R/KCo6D0sw0ZgeQv1aUXbl/xJ9k4jlTxmWbPeiiPZEqU1W9wN5lkGuLxV4CEGTKU
hJA/yXa1wbwIPGvX3tVKdOEWPRXZLg==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB7jCCAVegAwIBAgIJAJWA0jQ4o9DGMA0GCSqGSIb3DQEBBQUAMA8xDTALBgNV
BAMMBHg2MXMwIBcNMTExMTI0MTAyNDQzWhgPMjExMTEwMzExMDI0NDNaMA8xDTAL
BgNVBAMMBHg2MXMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANdJNiFsRlkH
vq2kHP2zdxEyzPAWZH3CQ3Myb3F8hERXTIFSUqntPXDKXDb7Y/laqjMXdj+vptKk
3Q36J+8VnJbSwjGwmEG6tym9qMSGIPPNw1JXY1R29eF3o4aj21o7DHAkhuNc5Tso
67fUSKgvyVnyH4G6ShQUAtghPaAwS0KvAgMBAAGjUDBOMB0GA1UdDgQWBBSjxFUE
RfnTvABRLAa34Ytkhz5vPzAfBgNVHSMEGDAWgBSjxFUERfnTvABRLAa34Ytkhz5v
PzAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAFLDS7zNhlrQYSQO5KIj
z2RJe3fj4rLPklo3TmP5KLvendG+LErE2cbKPqnhQ2oVoj6u9tWVwo/g03PMrrnL
KrDm39slYD/1KoE5kB4l/p6KVOdeJ4I6xcgu9rnkqqHzDwI4v7e8/D3WZbpiFUsY
vaZhjNYKWQf79l6zXfOvphzJ
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB9jCCAV+gAwIBAgIJAO4V/jiMoICoMA0GCSqGSIb3DQEBBQUAMBMxETAPBgNV
BAMMCENPTVAtMjMyMCAXDTEyMDIxNjExMTAyM1oYDzIxMTIwMTIzMTExMDIzWjAT
......@@ -26,6 +72,45 @@ signature-certificate-list =
If1a2ZoqHRxoNo2yTmm7TSYRORWVS+vvfjY=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB9jCCAV+gAwIBAgIJAIlBksrZVkK8MA0GCSqGSIb3DQEBBQUAMBMxETAPBgNV
BAMMCENPTVAtMzU3MCAXDTEyMDEyNjEwNTUyOFoYDzIxMTIwMTAyMTA1NTI4WjAT
MREwDwYDVQQDDAhDT01QLTM1NzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
ts+iGUwi44vtIfwXR8DCnLtHV4ydl0YTK2joJflj0/Ws7mz5BYkxIU4fea/6+VF3
i11nwBgYgxQyjNztgc9u9O71k1W5tU95yO7U7bFdYd5uxYA9/22fjObaTQoC4Nc9
mTu6r/VHyJ1yRsunBZXvnk/XaKp7gGE9vNEyJvPn2bkCAwEAAaNQME4wHQYDVR0O
BBYEFKuGIYu8+6aEkTVg62BRYaD11PILMB8GA1UdIwQYMBaAFKuGIYu8+6aEkTVg
62BRYaD11PILMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAMoTRpBxK
YLEZJbofF7gSrRIcrlUJYXfTfw1QUBOKkGFFDsiJpEg4y5pUk1s5Jq9K3SDzNq/W
it1oYjOhuGg3al8OOeKFrU6nvNTF1BAvJCl0tr3POai5yXyN5jlK/zPfypmQYxE+
TaqQSGBJPVXYt6lrq/PRD9ciZgKLOwEqK8w=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB9jCCAV+gAwIBAgIJAPHoWu90gbsgMA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
BAMMCXZpZmlibm9kZTAeFw0xMjAzMTkyMzIwNTVaFw0xMzAzMTkyMzIwNTVaMBQx
EjAQBgNVBAMMCXZpZmlibm9kZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
ozBijpO8PS5RTeKTzA90vi9ezvv4vVjNaguqT4UwP9+O1+i6yq1Y2W5zZxw/Klbn
oudyNzie3/wqs9VfPmcyU9ajFzBv/Tobm3obmOqBN0GSYs5fyGw+O9G3//6ZEhf0
NinwdKmrRX+d0P5bHewadZWIvlmOupcnVJmkks852BECAwEAAaNQME4wHQYDVR0O
BBYEFF9EtgfZZs8L2ZxBJxSiY6eTsTEwMB8GA1UdIwQYMBaAFF9EtgfZZs8L2ZxB
JxSiY6eTsTEwMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAc43YTfc6
baSemaMAc/jz8LNLhRE5dLfLOcRSoHda8y0lOrfe4lHT6yP5l8uyWAzLW+g6s3DA
Yme/bhX0g51BmI6gjKJo5DoPtiXk/Y9lxwD3p7PWi+RhN+AZQ5rpo8UfwnnN059n
yDuimQfvJjBFMVrdn9iP6SfMjxKaGk6gVmI=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB9jCCAV+gAwIBAgIJAMNZBmoIOXPBMA0GCSqGSIb3DQEBBQUAMBMxETAPBgNV
BAMMCENPTVAtMTMyMCAXDTEyMDUwMjEyMDQyNloYDzIxMTIwNDA4MTIwNDI2WjAT
MREwDwYDVQQDDAhDT01QLTEzMjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
6peZQt1sAmMAmSG9BVxxcXm8x15kE9iAplmANYNQ7z2YO57c10jDtlYlwVfi/rct
xNUOKQtc8UQtV/fJWP0QT0GITdRz5X/TkWiojiFgkopza9/b1hXs5rltYByUGLhg
7JZ9dZGBihzPfn6U8ESAKiJzQP8Hyz/o81FPfuHCftsCAwEAAaNQME4wHQYDVR0O
BBYEFNuxsc77Z6/JSKPoyloHNm9zF9yqMB8GA1UdIwQYMBaAFNuxsc77Z6/JSKPo
yloHNm9zF9yqMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAl4hBaJy1
cgiNV2+Z5oNTrHgmzWvSY4duECOTBxeuIOnhql3vLlaQmo0p8Z4c13kTZq2s3nhd
Loe5mIHsjRVKvzB6SvIaFUYq/EzmHnqNdpIGkT/Mj7r/iUs61btTcGUCLsUiUeci
Vd0Ozh79JSRpkrdI8R/NRQ2XPHAo+29TT70=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB9jCCAV+gAwIBAgIJAKRvzcy7OH0UMA0GCSqGSIb3DQEBBQUAMBMxETAPBgNV
BAMMCENPTVAtNzcyMCAXDTEyMDgxMDE1NDI1MVoYDzIxMTIwNzE3MTU0MjUxWjAT
MREwDwYDVQQDDAhDT01QLTc3MjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
......@@ -38,8 +123,23 @@ signature-certificate-list =
5pW18Ry5Ie7iFK4cQMerZwWPxBodEbAteYlRsI6kePV7Gf735Y1RpuN8qZ2sYL6e
x2IMeSwJ82BpdEI5niXxB+iT0HxhmR+XaMI=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIB+DCCAWGgAwIBAgIJAKGd0vpks6T/MA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
BAMMCUNPTVAtMTU4NDAgFw0xMzA2MjAxMjE5MjBaGA8yMTEzMDUyNzEyMTkyMFow
FDESMBAGA1UEAwwJQ09NUC0xNTg0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
gQDZTH9etPUC+wMZQ3UIiOwyyCfHsJ+7duCFYjuo1uZrhtDt/fp8qb8qK9ob+df3
EEYgA0IgI2j/9jNUEnKbc5+OrfKznzXjrlrH7zU8lKBVNCLzQuqBKRNajZ+UvO8R
nlqK2jZCXP/p3HXDYUTEwIR5W3tVCEn/Vda4upTLcPVE5wIDAQABo1AwTjAdBgNV
HQ4EFgQU7KXaNDheQWoy5uOU01tn1M5vNkEwHwYDVR0jBBgwFoAU7KXaNDheQWoy
5uOU01tn1M5vNkEwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQASmqCU
Znbvu6izdicvjuE3aKnBa7G++Fdp2bdne5VCwVbVLYCQWatB+n4crKqGdnVply/u
+uZ16u1DbO9rYoKgWqjLk1GfiLw5v86pd5+wZd5I9QJ0/Sbz2vZk5S4ciMIGwArc
m711+GzlW5xe6GyH9SZaGOPAdUbI6JTDwLzEgA==
-----END CERTIFICATE-----
[versions]
slapos.libnetworkcache = 0.13.4
Jinja2 = 2.7.1
MarkupSafe = 0.18
Pygments = 1.6
......
......@@ -18,6 +18,6 @@ alter_user = false
alter_network = false
input_definition_file = ${:computer-definition-file}
computer_xml = ${:computer-xml}
partition_amount = 1
partition_amount = ${:partition-amount}
create_tap = false
......@@ -94,7 +94,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-runner-export.cfg.in
output = ${buildout:directory}/instance-runner-export.cfg
md5sum = 7e71622c09271790b5cef21c8613b8ac
md5sum = 9ce3482e64a0c957f7a5f25ad0dc03ae
mode = 0644
[template-resilient]
......@@ -107,7 +107,7 @@ mode = 0644
[template-resilient-test]
recipe = slapos.recipe.download
url = ${:_profile_base_location_}/instance-resilient-test.cfg.jinja2
#md5sum = 0ee2cea5239278a8c1572d7a04798fdc
md5sum = ac772d3a1cce4072acfabd563df449bb
filename = instance-resilient-test.cfg.jinja2
mode = 0644
......@@ -142,3 +142,21 @@ eggs =
slapos.core
xml_marshaller
pytz
# Add slapos.libnetworkcache to path of slapos.core so that slaprunner can build SRs using cache
[slapos-cookbook]
eggs =
${lxml-python:egg}
slapos.cookbook
cliff
hexagonit.recipe.download
inotifyx
netaddr
netifaces
requests
slapos.core
supervisor
xml_marshaller
pytz
slapos.libnetworkcache
......@@ -39,8 +39,7 @@ git-executable = ${git:location}/bin/git
[erp5.util-repository]
recipe = slapos.recipe.build:gitclone
repository = http://git.erp5.org/repos/erp5.git
#branch = scalability-master2
revision = f10da882ab5e1dc03a812f3d0e7390dc8da2b59
branch = scalability-master2
git-executable = ${git:location}/bin/git
#[slapos.core-repository]
......
......@@ -29,7 +29,7 @@ command-line = {{ bin_directory }}/runResiliencyTest ${:testnode-parameters} ${:
wrapper-path = ${directory:scripts}/runResiliencyTestSuite
[deploy-standalone-resiliency-test]
# Used to manually run the KVM test if we don't have a running testnode.
# Used to manually run the resilient test if we don't have a running testnode.
recipe = slapos.cookbook:wrapper
test-suite-title = slaprunner
command-line = {{ bin_directory }}/runStandaloneResiliencyTest --test-suite-title=${:test-suite-title} ${deploy-resiliency-test:test-parameters}
......@@ -41,7 +41,11 @@ recipe = slapos.cookbook:request
software-url = ${slap-connection:software-release-url}
software-type = resilient
name = Resilient Instance (Root Instance)
config = resiliency-backup-periodicity frontend-domain cloud9-frontend-domain
{% set cluster_parameter_dict = slapparameter_dict.get('cluster', {}) -%}
config = resiliency-backup-periodicity frontend-domain cloud9-frontend-domain {{ cluster_parameter_dict.keys() | join(' ') }}
{% for key, value in cluster_parameter_dict.items() -%}
config-{{ key }} = {{ dumps(value) }}
{% endfor -%}
config-resiliency-backup-periodicity = *
# XXX hardcoded
config-frontend-domain = google.com
......
......@@ -22,7 +22,7 @@ parts +=
symlinks
node-frontend-promise
nginx-promise
urls
publish-connection-informations
cron-entry-backup
[exporter]
......@@ -36,11 +36,5 @@ rsync-binary = ${rsync:location}/bin/rsync
# Extends publish section with resilient parameters
[urls]
[publish-connection-informations]
<= resilient-publish-connection-parameter
backend_url = $${slaprunner:access-url}
url = https://$${request-frontend:connection-domain}
cloud9_backend_url = $${node-frontend:access-url}
cloud9_url = https://$${request-cloud9-frontend:connection-domain}
ssh_command = ssh $${dropbear-runner-server:host} -p $${dropbear-runner-server:port}
password_recovery_code = $${recovery-code:passwd}
......@@ -200,7 +200,7 @@ extra-context =
[template-tidstorage]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/instance-tidstorage.cfg.in
md5sum = 5572d10b343bd2de90deeaf55dd0fdc7
md5sum = 2bc13fc6cd52c7b8e2b4ddb99a69974b
mode = 640
[template-cloudooo]
......@@ -414,6 +414,7 @@ eggs =
${pycrypto-python:egg}
lock_file
PyXML
Pympler
SOAPpy
cElementTree
chardet
......@@ -422,6 +423,7 @@ eggs =
erp5diff
inotifyx
ipdb
Jinja2
mechanize
numpy
ordereddict
......@@ -446,6 +448,8 @@ eggs =
huBarcode
qrcode
spyne
# Needed for checking ZODB Components source code
pylint
# Zope
ZODB3
......@@ -686,3 +690,4 @@ xupdate-processor = 0.4
zope.app.debug = 3.4.1
zope.app.dependable = 3.5.1
zope.app.form = 4.0.2
pylint = 1.0.0
* Report, from the PBS and from the clone, when a backup fails
* Make sure, when a takeover is done, that the "importer" script finishes running while the importer instance is changed into an exporter.
* Test that, after a successful backup/takeover, another backup is possible and will be successful.
* PBSs and mirrors should monitor/replace themselves
* Report errors from backup
* If a PBS master is down and then comes back, it might want to participate in the ongoing election; what happens then?
* If the network is partitioned (the two backups don't see each other, but each can see the slapos master) there will be two concurrent elections taking place, with two winners and two renames.
* How to know that a backup is working? Define "check that it works". Does it deploy? But then, how to ensure data integrity? Per application?
* How to ensure "synchronization" between two main instances? Example: WordPress: MySQL is down, then replaced, leaving an inconsistency between Apache and the new MySQL.
* How to deal with big data? I.e. how to have a working backup/restore system for 1 TB of data over a slow connection?
* How to be sure that the elected importer (a) contains the latest data and (b) has finished pulling? An importer that does not satisfy (a) and (b) should be prevented from becoming the main instance.
* How to say "I don't want this instance to be here", and be able to define "here"? This would allow automating the deployment of PBS and backup instances.
* Should we encrypt backed-up data?
* If a PBS is lost, a new PBS should be created from another one, in order to keep history.
......
......@@ -30,7 +30,7 @@ parts =
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready.cfg.in
output = ${buildout:directory}/pbsready.cfg
md5sum = 9f4212a79f10bee8f6d75061943110e2
md5sum = 570e0b54c97d510befa2ea981c1e90e0
mode = 0644
[pbsready-import]
......@@ -39,7 +39,7 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-import.cfg.in
output = ${buildout:directory}/pbsready-import.cfg
md5sum = 3c2e73f49abdc52282fc045e6d91f3e9
md5sum = cc9c776500ccd07cb51969beb68ffcda
mode = 0644
[pbsready-export]
......@@ -48,20 +48,20 @@ mode = 0644
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/pbsready-export.cfg.in
output = ${buildout:directory}/pbsready-export.cfg
md5sum = 5e27c391ceafb6a58032f1f87fba7826
md5sum = 25d05b3929fb4c6cf275866bad678d6a
mode = 0644
[template-pull-backup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pull-backup.cfg.in
output = ${buildout:directory}/instance-pull-backup.cfg
md5sum = c67a9dad66490ae264f9e7003521bf59
md5sum = c49e5911b94078d87b94507fb4efc93b
mode = 0644
[template-replicated]
recipe = slapos.recipe.download
url = ${:_profile_base_location_}/template-replicated.cfg.in
#md5sum = 9e20f283bf709c63c9c6692d5e1f8972
md5sum = c762a625f65193bc8a570b4d56a0d08c
mode = 0644
destination = ${buildout:directory}/template-replicated.cfg.in
......
......@@ -75,7 +75,7 @@ callbacks = $${directory:notifier-callbacks}
id-file = $${rootdirectory:etc}/notifier.id
equeue-socket = $${equeue:socket}
host = $${slap-network-information:global-ipv6}
port = 8080
port = 8088
wrapper = $${basedirectory:services}/notifier
server-binary = ${buildout:bin-directory}/pubsubserver
notifier-binary = ${buildout:bin-directory}/pubsubnotifier
......@@ -105,6 +105,7 @@ promises-directory = $${basedirectory:promises}
directory = $${directory:pbs-backup}
cron-entries = $${cron:cron-entries}
wrappers-directory = $${directory:pbs-wrappers}
run-directory = $${basedirectory:run}
# XXX: this should be named "notifier-host"
notifier-url = http://[$${notifier:host}]:$${notifier:port}
slave-instance-list = $${slap-parameter:slave_instance_list}
......
......@@ -25,6 +25,12 @@ home = $${buildout:directory}
var = $${:home}/var
pid = $${:var}/pid
# Define the port of the ssh server. It has to be different from the import
# one so that export and import can coexist on the same IP (slaprunner,
# slapos-in-partition, ipv4...)
[dropbear-server]
port = 22221
[resilient-publish-connection-parameter]
notification-id = http://[$${notifier:host}]:$${notifier:port}/get/$${notifier-exporter:name}
......
......@@ -23,6 +23,12 @@ parts =
[resilient-publish-connection-parameter]
notification-url = http://[$${notifier:host}]:$${notifier:port}/notify
# Define the port of the ssh server. It has to be different from the export
# one so that export and import can coexist on the same IP (slaprunner,
# slapos-in-partition, ipv4...)
[dropbear-server]
port = 22220
[import-on-notification]
# notifier.callback runs a script when a notification (sent by a parent PBS)
# is received
......
......@@ -213,7 +213,8 @@ wrapper = $${basedirectory:services}/sshd
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
# Deliberately does not define the "port" option. It is defined in
# pbsready-import.cfg.in and pbsready-export.cfg.in
home = $${directory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = $${rdiff-backup-server:wrapper}
......
......@@ -79,7 +79,7 @@ software-url = ${slap-connection:software-release-url}
software-type = {{typeimport}}
return = ssh-public-key ssh-url notification-url ip
pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-push
pbs-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-push
config = number authorized-key on-notification ip-list namebase
config-number = {{id}}
......@@ -133,8 +133,6 @@ config-ip-list = ${request-{{namebase}}:connection-ip}{% for j in range(1,nbback
## Having 3 backups pulling from the same PBS provides
## only availability, not resiliency
## WARNING : SLAVES ARE ALLOCATED AT RANDOM, THIS NEEDS TO BE FIXED.
[request-pbs-common]
<= slap-connection
recipe = slapos.cookbook:request
......@@ -173,15 +171,16 @@ sla-{{ key }} = {{ value }}
[request-pull-backup-server-{{namebase}}-{{id}}]
<= request-pbs-common
name = PBS {{id}} pulling from ${request-{{namebase}}:name}
config = url name type server-key on-notification notify notification-id title
config = url name type server-key on-notification notify notification-id title remove-backup-older-than
config-url = ${request-{{namebase}}:connection-ssh-url}
config-name = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}
config-type = pull
config-server-key = ${request-{{namebase}}:connection-ssh-public-key}
config-on-notification = ${request-{{namebase}}:connection-notification-id}
config-notify = ${request-pbs-{{namebase}}-{{id}}:connection-notification-url}
config-notification-id = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}-pull
config-name = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}
config-title = Pulling from {{namebase}}
config-remove-backup-older-than = {{ slapparameter_dict.get('remove-backup-older-than', '3B') }}
slave = true
sla = instance_guid
sla-instance_guid = ${request-pbs-{{namebase}}-{{id}}:instance_guid}
......@@ -191,12 +190,12 @@ sla-instance_guid = ${request-pbs-{{namebase}}-{{id}}:instance_guid}
name = PBS pushing on ${request-{{namebase}}-pseudo-replicating-{{id}}:name}
config = url name type server-key on-notification notify notification-id title
config-url = ${request-{{namebase}}-pseudo-replicating-{{id}}:connection-ssh-url}
config-name = ${request-pull-backup-server-{{namebase}}-{{id}}:config-name}
config-type = push
config-server-key = ${request-{{namebase}}-pseudo-replicating-{{id}}:connection-ssh-public-key}
config-on-notification = ${request-pbs-{{namebase}}-{{id}}:connection-feeds-url}${request-pull-backup-server-{{namebase}}-{{id}}:config-notification-id}
config-notify = ${request-{{namebase}}-pseudo-replicating-{{id}}:connection-notification-url}
config-notification-id = ${request-{{namebase}}-pseudo-replicating-{{id}}:pbs-notification-id}
config-name = ${slap-connection:computer-id}-${slap-connection:partition-id}-{{namebase}}-{{id}}
config-title = Pushing to {{namebase}} backup {{id}}
slave = true
sla = instance_guid
......