Commit 5e9161cd authored by Alain Takoudjou's avatar Alain Takoudjou

Update Release Candidate

parents dc91319d 6db6993a
...@@ -79,7 +79,6 @@ mistune = 0.7.3
nbformat = 4.3.0
notebook = 4.4.1
pandas = 0.19.2
plone.recipe.command = 1.1
prompt-toolkit = 1.0.13
ptyprocess = 0.5.1
pyzmq = 16.0.2
...@@ -92,7 +91,6 @@ tornado = 4.4.2
widgetsnbextension = 2.0.0
# numpy >= 1.13.1 is required for numpy.core.multiarray
numpy = 1.13.1
certifi = 2020.6.20
# nbconvert 4.2.0 depends on entrypoints egg that is not available as tar/zip source.
nbconvert = 4.1.0
......
...@@ -79,7 +79,6 @@ setup(name=name,
'addresiliency = slapos.recipe.addresiliency:Recipe',
'apacheperl = slapos.recipe.apacheperl:Recipe',
'apachephp = slapos.recipe.apachephp:Recipe',
'apachephpconfigure = slapos.recipe.apachephpconfigure:Recipe',
'apacheproxy = slapos.recipe.apacheproxy:Recipe',
'certificate_authority = slapos.recipe.certificate_authority:Recipe',
'certificate_authority.request = slapos.recipe.certificate_authority:Request',
...@@ -108,14 +107,12 @@ setup(name=name,
'generic.memcached = slapos.recipe.generic_memcached:Recipe',
'generic.mysql.wrap_update_mysql = slapos.recipe.generic_mysql:WrapUpdateMySQL',
'gitinit = slapos.recipe.gitinit:Recipe',
'haproxy = slapos.recipe.haproxy:Recipe',
'ipv4toipv6 = slapos.recipe.6tunnel:FourToSix',
'ipv6toipv4 = slapos.recipe.6tunnel:SixToFour',
'jsondump = slapos.recipe.jsondump:Recipe',
'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'mkdirectory = slapos.recipe.mkdirectory:Recipe',
'mioga.instantiate = slapos.recipe.mioga.instantiate:Recipe',
'nbdserver = slapos.recipe.nbdserver:Recipe',
'neoppod.cluster = slapos.recipe.neoppod:Cluster',
'neoppod.admin = slapos.recipe.neoppod:Admin',
......
##############################################################################
#
# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from __future__ import print_function
from slapos.recipe.librecipe import GenericBaseRecipe
import zc.buildout
import sys
import zc.recipe.egg
class Recipe(GenericBaseRecipe):
def install(self):
"""
Taken out from the old "lamp" recipe. Allows configuring a LAMP instance.
"""
self.path_list = []
document_root = self.options['htdocs']
url = self.options.get('url', '')
mysql_conf = {'mysql_host': self.options['mysql-host'],
'mysql_port': self.options['mysql-port'],
'mysql_user': self.options['mysql-username'],
'mysql_password': self.options['mysql-password'],
'mysql_database': self.options['mysql-database'],
}
self.configureInstallation(document_root, url, mysql_conf)
return self.path_list
def configureInstallation(self, document_root, url, mysql_conf):
"""Start a process which can launch Python scripts, or move or remove files or
directories, when installing the software.
"""
if not self.options.has_key('delete') and not self.options.has_key('rename') and not\
self.options.has_key('chmod') and not self.options.has_key('script') and not self.options.has_key('sql-script'):
return ""
delete = []
chmod = []
data = []
rename = []
rename_list = ""
argument = [self.options['lampconfigure'], "-H", mysql_conf['mysql_host'], "-P", mysql_conf['mysql_port'],
"-p", mysql_conf['mysql_password'], "-u", mysql_conf['mysql_user']]
if not self.options.has_key('file_token'):
argument = argument + ["-d", mysql_conf['mysql_database'],
"--table", self.options['table_name'].strip(), "--cond",
self.options.get('constraint', '1').strip()]
else:
argument = argument + ["-f", self.options['file_token'].strip()]
argument += ["-t", document_root]
if self.options.has_key('delete'):
delete = ["delete"]
for fname in self.options['delete'].split(','):
delete.append(fname.strip())
if self.options.has_key('rename'):
for fname in self.options['rename'].split(','):
if fname.find("=>") < 0:
old_name = fname
fname = []
fname.append(old_name)
fname.append(old_name + '-' + mysql_conf['mysql_user'])
else:
fname = fname.split("=>")
cmd = ["rename"]
if self.options.has_key('rename_chmod'):
cmd += ["--chmod", self.options['rename_chmod'].strip()]
rename.append(cmd + [fname[0].strip(), fname[1].strip()])
rename_list += fname[0] + " to " + fname[1] + " "
if self.options.has_key('chmod'):
chmod = ["chmod", self.options['mode'].strip()]
for fname in self.options['chmod'].split(','):
chmod.append(fname.strip())
if self.options.has_key('script') and \
self.options['script'].strip().endswith(".py"):
data = ["run", self.options['script'].strip(), "-v", mysql_conf['mysql_database'], url, document_root]
if self.options.has_key('sql-script'):
data = ["sql", self.options['sql-script'].strip(), "-v", mysql_conf['mysql_database'], url, document_root]
# TODO factor
if delete != []:
print("Creating lampconfigure with 'delete' arguments")
command = argument + delete
if rename != []:
for parameters in rename:
print("Creating lampconfigure with 'rename' arguments")
command = argument + rename
if chmod != []:
print("Creating lampconfigure with 'chmod' arguments")
command = argument + chmod
if data != []:
print("Creating lampconfigure with 'run' arguments")
command = argument + data
configureinstall_wrapper_path = self.createPythonScript(
self.options['configureinstall-location'],
__name__ + '.runner.executeRunner',
(argument, delete, rename, chmod, data)
)
#TODO finish to port this and remove upper one
#configureinstall_wrapper_path = self.createPythonScript(
# self.options['configureinstall-location'],
# 'slapos.lamp.run',
# [command]
#)
self.path_list.append(configureinstall_wrapper_path)
return rename_list
import subprocess
def executeRunner(arguments, delete, rename, chmod, data):
"""Start the instance configuration. This may run a Python script, and move and/or
rename files or directories once a condition is fulfilled. The condition may be that
a file exists or that an entry exists in the database.
"""
if delete:
print("Calling lampconfigure with 'delete' arguments")
subprocess.call(arguments + delete)
if rename:
for parameters in rename:
print("Calling lampconfigure with 'rename' arguments")
subprocess.call(arguments + parameters)
if chmod:
print("Calling lampconfigure with 'chmod' arguments")
subprocess.call(arguments + chmod)
if data:
print("Calling lampconfigure with 'run' arguments")
print(arguments + data)
subprocess.call(arguments + data)
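For clarity, a minimal sketch of the call that the pieces above end up assembling; every value below is invented for illustration, only the flag names (-H, -P, -p, -u, -d, --table, --cond, -f, -t) and the subcommands (delete, rename, chmod, run, sql) come from the recipe and runner above.

# Hypothetical example of the argument list built by Recipe.configureInstallation()
# and consumed by executeRunner(); paths and credentials are placeholders.
base_argument = [
    "/opt/software/bin/lampconfigure",   # self.options['lampconfigure']
    "-H", "127.0.0.1", "-P", "3306",     # MySQL host and port
    "-p", "secret", "-u", "lamp",        # MySQL password and user
    "-d", "lamp_db",                     # no file_token, so wait on a database entry
    "--table", "posts", "--cond", "1",
    "-t", "/srv/example/htdocs",         # document root
]

# executeRunner(base_argument, ["delete", "install.php"], [], [], [])
# would end up running:
#   lampconfigure ... delete install.php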
##############################################################################
#
# Copyright (c) 2011 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""
haproxy instance configuration.
name -- local name of the haproxy
wrapper-path -- location of the init script to generate
binary-path -- location of the haproxy command
ctl-path -- location of the haproxy control script
conf-path -- location of the configuration file
socket-path -- location of the socket file for administration
ip -- ip of the haproxy server
port -- port of the haproxy server
server-check-path -- URL path used for the "option httpchk" health check
maxconn -- maximum number of connections per backend server
backend-list -- string with the list of all backend addresses to balance to
Example: 127.0.0.1:12004 127.0.0.1:12005
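
Illustrative buildout usage (a sketch only; the values and the ${...}
references are invented, the option names come from this docstring):

[haproxy]
recipe = slapos.cookbook:haproxy
name = zope
ip = 127.0.0.1
port = 15000
backend-list = 127.0.0.1:12004 127.0.0.1:12005
server-check-path = /
maxconn = 4
conf-path = ${directory:etc}/haproxy.cfg
socket-path = ${directory:var}/haproxy.sock
binary-path = ${haproxy:location}/sbin/haproxy
wrapper-path = ${directory:services}/haproxy
ctl-path = ${directory:bin}/haproxy-ctl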
"""
def install(self):
# inter must be quite short in order to detect quickly an unresponsive node
# and to detect quickly a node which is back.
# rise must be the minimum possible: 1; indeed, a node which is back does not
# need to sleep any longer and we can give it work immediately.
# fall should be quite short: with inter at 3 and fall at 2, a node will be
# considered dead after 6 seconds.
# maxconn should be set to the maximum number of threads we have per Zope, so
# that haproxy will manage the queue of requests, with the possibility of
# moving a request to another node if the initially selected one is dead.
# maxqueue is the number of requests waiting in the queue of every Zope client.
# It makes sure that there is not one Zope client handling all the work while
# other clients are doing nothing. This was happening even though we have
# round-robin distribution, because when a node dies for some seconds, all
# requests are dispatched to the other nodes, users then stick to those nodes
# and do not come back. Please note this option is not an issue if you have
# more than (maxqueue * node_quantity) requests, because haproxy will handle
# a top-level queue.
try:
backend_dict = self.options['backend-dict']
except KeyError:
backend_list = self.options['backend-list']
if isinstance(backend_list, str):
# BBB
backend_list = backend_list.split()
backend_dict = {
self.options['name']: (self.options['port'], backend_list),
}
server_snippet_filename = self.getTemplateFilename(
'haproxy-server-snippet.cfg.in')
listen_snippet_filename = self.getTemplateFilename(
'haproxy-listen-snippet.cfg.in')
server_snippet = ""
ip = self.options['ip']
server_check_path = self.options.get('server-check-path', None)
if server_check_path:
httpchk = 'option httpchk GET %s' % server_check_path
else:
httpchk = ''
# FIXME: maxconn must be provided per-backend, not globally
maxconn = self.options['maxconn']
i = 0
for name, (port, backend_list) in backend_dict.iteritems():
server_snippet += self.substituteTemplate(
listen_snippet_filename, {
'name': name,
'ip': ip,
'port': port,
'httpchk': httpchk,
})
for address in backend_list:
i += 1
server_snippet += self.substituteTemplate(
server_snippet_filename, {
'name': '%s_%s' % (name, i),
'address': address,
'cluster_zope_thread_amount': maxconn,
})
configuration_path = self.createFile(
self.options['conf-path'],
self.substituteTemplate(
self.getTemplateFilename('haproxy.cfg.in'),
{'socket_path': self.options['socket-path'],
'server_text': server_snippet},
)
)
wrapper_path = self.createWrapper(
self.options['wrapper-path'],
(self.options['binary-path'].strip(), '-f', configuration_path))
ctl_path = self.createPythonScript(
self.options['ctl-path'],
__name__ + '.haproxy.haproxyctl',
(self.options['socket-path'],))
return [configuration_path, wrapper_path, ctl_path]
import socket
try:
import readline
except ImportError:
pass
def haproxyctl(socket_path):
while True:
try:
l = raw_input('> ')
except EOFError:
print
break
if l == 'quit':
break
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(socket_path)
s.send('%s\n' % l)
while True:
r = s.recv(1024)
if not r:
break
print r
s.close()
listen %(name)s
bind %(ip)s:%(port)s
cookie SERVERID insert
balance roundrobin
%(httpchk)s
stats uri /haproxy
stats realm Global\ statistics
server %(name)s %(address)s cookie %(name)s check inter 3s rise 1 fall 2 maxqueue 5 maxconn %(cluster_zope_thread_amount)s
global
maxconn 4096
stats socket %(socket_path)s level admin
defaults
mode http
retries 1
option redispatch
maxconn 2000
# It is useless to have a timeout much bigger than the one of apache.
# By default apache uses 300s, so we set slightly more in order to
# make sure that apache will close the connection first.
timeout server 305s
# Stop waiting in queue for a zope to become available.
# If no zope can be reached after one minute, consider the request will
# never succeed.
timeout queue 60s
# The connection should be immediate on a LAN,
# so we should not set more than 5 seconds, and even that may already be too much.
timeout connect 5s
# As requested in haproxy doc, make this "at least equal to timeout server".
timeout client 305s
# Use "option httpclose" to not preserve client & server persistent connections
# while handling every incoming request individually, dispatching them one after
# another to servers, in HTTP close mode. This is really needed when haproxy
# is configured with maxconn set to 1; without this option, browsers are unable
# to render a page.
option httpclose
%(server_text)s
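To see what install() renders from the two snippet templates above, here is a minimal sketch assuming plain Python "%" interpolation (which is what the %(name)s placeholders are written for); all backend values are invented.

# Sketch of the expansion performed by substituteTemplate(); invented values.
listen_snippet = """listen %(name)s
  bind %(ip)s:%(port)s
  cookie SERVERID insert
  balance roundrobin
  %(httpchk)s
"""
server_snippet = ("  server %(name)s %(address)s cookie %(name)s check"
                  " inter 3s rise 1 fall 2 maxqueue 5"
                  " maxconn %(cluster_zope_thread_amount)s\n")

server_text = listen_snippet % {
    'name': 'zope', 'ip': '127.0.0.1', 'port': 15000,
    'httpchk': 'option httpchk GET /',
}
for i, address in enumerate(['10.0.0.1:2200', '10.0.0.2:2200'], 1):
    server_text += server_snippet % {
        'name': 'zope_%s' % i,
        'address': address,
        'cluster_zope_thread_amount': 4,
    }
print(server_text)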
##############################################################################
#
# Copyright (c) 2012 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import pprint
import re
import shutil
import signal
import stat
import subprocess
from slapos.recipe.librecipe import GenericBaseRecipe
class Recipe(GenericBaseRecipe):
"""\
Configure a Mioga instance:
- copy over /var and /buildinst directories
- call "make install-all"
"""
def removeIfExisting(self, filepath):
if os.path.isfile(filepath):
os.remove(filepath)
def rsync_dir(self, src, target):
if os.path.isdir(src) and not src.endswith('/'):
src += '/'
cmd = subprocess.Popen(self.options['rsync_bin'] + '/rsync -a --specials '
+ src + ' ' + target,
env=os.environ, shell=True)
cmd.communicate()
# Even if there is a dedicated update(), this is still called sometimes.
# So better not trust that and decide for ourselves.
def install(self):
self.options['admin_password'] = 'test_for_programmatic_setting'
# Copy the build/ and var/lib/Mioga2 folders into the instance
mioga_location = self.options['mioga_location']
var_dir = self.options['var_directory']
self.rsync_dir(os.path.join(mioga_location, 'var'), var_dir)
buildinst_dir = self.options['buildinst_directory']
self.rsync_dir(self.options['mioga_buildinst'], buildinst_dir)
former_directory = os.getcwd()
os.chdir(buildinst_dir)
vardir = self.options['var_directory']
mioga_base = os.path.join(vardir, 'lib', 'Mioga2')
fm = FileModifier('conf/Config.xml')
fm.modifyParameter('init_sql', 'no') # force_init_sql is set manually everywhere
fm.modifyParameter('install_dir', mioga_base)
fm.modifyParameter('tmp_dir', os.path.join(mioga_base, 'tmp'))
fm.modifyParameter('search_tmp_dir', os.path.join(mioga_base, 'mioga_search'))
fm.modifyParameter('maildir', os.path.join(vardir, 'spool', 'mioga', 'maildir'))
fm.modifyParameter('maildirerror', os.path.join(vardir, 'spool', 'mioga', 'error'))
fm.modifyParameter('mailfifo', os.path.join(vardir, 'spool', 'mioga', 'fifo'))
notifier_fifo = os.path.join(vardir, 'spool', 'mioga', 'notifier')
fm.modifyParameter('notifierfifo', notifier_fifo)
searchengine_fifo = os.path.join(vardir, 'spool', 'mioga', 'searchengine')
fm.modifyParameter('searchenginefifo', searchengine_fifo)
fm.modifyParameter('dbi_passwd', self.options['db_password'])
fm.modifyParameter('db_host', self.options['db_host'])
fm.modifyParameter('db_port', self.options['db_port'])
fm.modifyParameter('dav_host', self.options['public_ipv6'])
fm.modifyParameter('dav_port', self.options['public_ipv6_port'])
fm.modifyParameter('bin_dir', self.options['bin_dir'])
# db_name, dbi_login are standard
fm.save()
# Ensure no old data is kept
self.removeIfExisting('config.mk')
# if os.path.isdir('web/conf/apache'):
# shutil.rmtree('web/conf/apache')
environ = os.environ
environ['PATH'] = ':'.join([self.options['perl_bin'], # priority!
# Mioga scripts in Makefiles and shell scripts
self.options['bin_dir'],
self.options['libxslt_bin'],
self.options['libxml2_bin'],
self.options['postgres_bin'],
self.options['rsync_bin'],
environ['PATH'] ])
environ['MIOGA_SITEPERL'] = self.options['mioga_siteperl']
# Write the Postgres password file
pgpassfilepath = os.path.join(self.options['instance_root'], '.pgpass')
pgpassfile = open(pgpassfilepath, 'w')
pgpassfile.write(':'.join([re.sub(r':', r'\:', self.options['db_host']),
self.options['db_port'],
'*', # could be self.options['db_dbname'] or 'postgres'
self.options['db_username'],
self.options['db_password'] ]) + "\n")
pgpassfile.close()
os.chmod(pgpassfilepath, stat.S_IRUSR | stat.S_IWUSR)
environ['PGPASSFILE'] = pgpassfilepath
# We must call "make" in the SAME environment that
# "perl Makefile.PL" left!
cmd = subprocess.Popen(self.options['perl_bin'] + '/perl Makefile.PL disable_check'
+ ' && make slapos-instantiation',
env=environ, shell=True)
cmd.communicate()
# Apache configuration!
# Take the files that Mioga has prepared, and wrap some standard configuration around it.
# TODO: can't we squeeze this somehow into the generic apacheperl recipe?
apache_config_mioga = '''
LoadModule alias_module modules/mod_alias.so
LoadModule apreq_module modules/mod_apreq2.so
LoadModule auth_basic_module modules/mod_auth_basic.so
LoadModule authz_default_module modules/mod_authz_default.so
LoadModule authz_host_module modules/mod_authz_host.so
LoadModule authz_user_module modules/mod_authz_user.so
LoadModule autoindex_module modules/mod_autoindex.so
LoadModule dav_module modules/mod_dav.so
LoadModule dav_fs_module modules/mod_dav_fs.so
LoadModule dav_lock_module modules/mod_dav_lock.so
LoadModule deflate_module modules/mod_deflate.so
LoadModule dir_module modules/mod_dir.so
LoadModule env_module modules/mod_env.so
LoadModule headers_module modules/mod_headers.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule mime_module modules/mod_mime.so
LoadModule perl_module modules/mod_perl.so
# Basic server configuration
PidFile REPL_PID
Listen [REPL_IPV6HOST]:REPL_IPV6PORT
Listen REPL_IPV4HOST:REPL_IPV6PORT
# Listen [REPL_IPV6]:443 # what about mod_ssl and all that stuff?
# ServerAdmin someone@email
# Log configuration
ErrorLog REPL_ERRORLOG
LogLevel debug
LogFormat "%h %{REMOTE_USER}i %l %u %t \\"%r\\" %>s %b \\"%{Referer}i\\" \\"%{User-Agent}i\\"" combined
LogFormat "%h %{REMOTE_USER}i %l %u %t \\"%r\\" %>s %b" common
CustomLog REPL_ACCESSLOG common
DocumentRoot REPL_DOCROOT
DirectoryIndex index.html
DavLockDB REPL_DAVLOCK
Include conf/extra/httpd-autoindex.conf
'''
apache_config_mioga = (apache_config_mioga
.replace('REPL_PID', self.options['pid_file'])
.replace('REPL_IPV6HOST', self.options['public_ipv6'])
.replace('REPL_IPV4HOST', self.options['private_ipv4'])
.replace('REPL_IPV6PORT', self.options['public_ipv6_port'])
.replace('REPL_ERRORLOG', self.options['error_log'])
.replace('REPL_ACCESSLOG', self.options['access_log'])
.replace('REPL_DOCROOT', self.options['htdocs'])
.replace('REPL_STATIC', os.path.join(mioga_base, 'static'))
.replace('REPL_DAVLOCK', self.options['dav_locks']) )
mioga_prepared_apache_config_dir = os.path.join(mioga_base, 'conf', 'apache')
for filepath in os.listdir(mioga_prepared_apache_config_dir):
apache_config_mioga += ("# Read in from "+filepath+"\n" +
open(os.path.join(mioga_prepared_apache_config_dir, filepath)).read() + "\n" )
# Internal DAV only accepts its own addresses
apache_config_mioga = re.sub(
'Allow from localhost',
"Allow from "+self.options['private_ipv4']+"\n\tAllow from "+self.options['public_ipv6'],
apache_config_mioga)
path_list = []
open(self.options['httpd_conf'], 'w').write(apache_config_mioga)
# TODO: if that all works fine, put it into a proper template
# httpd_conf = self.createFile(self.options['httpd_conf'],
# self.substituteTemplate(self.getTemplateFilename('apache.in'),
# apache_config)
# )
path_list.append(os.path.abspath(self.options['httpd_conf']))
services_dir = self.options['services_dir']
httpd_wrapper = self.createWrapper(
os.path.join(services_dir, 'httpd_wrapper'),
(self.options['httpd_binary'],
'-f', self.options['httpd_conf'], '-DFOREGROUND'),
)
path_list.append(httpd_wrapper)
for fifo in [notifier_fifo, searchengine_fifo]:
if os.path.exists(fifo):
if not stat.S_ISFIFO(os.stat(fifo).st_mode):
raise Exception("The file "+fifo+" exists but is not a FIFO.")
else:
os.mkfifo(fifo, 0600)
site_perl_bin = os.path.join(self.options['site_perl'], 'bin')
mioga_conf_path = os.path.join(mioga_base, 'conf', 'Mioga.conf')
notifier_wrapper = self.createWrapper(
os.path.join(services_dir, 'notifier'),
(os.path.join(site_perl_bin, 'notifier.pl'),
mioga_conf_path),
)
path_list.append(notifier_wrapper)
searchengine_wrapper = self.createWrapper(
os.path.join(services_dir, 'searchengine'),
(os.path.join(site_perl_bin, 'searchengine.pl'),
mioga_conf_path),
)
path_list.append(searchengine_wrapper)
crawl_fm = FileModifier( os.path.join('bin', 'search', 'crawl_sample.sh') )
# TODO: The crawl script will still call the shell command "date"
crawl_fm.modify(r'/var/tmp/crawl', self.options['log_dir'] + '/crawl')
crawl_fm.modify(r'/var/lib/Mioga2/conf', mioga_base + '/conf')
crawl_fm.modify(r'/usr/local/bin/(mioga2_(?:info|crawl|index).pl)',
site_perl_bin + r"/\g<1>")
crawl_path = os.path.join(self.options['bin_dir'], 'crawl.sh')
crawl_fm.save(crawl_path)
os.chmod(crawl_path, stat.S_IRWXU)
if os.path.exists(self.options['pid_file']):
# Reload apache configuration
with open(self.options['pid_file']) as pid_file:
pid = int(pid_file.read().strip(), 10)
try:
os.kill(pid, signal.SIGUSR1) # Graceful restart
except OSError:
pass
os.chdir(former_directory)
print("Mioga instantiate.py::install finished!")
return path_list
# Copied and adapted from mioga-hooks.py - how to reuse code?
class FileModifier:
def __init__(self, filename):
self.filename = filename
f = open(filename, 'rb')
self.content = f.read()
f.close()
def modifyParameter(self, key, value):
(self.content, count) = re.subn(
r'(<parameter[^>]*\sname\s*=\s*"' + re.escape(key) + r'"[^>]*\sdefault\s*=\s*")[^"]*',
r"\g<1>" + value,
self.content)
return count
def modify(self, pattern, replacement):
(self.content, count) = re.subn(pattern, replacement, self.content)
return count
def save(self, output=""):
if output == "":
output = self.filename
f = open(output, 'w')
f.write(self.content)
f.close()
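As a standalone illustration of what FileModifier.modifyParameter() does to Config.xml, the following sketch applies the same regular expression to a made-up parameter element; the element and the new value are invented.

import re

# Standalone illustration of FileModifier.modifyParameter() above,
# applied to a made-up Config.xml parameter element.
content = ('<parameter name="install_dir" question="Install dir ?"'
           ' type="text" default="/usr/local" xpath="/install_dir"/>')
key = 'install_dir'
value = '/srv/instance/var/lib/Mioga2'
content, count = re.subn(
    r'(<parameter[^>]*\sname\s*=\s*"' + re.escape(key) + r'"[^>]*\sdefault\s*=\s*")[^"]*',
    r"\g<1>" + value,
    content)
print(content)   # the default attribute now points at the instance directory; count == 1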
...@@ -19,6 +19,14 @@ parts =
part = python2.7
[open62541]
configure-options =
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_INSTALL_PREFIX=@@LOCATION@@
-DUA_ENABLE_PUBSUB=ON
-DUA_ENABLE_SUBSCRIPTIONS=ON
-DUA_NAMESPACE_ZERO=REDUCED
# Beremiz needs it to be in the folder parts/open62541,
# as Beremiz looks for open62541 at BEREMIZ_PATH/../open62541
shared = false
......
...@@ -119,6 +119,11 @@ class ERP5UpgradeTestCase(SlapOSInstanceTestCase):
class TestERP5Upgrade(ERP5UpgradeTestCase):
@classmethod
def tearDownClass(cls):
cls.session.close()
super().tearDownClass()
@classmethod
def setUpOldInstance(cls):
cls._default_instance_old_parameter_dict = param_dict = json.loads(
......
...@@ -22,13 +22,10 @@ parts =
git
apache
[python]
part = python3
[eggs]
recipe = zc.recipe.egg
eggs =
erp5.util[testnode,benchmark]
erp5.util[testnode]
${lxml-python:egg}
...@@ -41,20 +38,3 @@ output = ${buildout:directory}/template.cfg
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/${:filename}
output = ${buildout:directory}/template-default.cfg.jinja
[versions]
dnspython = 1.15.0
PyXML = 0.8.5
WebOb = 1.8.5
WebTest = 2.0.33
soupsieve = 1.8
waitress = 1.4.4
z3c.etestbrowser = 3.0.1
zope.testbrowser = 5.3.2
WSGIProxy2 = 0.4.6
beautifulsoup4 = 4.7.1
zope.cachedescriptors = 4.3.1
zope.event = 4.4
zope.schema = 4.9.3
zope.deferredimport = 4.3.1
zope.proxy = 4.3.5
...@@ -189,7 +189,7 @@ def main():
assert revision == test_result.revision, (revision, test_result.revision)
while suite.acquire():
test = test_result.start(suite.running.keys())
test = test_result.start(list(suite.running.keys()))
if test is not None:
suite.start(test.name, lambda status_dict,
__test=test: __test.stop(**status_dict))
......
...@@ -26,5 +26,8 @@ The following instance parameters can be configured:
- remote-debugging-port: Port for Chromium to listen on.
- nginx-proxy-port: Port for Nginx proxy to listen on.
- monitor-httpd-port: Port for monitor.
- incognito: Force Incognito mode.
- window-size: Initial window size.
- block-new-web-contents: Block new web contents (pop-ups and window.open calls).
See `instance-headless-chromium-input-schema.json` for default values.
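A hedged sketch of how these parameters could be passed when requesting the instance through the SlapOS API follows; the master URL, certificate paths and software release URL are placeholders, only the parameter names come from the list above and from the JSON schema.

import slapos.slap

# Placeholder master URL, credentials and software release URL; adjust to your deployment.
slap = slapos.slap.slap()
slap.initializeConnection('https://slap.example.com/',
                          key_file='key.pem', cert_file='cert.pem')
partition = slap.registerOpenOrder().request(
    'https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/headless-chromium/software.cfg',
    partition_reference='headless-chromium-1',
    partition_parameter_kw={
        'target-url': 'https://www.example.com',
        'remote-debugging-port': 9222,
        'nginx-proxy-port': 9224,
        'incognito': True,
        'window-size': '1280,720',
        'block-new-web-contents': False,
        'monitor-httpd-port': 8083,
    })
print(partition.getConnectionParameterDict())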
[template-cfg] [template-cfg]
filename = instance.cfg.in filename = instance.cfg.in
md5sum = 4d73fe3c5d286163974bdb79c838e030 md5sum = 6315598b2c7c19f9e2d9cdf090492e2c
[instance-headless-chromium] [instance-headless-chromium]
_update_hash_filename_ = instance-headless-chromium.cfg.in _update_hash_filename_ = instance-headless-chromium.cfg.in
md5sum = d72e5f6e159081f1c204ceb7ec0c0caf md5sum = a350b7b5ac03971f1b8d66949905c164
[template-nginx-conf] [template-nginx-conf]
_update_hash_filename_ = templates/nginx.conf.in _update_hash_filename_ = templates/nginx.conf.in
md5sum = c4d09d2b819f624087ef4c38551dfe2f md5sum = 1f35f91fa7e490cd1e2194264a8a6ed8
[template-mime-types] [template-mime-types]
_update_hash_filename_ = templates/mime-types.in _update_hash_filename_ = templates/mime-types.in
......
{ {
"type": "object", "type": "object",
"$schema": "http://json-schema.org/draft-04/schema", "$schema": "http://json-schema.org/draft-04/schema",
"required": [
"target-url"
],
"title": "Input Parameters", "title": "Input Parameters",
"properties": { "properties": {
"target-url": { "target-url": {
"description": "URL for Chromium to load on startup.", "description": "Web site URL to load on headless chromium.",
"title": "Target URL", "title": "Application target URL",
"type": "string", "type": "string"
"default": "https://www.example.com"
}, },
"remote-debugging-port": { "remote-debugging-port": {
"description": "Port for Chromium to listen on.", "description": "Port for Chromium to listen on.",
"title": "Remote Debugging Port", "title": "Remote Debugging Port",
"type": "integer", "type": "integer",
"default": 8081 "default": 9222
}, },
"nginx-proxy-port": { "nginx-proxy-port": {
"description": "Port for Nginx proxy to listen on.", "description": "Port for Nginx proxy to listen on.",
"title": "Nginx Proxy Port", "title": "Nginx Proxy Port",
"type": "integer", "type": "integer",
"default": 8082 "default": 9224
},
"incognito": {
"description": "Force Incognito mode",
"title": "Force Incognito mode",
"type": "boolean",
"default": true
},
"window-size": {
"description": "Set the initial window size. Provided as a string in the format \"800,600\".",
"title": "Initial window size",
"type": "string",
"default": "800,600"
},
"block-new-web-contents": {
"description": "If true, then all pop-ups and calls to window.open will fail.",
"title": "Block new web contents",
"type": "boolean",
"default": false
}, },
"monitor-httpd-port": { "monitor-httpd-port": {
"description": "Port for monitor frontend.", "description": "Port for monitor frontend.",
......
{% set parameter_dict = dict(default_parameter_dict, **slapparameter_dict) %} {% set parameter_dict = dict(default_parameter_dict, **slapparameter_dict) %}
[buildout]
parts =
chromium-launcher
generate-passwd-file
nginx-config
nginx-mime-types
nginx-launcher
logrotate-entry-nginx
publish-connection-information
frontend-ok-promise
frontend-secure-promise
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
extends = {{ parameter_list['template-monitor'] }}
# Create necessary directories. # Create necessary directories.
[directory] [directory]
recipe = slapos.cookbook:mkdirectory recipe = slapos.cookbook:mkdirectory
...@@ -34,7 +16,10 @@ service = ${:etc}/service ...@@ -34,7 +16,10 @@ service = ${:etc}/service
ipv4 = {{ partition_ipv4 }} ipv4 = {{ partition_ipv4 }}
ipv6 = {{ partition_ipv6 }} ipv6 = {{ partition_ipv6 }}
remote-debugging-port = {{ parameter_dict['remote-debugging-port'] }} remote-debugging-port = {{ parameter_dict['remote-debugging-port'] }}
url = {{ parameter_dict['target-url'] }} target-url = {{ parameter_dict['target-url'] }}
incognito = {{ parameter_dict['incognito'] }}
window-size = {{ parameter_dict['window-size'] }}
block-new-web-contents = {{ parameter_dict['block-new-web-contents'] }}
remote-debugging-address = ${:ipv4}:${:remote-debugging-port} remote-debugging-address = ${:ipv4}:${:remote-debugging-port}
devtools-frontend-root = {{ parameter_list['devtools-frontend'] }} devtools-frontend-root = {{ parameter_list['devtools-frontend'] }}
...@@ -51,19 +36,29 @@ nginx-cert-file = ${frontend-instance-certificate:cert-file} ...@@ -51,19 +36,29 @@ nginx-cert-file = ${frontend-instance-certificate:cert-file}
nginx-mime-types = ${directory:etc}/mime-types nginx-mime-types = ${directory:etc}/mime-types
# Create a launcher script in /etc/service for the headless shell # Create a wrapper script in /bin/chromium for the headless shell
# executable. # executable.
[chromium-launcher] [chromium-wrapper]
recipe = slapos.recipe.template recipe = slapos.cookbook:wrapper
inline = wrapper-path = ${directory:bin}/chromium
#!/bin/sh command-line =
{{ parameter_list['chromium-wrapper'] }}
--remote-debugging-address=${headless-chromium:ipv4}
--remote-debugging-port=${headless-chromium:remote-debugging-port}
--user-data-dir=${directory:tmp}
--window-size="${headless-chromium:window-size}"
{% if parameter_dict['incognito'] %}--incognito{% endif -%}
{% if parameter_dict['block-new-web-contents'] %}--block-new-web-contents{% endif -%}
{{ '\n "${headless-chromium:target-url}"' }}
environment =
FONTCONFIG_FILE=${font-config:output}
export FONTCONFIG_FILE=${font-config:output} [chromium-launcher]
exec {{ parameter_list['chromium-wrapper'] }} \ recipe = slapos.cookbook:wrapper
--remote-debugging-address=${headless-chromium:ipv4} \ command-line = ${chromium-wrapper:wrapper-path}
--remote-debugging-port=${headless-chromium:remote-debugging-port} \ wrapper-path = ${directory:service}/chromium
${headless-chromium:url} hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
output = ${directory:service}/chromium hash-files = ${chromium-wrapper:wrapper-path}
# Configure and launch the proxy server. # Configure and launch the proxy server.
...@@ -81,7 +76,9 @@ output = ${headless-chromium:nginx-mime-types} ...@@ -81,7 +76,9 @@ output = ${headless-chromium:nginx-mime-types}
[nginx-launcher] [nginx-launcher]
recipe = slapos.cookbook:wrapper recipe = slapos.cookbook:wrapper
command-line = {{ parameter_list['nginx-location'] }}/sbin/nginx -c ${headless-chromium:nginx-config-target} command-line =
{{ parameter_list['nginx-location'] }}/sbin/nginx
-c ${headless-chromium:nginx-config-target}
wrapper-path = ${directory:service}/nginx wrapper-path = ${directory:service}/nginx
[logrotate-entry-nginx] [logrotate-entry-nginx]
...@@ -189,3 +186,22 @@ name = headless-chromium-frontend-secure.py ...@@ -189,3 +186,22 @@ name = headless-chromium-frontend-secure.py
url = ${remote-debugging-frontend:connection-secure_access} url = ${remote-debugging-frontend:connection-secure_access}
config-url = ${:url} config-url = ${:url}
config-http-code = 401 config-http-code = 401
[buildout]
extends = {{ parameter_list['template-monitor'] }}
parts =
chromium-launcher
generate-passwd-file
nginx-config
nginx-mime-types
nginx-launcher
logrotate-entry-nginx
remote-debugging-frontend
publish-connection-information
frontend-ok-promise
frontend-secure-promise
eggs-directory = {{ buildout['eggs-directory'] }}
develop-eggs-directory = {{ buildout['develop-eggs-directory'] }}
offline = true
...@@ -32,8 +32,11 @@ context =
jsonkey default_parameter_dict :default-parameters
default-parameters =
{
"remote-debugging-port": 8081,
"remote-debugging-port": 9222,
"nginx-proxy-port": 8082,
"nginx-proxy-port": 9224,
"incognito": true,
"window-size": "800,600",
"block-new-web-contents": false,
"target-url": "https://www.example.com",
"monitor-httpd-port": 8083
}
......
pid {{ param_headless_chromium['nginx-pid-path'] }};
error_log {{ param_headless_chromium['nginx-error-log'] }};
daemon off;
events {
worker_connections 1024;
}
...@@ -11,12 +13,6 @@ http {
include {{ param_headless_chromium['nginx-mime-types'] }};
default_type application/octet-stream;
types {
text/html html;
text/css css;
application/javascript js;
}
server {
listen {{ param_headless_chromium['proxy-address'] }} ssl;
......
...@@ -68,3 +68,26 @@ class TestHeadlessChromium(SlapOSInstanceTestCase):
response = requests.get(proxyURL + frontend, verify=False,
auth=(username, password))
self.assertEqual(requests.codes['ok'], response.status_code)
class TestHeadlessChromiumParameters(SlapOSInstanceTestCase):
instance_parameter_dict = {
# this website echoes the GET request for debugging purposes
'target-url': 'https://httpbin.org/get?a=6&b=4',
'incognito': True,
"block-new-web-contents": False,
"window-size": "900,600"
}
@classmethod
def getInstanceParameterDict(cls):
return cls.instance_parameter_dict
def setUp(self):
self.connection_parameters = self.requestDefaultInstance().getConnectionParameterDict()
def test_chromium_loads_target_url_parameter(self):
url = self.connection_parameters['remote-debug-url']
response = requests.get('%s/json' % url)
loaded_url = response.json()[0]['url']
self.assertEqual(loaded_url, self.instance_parameter_dict['target-url'])
...@@ -72,7 +72,6 @@ notebook = 6.1.5
numpy = 1.14.6
pandas = 0.25.3
pandocfilters = 1.4.3
plone.recipe.command = 1.1
prompt-toolkit = 1.0.13
ptyprocess = 0.5.1
pyzmq = 20.0.0
...@@ -86,7 +85,6 @@ tornado = 6.1
traitlets = 5.0.5
webencodings = 0.5.1
widgetsnbextension = 2.0.0
certifi = 2020.6.20
Send2Trash = 1.5.0
argon2-cffi = 20.1.0
nbconvert = 6.0.7
......
...@@ -162,18 +162,19 @@ class EdgeMixin(object): ...@@ -162,18 +162,19 @@ class EdgeMixin(object):
) )
for instance_reference in self.surykatka_dict: for instance_reference in self.surykatka_dict:
for info_dict in self.surykatka_dict[instance_reference].values(): for info_dict in self.surykatka_dict[instance_reference].values():
self.assertEqual( with open(info_dict['ini-file']) as fh:
info_dict['expected_ini'].strip() % info_dict, self.assertEqual(
open(info_dict['ini-file']).read().strip() info_dict['expected_ini'].strip() % info_dict,
) fh.read().strip()
)
def assertPromiseContent(self, instance_reference, name, content): def assertPromiseContent(self, instance_reference, name, content):
promise = open( with open(
os.path.join( os.path.join(
self.slap.instance_directory, instance_reference, 'etc', 'plugin', name self.slap.instance_directory, instance_reference, 'etc', 'plugin', name
)).read().strip() )) as fh:
promise = fh.read().strip()
self.assertTrue(content in promise) self.assertIn(content, promise)
def assertSurykatkaBotPromise(self): def assertSurykatkaBotPromise(self):
for instance_reference in self.surykatka_dict: for instance_reference in self.surykatka_dict:
...@@ -190,10 +191,11 @@ class EdgeMixin(object): ...@@ -190,10 +191,11 @@ class EdgeMixin(object):
def assertSurykatkaCron(self): def assertSurykatkaCron(self):
for instance_reference in self.surykatka_dict: for instance_reference in self.surykatka_dict:
for info_dict in self.surykatka_dict[instance_reference].values(): for info_dict in self.surykatka_dict[instance_reference].values():
self.assertEqual( with open(info_dict['status-cron']) as fh:
'*/2 * * * * %s' % (info_dict['status-json'],), self.assertEqual(
open(info_dict['status-cron']).read().strip() '*/2 * * * * %s' % (info_dict['status-json'],),
) fh.read().strip()
)
def initiateSurykatkaRun(self): def initiateSurykatkaRun(self):
try: try:
......
...@@ -129,7 +129,6 @@ coverage = 4.5.1
ecdsa = 0.13
mysqlclient = 1.3.12
pycrypto = 2.6.1
pycurl = 7.43.0
cython-zstd = 0.2
funcsigs = 1.0.2
......
...@@ -46,6 +46,7 @@ setup(name=name,
'slapos.libnetworkcache',
'erp5.util',
'supervisor',
'pillow',
'psutil',
'plantuml',
'requests'
......
...@@ -48,7 +48,7 @@ setup(
'erp5.util',
'selenium',
'psutil',
'image',
'pillow',
'requests',
'paramiko',
],
......
...@@ -394,32 +394,24 @@ tests =
[versions]
# slapos.core is used from the clone always
slapos.core =
# Various needed versions
Pillow = 5.3.0
Pillow = 9.2.0
PyNaCl = 1.3.0
bcrypt = 3.1.4
forcediphttpsadapter = 1.0.1
httplib2 = 0.11.3
httplib2 = 0.20.4
image = 1.5.25
paramiko = 2.4.2
plantuml = 0.3.0:whl
pysftp = 0.2.9
requests-toolbelt = 0.8.0
selenium = 3.141.0
# Patched eggs
PyPDF2 = 1.26.0+SlapOSPatched001
# Django 1.11 is python 2 compatible
Django = 1.11
testfixtures = 6.11.0
funcsigs = 1.0.2
mysqlclient = 1.3.12
mysqlclient = 2.1.1
pexpect = 4.8.0
ptyprocess = 0.6.0
psycopg2 = 2.8.6
# Patched eggs
PyPDF2 = 1.26.0+SlapOSPatched001
# Test Suite: SlapOS.SoftwareReleases.IntegrationTest-Master.Python2 ran at 2022/09/08 02:05:35.783873 UTC
# 2 failures, 0 errors, 1037 total, status: FAIL
...@@ -430,4 +422,3 @@ revision = b696fd3ddd3364b5807310afa2bc677c1ced773a
[slapos.core]
revision = a5335308538e250626c5d95e5b13cd3f2484c6c4
...@@ -244,19 +244,12 @@ slapos.toolbox =
rubygemsrecipe =
# All other dependencies should be pinned.
Pygments = 2.1.3
Pygments = 2.13.0
zc.lockfile = 1.4
bcrypt = 3.1.4
dnspython = 1.15.0
funcsigs = 1.0.2
httmock = 1.2.6
manuel = 1.9.0
mock = 2.0.0:whl
testfixtures = 6.11.0
pycurl = 7.43.0.2
pyflakes = 2.0.0
zope.testing = 4.6.2
urllib3 = 1.24.1
pathlib = 1.0.1
psycopg2 = 2.8.6
iniparse = 0.5
[buildout]
parts =
postgres-urlparse
# apacheperl-promise
mioga-instance
cron-entry-crawler
sshkeys-dropbear
dropbear-server-add-authorized-key
sshkeys-authority
publish-connection-information
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
etc = $${buildout:directory}/etc
srv = $${buildout:directory}/srv
log = $${buildout:directory}/log
var = $${buildout:directory}/var
buildinst = $${buildout:directory}/buildinst
[basedirectory]
recipe = slapos.cookbook:mkdirectory
services = $${rootdirectory:etc}/run
promises = $${rootdirectory:etc}/promise
htdocs = $${rootdirectory:srv}/htdocs
cronstamps = $${rootdirectory:etc}/cronstamps/
cron-entries = $${rootdirectory:etc}/cron.d/
crontabs = $${rootdirectory:etc}/crontabs/
sshkeys = $${rootdirectory:srv}/sshkeys
ssh = $${rootdirectory:etc}/ssh
[cron-simplelogger]
recipe = slapos.cookbook:simplelogger
wrapper = $${rootdirectory:bin}/cron_simplelogger
log = $${rootdirectory:log}/crond.log
[cron]
recipe = slapos.cookbook:cron
dcrond-binary = ${dcron:location}/sbin/crond
cronstamps = $${basedirectory:cronstamps}
cron-entries = $${basedirectory:cron-entries}
crontabs = $${basedirectory:crontabs}
catcher = $${cron-simplelogger:wrapper}
binary = $${basedirectory:services}/crond
[cron-entry-crawler]
<= cron
recipe = slapos.cookbook:cron.d
name = crawler
frequency = 50 23 * * *
command = $${mioga-instance:bin_dir}/crawl.sh
[apacheperl-promise]
recipe = slapos.cookbook:check_port_listening
path = $${basedirectory:promises}/httpd_promise
hostname = $${apacheperl-instance:ip}
port = $${apacheperl-instance:port}
[publish-connection-information]
recipe = slapos.cookbook:publish
direct_url = $${mioga-url:direct_url}
ssh_command = ssh $${dropbear-server:host} -p $${dropbear-server:port}
url = $${request-frontend:connection-site_url}
# Request POSTGRES INSTANCE and parse its URL
[request-postgres]
<= slap-connection
recipe = slapos.cookbook:request
name = Postgres
software-url = $${slap-connection:software-release-url}
software-type = postgres
return = url
sla-computer_guid = $${slap-connection:computer-id}
[postgres-urlparse]
recipe = slapos.cookbook:urlparse
url = $${request-postgres:connection-url}
[symlinks]
recipe = cns.recipe.symlink
symlink_target = $${rootdirectory:bin}
symlink_base = ${postgresql:location}/bin
# SSH SERVER
[sshkeys-directory]
recipe = slapos.cookbook:mkdirectory
requests = $${basedirectory:sshkeys}/requests/
keys = $${basedirectory:sshkeys}/keys/
[sshkeys-authority]
recipe = slapos.cookbook:sshkeys_authority
request-directory = $${sshkeys-directory:requests}
keys-directory = $${sshkeys-directory:keys}
wrapper = $${basedirectory:services}/sshkeys_authority
keygen-binary = ${dropbear:location}/bin/dropbearkey
[dropbear-server]
recipe = slapos.cookbook:dropbear
host = $${slap-network-information:global-ipv6}
port = 2222
home = $${basedirectory:ssh}
wrapper = $${rootdirectory:bin}/raw_sshd
shell = /bin/bash
rsa-keyfile = $${basedirectory:ssh}/server_key.rsa
dropbear-binary = ${dropbear:location}/sbin/dropbear
[sshkeys-dropbear]
<= sshkeys-authority
recipe = slapos.cookbook:sshkeys_authority.request
name = dropbear
type = rsa
executable = $${dropbear-server:wrapper}
public-key = $${dropbear-server:rsa-keyfile}.pub
private-key = $${dropbear-server:rsa-keyfile}
wrapper = $${basedirectory:services}/sshd
[dropbear-server-add-authorized-key]
<= dropbear-server
recipe = slapos.cookbook:dropbear.add_authorized_key
key = $${slap-parameter:authorized-key}
[slap-parameter]
# Default value if no ssh key is specified
authorized-key =
# IPv4 AND PORT 80 FRONTEND
[request-frontend]
<= slap-connection
recipe = slapos.cookbook:requestoptional
name = Frontend
# XXX We have hardcoded SR URL here.
software-url = http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/apache-frontend/software.cfg
slave = true
config-url = $${mioga-url:direct_url}
return = site_url
# MIOGA INSTANCE
[mioga-instance]
recipe = slapos.cookbook:mioga.instantiate
# mioga_compile_dir = ${template-apacheperl:compile-directory}
# Pity that the following line does not work. Or does it?
# mioga_compile_env = ${mioga:environment}
mioga_location = ${mioga:location}
mioga_buildinst = ${mioga:buildinst}
libxslt_bin = ${libxslt:location}/bin
libxml2_bin = ${libxml2:location}/bin
rsync_bin = ${rsync:location}/bin
var_directory = $${rootdirectory:var}
buildinst_directory = $${rootdirectory:buildinst}
instance_root = $${buildout:directory}
perl_bin = ${perl:location}/bin
postgres_bin = ${postgresql:location}/bin
htdocs = $${apacheperl-instance:htdocs}
db_host = $${postgres-urlparse:host}
db_port = $${postgres-urlparse:port}
db_dbname = $${postgres-urlparse:path}
db_username = $${postgres-urlparse:username}
db_password = $${postgres-urlparse:password}
public_ipv6 = $${slap-network-information:global-ipv6}
public_ipv6_port = 8080
private_ipv4 = $${slap-network-information:local-ipv4}
httpd_binary = ${apache-2.2:location}/bin/httpd
path = $${basedirectory:services}/apacheperl
htdocs = $${basedirectory:htdocs}
httpd_conf = $${rootdirectory:etc}/httpd.conf
pid_file = $${basedirectory:services}/apache.pid
lock_file = $${basedirectory:services}/apache.lock
dav_locks = $${buildout:directory}/var/dav_locks
services_dir = $${basedirectory:services}
error_log = $${rootdirectory:log}/error.log
access_log = $${rootdirectory:log}/access.log
bin_dir = $${rootdirectory:bin}
log_dir = $${rootdirectory:log}
site_perl = ${perl:siteprefix}
[mioga-url]
direct_url = http://[$${slap-network-information:global-ipv6}]:$${mioga-instance:public_ipv6_port}
\ No newline at end of file
[buildout]
parts =
symlinks
publish
postgres-instance
postgres-promise
# Define egg directories to be the one from Software Release
# (/opt/slapgrid/...)
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[instance-parameters]
# Fetches parameters defined in SlapOS Master for this instance
recipe = slapos.cookbook:slapconfiguration
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[rootdirectory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
etc = $${buildout:directory}/etc
services = $${rootdirectory:etc}/run/
promises = $${rootdirectory:etc}/promise/
var = $${buildout:directory}/var
[symlinks]
recipe = cns.recipe.symlink
symlink_target = $${rootdirectory:bin}
symlink_base = ${postgresql:location}/bin
[postgres-instance]
# create cluster, configuration files and a database
recipe = slapos.cookbook:postgres
# Options
ipv6_host = $${slap-network-information:global-ipv6}
user = mioga
port = 5432
dbname = mioga2
# pgdata_directory is created by initdb, and should not exist beforehand.
pgdata-directory = $${rootdirectory:var}/data
services = $${rootdirectory:services}
bin = $${rootdirectory:bin}
# Deploy promises scripts
[postgres-promise]
recipe = slapos.cookbook:check_port_listening
path = $${rootdirectory:promises}/postgres
hostname = $${slap-network-information:global-ipv6}
port = $${postgres-instance:port}
[publish]
recipe = slapos.cookbook:publishurl
url = $${postgres-instance:url}
[buildout]
parts =
switch-softwaretype
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
offline = true
[switch-softwaretype]
recipe = slapos.cookbook:softwaretype
default = ${template-apacheperl:output}
postgres = ${template-postgres:output}
import fnmatch
import grp
import os
import pprint
import pwd
import re
import shutil
import sys
class FileModifier:
def __init__(self, filename):
self.filename = filename
f = open(filename, 'rb')
self.content = f.read()
f.close()
def modify(self, key, value):
(self.content, count) = re.subn(
r'(<parameter[^>]*\sname\s*=\s*"' + re.escape(key) + r'"[^>]*\sdefault\s*=\s*")[^"]*',
r"\g<1>" + value,
self.content)
return count
def save(self):
f = open(self.filename, 'w')
f.write(self.content)
f.close()
def pre_configure_hook(options, bo, env):
location = options['location']
# TODO: double-check which one of these values must be set
# at instantiation time!
fm = FileModifier('conf/Config.xml')
fm.modify('apache_user', pwd.getpwuid(os.getuid())[0])
fm.modify('apache_group', grp.getgrgid(os.getgid())[0])
mioga_base = os.path.join(location, 'var', 'lib', 'Mioga2')
fm.modify('install_dir', mioga_base)
fm.modify('tmp_dir', os.path.join(mioga_base, 'tmp'))
fm.modify('search_tmp_dir', os.path.join(mioga_base, 'mioga_search'))
fm.modify('maildir', os.path.join(location, 'var', 'spool', 'mioga', 'maildir'))
fm.modify('maildirerror', os.path.join(location, 'var', 'spool', 'mioga', 'error'))
fm.modify('mailfifo', os.path.join(location, 'var', 'spool', 'mioga', 'fifo'))
fm.save()
# TODO: mail settings are certainly wrong, what is the domain name?
# Correct shebangs to the right Perl
for root, dirnames, filenames in os.walk('.'):
for filename in fnmatch.filter(filenames, '*.pl'):
with open(os.path.join(root, filename), "r+") as f:
lines = f.readlines()
(lines[0], count) = re.subn(r'^#!/usr/bin/perl',
'#!' + options['perl-binary'],
lines[0], 1)
if count > 0:
f.seek(0)
f.writelines(lines)
print("Corrected interpreter for script " + filename)
# def post_make_hook(options, buildout):
# location = options['location']
# print "Mioga - postmakehook"
# print "We are currently in", os.getcwd()
# shutil.move("var", location)
# return None
diff --git a/INSTALL b/INSTALL
index ef0f369..b275a48 100644
--- a/INSTALL
+++ b/INSTALL
@@ -382,6 +382,8 @@ A default init script is provided in file web/conf/mioga2.init.d. This script wo
Additional configuration for the Search engine
----------------------------------------------
+The default init script starts the search engine daemon.
+
You must edit the crawl_sample.sh to specify default location for configuration files and rename it to crawl.sh.
This script must be run by cron in a day basis with apache user (www-data for Debian)
diff --git a/Makefile.PL b/Makefile.PL
index 3582d20..c24e217 100644
--- a/Makefile.PL
+++ b/Makefile.PL
@@ -20,8 +20,10 @@
# ============================================================================
+use Cwd;
use ExtUtils::MakeMaker;
use ExtUtils::MakeMaker::Config;
+use File::Find;
# ****************************************************************************
#
@@ -67,6 +69,7 @@ sub CheckPreDepends {
my $disable_check = grep {lc($_) eq 'disable_check'} @ARGV;
+if (! $disable_check) {
CheckPreDepends({ # 'Data::Dumper' => '2.101',
# 'DBI' => '1.13',
# 'DBD::Pg' => '1.01',
@@ -75,7 +78,8 @@ CheckPreDepends({ # 'Data::Dumper' => '2.101',
'Error' => '0.15',
# 'Net::LDAP' => '0.25',
# 'Unicode::String' => '2.06',
- });
+ })
+}
require MiogaConf;
@@ -115,7 +119,7 @@ my $docsubdirs = "docs";
# ----------------------------------------------------------------------------
sub MY::processPL {
return '
-# Not realy in the good section, but at a good place:
+# Not really in the good section, but at a good place:
# between "all" and next "tardist"
include config.mk
@@ -124,10 +128,7 @@ tardist: doc
dist: doc
install ::
if test -e $(TMP_DIR) ; then \
- if test -d $(TMP_DIR) ; then \
- ( su - $(APACHE_USER) -c "id" -s /bin/sh || \
- ( echo "===> Problem with \"$(APACHE_USER)\" user" ; exit 1 ) ) \
- else \
+ if ! test -d $(TMP_DIR) ; then \
echo "===> File $(TMP_DIR) exists but is not a directory ..." ; \
exit 1 ; \
fi \
@@ -156,6 +157,31 @@ install-all ::
(cd web && $(MAKE) setperms) || exit 1;
+slapos-compilation :: install
+ for i in bin locales web ; \
+ do \
+ (cd $$i && $(MAKE) install) || exit 1;\
+ done
+ # Create symlinks for static content that can be shared among all SlapOS instances
+ for i in var/lib/Mioga2/static var/lib/Mioga2/conf/themes/default var/lib/Mioga2/conf/xsl; do \
+ mkdir -p `dirname "$(MIOGA_STATIC)/$$i"` && \
+ mv "$(MIOGA_BASE)/$$i" "$(MIOGA_STATIC)/$$i" && \
+ ln -s "$(MIOGA_STATIC)/$$i" "$(MIOGA_BASE)/$$i"; \
+ done
+ # copy a minimal build system into mioga/build
+ mkdir -p "$(MIOGA_BUILDINST)"
+ cp -R --parents \
+ Makefile.PL sql web/conf web/skel conf bin/search/crawl_sample.sh \
+ "$(MIOGA_BUILDINST)"
+
+slapos-instantiation ::
+ rm -Rf web/conf/apache; (cd web/conf && $(MAKE) apache)
+
+ for i in web/conf conf web/skel sql ; \
+ do \
+ (cd $$i && $(MAKE) install) || exit 1;\
+ done
+
doc:
for i in ' . $docsubdirs . ' ; \
do \
@@ -176,9 +202,18 @@ installall: install install-all
#
# ----------------------------------------------------------------------------
+my $Mioga2_pm = 'lib/Mioga2.pm';
+if ((not -e $Mioga2_pm) and exists($ENV{MIOGA_SITEPERL})) {
+ my $findfile = sub {
+ if ($_ eq 'Mioga2.pm') {
+ $Mioga2_pm = $File::Find::name;
+ }
+ };
+ find($findfile, $ENV{MIOGA_SITEPERL});
+};
WriteMakefile(
'NAME' => 'Mioga',
'DIR' => ['bin', 'web', 'locales', 'docs', 'sql'],
- 'VERSION_FROM' => 'lib/Mioga2.pm',
+ 'VERSION_FROM' => $Mioga2_pm,
);
diff --git a/bin/mailinglist/miogamailinglist.pl b/bin/mailinglist/miogamailinglist.pl
index 80668e2..fa1603a 100755
--- a/bin/mailinglist/miogamailinglist.pl
+++ b/bin/mailinglist/miogamailinglist.pl
@@ -205,11 +205,11 @@ sub MsgSendStock
sub CheckUTF8 {
my ($str) = @_;
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_str = $conv->convert($str);
unless ($tmp_str) {
my $charset = detect($str) || 'iso-8859-15'; # defaults to latin9
- $conv = Text::Iconv->new($charset, "utf8");
+ $conv = Text::Iconv->new($charset, "UTF-8");
$str = $conv->convert($str);
}
return $str;
diff --git a/bin/notifier/searchengine.pl b/bin/notifier/searchengine.pl
index efd1ca2..fc5ba67 100755
--- a/bin/notifier/searchengine.pl
+++ b/bin/notifier/searchengine.pl
@@ -39,8 +39,7 @@ if (!open(FIFO, "+>$fifo")) {
syslog('err', "Can't open fifo: $!. Exiting...");
}
my $flags = fcntl(FIFO, F_GETFL, 0);
-
-
+my $crawler = $config->GetBinariesDir()."/crawl.sh";
# MAIN LOOP
my %instances;
@@ -53,7 +52,7 @@ while (1) {
$instances{$line} += 1;
}
foreach my $inst (keys(%instances)) {
- system("/usr/local/bin/crawl.sh $inst");
+ system("$crawler $inst");
}
fcntl(FIFO, F_SETFL, $flags);
}
diff --git a/conf/Config.xml b/conf/Config.xml
index e614cda..3a9b7e3 100644
--- a/conf/Config.xml
+++ b/conf/Config.xml
@@ -3,6 +3,9 @@
<version module="Mioga2"/>
<config>
+ <parameter name="init_sql" question="Initialize database ?"
+ type="bool" default="no" xpath="/init_sql"/>
+
<parameter name="instance_ident" question="First Mioga instance Identifier ?"
type="text" default="Mioga"
xpath="/instance_ident"/>
@@ -35,8 +38,18 @@
<parameter name="authentication" question="Authentification method ?"
type="enum" default="Mioga2" values="Mioga2"
xpath="/authentication"/>
-
+
+ <parameter name="bin_dir" question="Directory for helper scripts and binaries ?"
+ type="text" default="/usr/local/bin"
+ xpath="/bin_dir"/>
+
<parameter name="Database settings" type="submenu">
+ <parameter name="db_host" question=" Mioga database server name or address ?"
+ type="text" default="localhost"
+ xpath="/database/DBhost"/>
+ <parameter name="db_port" question=" Mioga database server port ?"
+ type="text" default="5432"
+ xpath="/database/DBport"/>
<parameter name="db_name" question=" Name of Mioga database ?"
type="text" default="mioga2"
xpath="/database/DBname"/>
@@ -462,7 +475,6 @@
<app ident="Portal" package="Mioga2::Portal"/>
<app ident="Search" package="Mioga2::Search"/>
<app ident="Magellan" package="Mioga2::Magellan"/>
- <app ident="Mermoz" package="Mioga2::Mermoz"/>
<app ident="RSS" package="Mioga2::RSS"/>
<app ident="Narkissos" package="Mioga2::Narkissos"/>
<app ident="Colbert" package="Mioga2::Colbert"/>
@@ -476,7 +488,7 @@
<mioglet ident="Workspace" package="Mioga2::Portal::WSMioglet"/>
<mioglet ident="Organizer" package="Mioga2::Portal::OrgMioglet"/>
<mioglet ident="News" package="Mioga2::Portal::NewsMioglet"/>
- <mioglet ident="File" package="Mioga2::Portal::FileMioglet"/>
+ <mioglet ident="File" package="Miorouga2::Portal::FileMioglet"/>
<mioglet ident="Articles" package="Mioga2::Portal::ArticlesMioglet"/>
<mioglet ident="Poll" package="Mioga2::Portal::PollMioglet"/>
<mioglet ident="Search" package="Mioga2::Portal::SearchMioglet"/>
diff --git a/lib/Mioga2/Bottin.pm b/lib/Mioga2/Bottin.pm
index ba4cd8d..0db5651 100644
--- a/lib/Mioga2/Bottin.pm
+++ b/lib/Mioga2/Bottin.pm
@@ -1742,11 +1742,11 @@ sub InitSuperAdminMode {
sub CheckUTF8 {
my ($str) = @_;
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_str = $conv->convert($str);
unless ($tmp_str) {
- my $charset = detect($str) || 'iso-8859-15'; # defaults to latin9
- $conv = Text::Iconv->new($charset, "utf8");
+ my $charset = detect($str) || 'ISO-8859-15'; # defaults to latin9
+ $conv = Text::Iconv->new($charset, "UTF-8");
$str = $conv->convert($str);
}
return $str;
diff --git a/lib/Mioga2/Classes/URI.pm b/lib/Mioga2/Classes/URI.pm
index 8678b33..bf97e71 100644
--- a/lib/Mioga2/Classes/URI.pm
+++ b/lib/Mioga2/Classes/URI.pm
@@ -75,15 +75,16 @@ sub new {
# convert uri to UTF-8
my $uri = uri_unescape($options{uri});
+
# attempt to see if uri is utf8 to avoid detection
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_uri = $conv->convert($uri);
-
+
unless ($tmp_uri) {
- my $charset = detect($uri) || 'iso-8859-15'; # defaults to latin9
+ my $charset = detect($uri) || 'ISO-8859-15'; # defaults to latin9
warn "charset = '$charset' for uri = '$uri'" if $debug;
- $conv = Text::Iconv->new($charset, "utf8");
+ $conv = Text::Iconv->new($charset, "UTF-8");
$uri = $conv->convert($uri);
warn "==> converted uri = '$uri'" if $debug;
}
diff --git a/lib/Mioga2/Config.pm b/lib/Mioga2/Config.pm
index b088823..4edac62 100644
--- a/lib/Mioga2/Config.pm
+++ b/lib/Mioga2/Config.pm
@@ -28,7 +28,7 @@ Config.pm: Access class to the current Mioga instance configuration.
This module permits to access to the current Mioga instance
configuration parameters.
-=head1 METHODS DESRIPTION
+=head1 METHODS DESCRIPTION
=cut
diff --git a/lib/Mioga2/DAVFS.pm b/lib/Mioga2/DAVFS.pm
index b56c4ad..7810f82 100644
--- a/lib/Mioga2/DAVFS.pm
+++ b/lib/Mioga2/DAVFS.pm
@@ -141,10 +141,11 @@ use Mioga2::tools::APIAuthz;
use Mioga2::tools::database;
use Mioga2::tools::string_utils;
use Mioga2::tools::Convert;
+use Net::INET6Glue::INET_is_INET6;
use XML::LibXML ();
use Mioga2::Constants;
-my $debug = 0;
+my $debug = 5;
my $MULTI_STATUS = 207; # code DAV for the Multi-status response
@@ -756,13 +757,17 @@ sub make_request {
my $host = $self->{host};
my $port = $self->{port};
+ my $hostport = $host;
+ $hostport = '['.$hostport.']' if $host =~ /:.*:/;
+ $hostport .= ':'.$port;
+
my $dav_uri = $config->GetDAVBasePath;
my $mioga_uri = $config->GetBasePath;
my $orig_uri = Mioga2::Classes::URI->new( uri => $callbacks->{uri}->() );
my $uri = $orig_uri->as_string;
$uri =~ s/^$mioga_uri/$dav_uri/;
- $uri = Mioga2::Classes::URI->new( uri => "$protocol://$host:$port$uri" )
+ $uri = Mioga2::Classes::URI->new( uri => "$protocol://$hostport$uri" )
->as_string;
print STDERR "[Mioga2::DAVFS::make_request] uri = $uri\n" if $debug;
@@ -774,7 +779,7 @@ sub make_request {
# process headers
if ( $header =~ /^destination/i && $value !~ $dav_uri ) {
- $value =~ s!(//)[^/]+$mioga_uri!$1$host$dav_uri!;
+ $value =~ s!(//)[^/]+$mioga_uri!$1$hostport$dav_uri!; # was only $host before!
$value = Mioga2::Classes::URI->new( uri => $value )->as_string;
$value = $self->escapeSpecialChars($value);
}
diff --git a/lib/Mioga2/Database.pm b/lib/Mioga2/Database.pm
index 4afa54b..ca8368c 100644
--- a/lib/Mioga2/Database.pm
+++ b/lib/Mioga2/Database.pm
@@ -61,7 +61,7 @@ sub new {
my $self = { };
bless($self, $class);
- for my $attr (qw/DBIlogin DBIpasswd DBIdriver DBname/) {
+ for my $attr (qw/DBhost DBport DBIlogin DBIpasswd DBIdriver DBname/) {
if (!defined ($attributes{$attr})) {
throw Mioga2::Exception::DB ("[Mioga2::Database::new]", "Cannot connect to database: " . $DBI::errstr);
}
@@ -72,8 +72,10 @@ sub new {
my $dbiPassword = $self->{database}->{DBIpasswd};
my $dbDriver = $self->{database}->{DBIdriver};
my $dbName = $self->{database}->{DBname};
+ my $dbHost = $self->{database}->{DBhost};
+ my $dbPort = $self->{database}->{DBport};
- my $datasource = "dbi:$dbDriver:dbname=$dbName";
+ my $datasource = "dbi:$dbDriver:dbname=$dbName;host=$dbHost;port=$dbPort";
$self->{dbh} = DBI->connect($datasource, $dbiUser, $dbiPassword);
diff --git a/lib/Mioga2/Exception/DB.pm b/lib/Mioga2/Exception/DB.pm
index 7ce5020..dd29d83 100644
--- a/lib/Mioga2/Exception/DB.pm
+++ b/lib/Mioga2/Exception/DB.pm
@@ -1,3 +1,4 @@
+
# ============================================================================
# Mioga2 Project (C) 2003-2007 The Mioga2 Project
#
@@ -53,6 +54,7 @@ sub new {
my($class, $function, $errDB, $errStr, $sql) = @_;
my $self = $class->SUPER::new(-text => "$errStr : $errDB");
$self->{errDB} = $errDB;
+ $self->{errStr} = $errStr;
$self->{sql} = $sql;
$self->{function} = $function;
return $self;
@@ -76,6 +78,27 @@ sub as_string
return $string;
}
+# ----------------------------------------------------------------------------
+=head2 getDBerr ()
+Return the error code as sent by the database connector.
+=cut
+# ----------------------------------------------------------------------------
+sub getDBerr {
+ my ($self) = @_;
+ return $self->{errDB};
+}
+
+# ----------------------------------------------------------------------------
+=head2 getDBerrstr ()
+Return the error string as sent by the database connector.
+=cut
+# ----------------------------------------------------------------------------
+sub getDBerrstr {
+ my ($self) = @_;
+ return $self->{errStr};
+}
+
+
# ============================================================================
=head1 PRIVATE METHODS DESCRIPTION
diff --git a/lib/Mioga2/InstanceList.pm b/lib/Mioga2/InstanceList.pm
index 3e2cf7a..a676a1b 100644
--- a/lib/Mioga2/InstanceList.pm
+++ b/lib/Mioga2/InstanceList.pm
@@ -423,7 +423,9 @@ sub Store {
$conf->RunHooks($self->{config}->{miogaconf});
# Run crawl.sh to initialize search engine database
- my $crawlcmd = $self->{config}->{miogaconf}->GetMiogaPrefix () . "/bin/mioga2_index.pl --conf=" . $self->{config}->GetMiogaConfPath () . ' --search_conf=' . $self->{config}->GetInstallPath . "/conf/search_conf.xml" . ' ' . $self->Get ('ident');
+ my $crawlcmd = $self->{config}->{miogaconf}->GetMiogaPrefix () . "/bin/mioga2_index.pl --conf=" . $self->{config}->GetMiogaConfPath ()
+ . ' --search_conf=' . $self->{config}->{miogaconf}->GetInstallDir()."/conf/search_conf.xml"
+ . ' '. $self->Get ('ident');
system ("$crawlcmd");
}
elsif (scalar (keys (%{$self->{update}}))) {
diff --git a/lib/Mioga2/Magellan.pm b/lib/Mioga2/Magellan.pm
index 86e2e42..4b3814a 100644
--- a/lib/Mioga2/Magellan.pm
+++ b/lib/Mioga2/Magellan.pm
@@ -327,10 +327,13 @@ sub GetNodes {
$mygroup = $group;
}
+ my $host = $config->GetMiogaConf()->GetDAVHost();
+ $host = "[${host}]" if $host =~ /:.*?:/;
+
my $resources = Mioga2::Magellan::DAV::GetCollection(
$context,
$context->GetSessionToken,
- $config->GetMiogaConf ()->GetDAVProtocol () . "://" . $config->GetMiogaConf ()->GetDAVHost () . ":" . $config->GetMiogaConf ()->GetDAVPort (), $node
+ $config->GetMiogaConf ()->GetDAVProtocol () . "://" . $host . ":" . $config->GetMiogaConf ()->GetDAVPort (), $node
);
$inconsistent = pop (@$resources);
@@ -853,7 +856,7 @@ sub GetResource {
);
print STDERR "mime = $mime\n content= $content\n" if ($debug);
if ($mime) {
- #my $conv = Text::Iconv->new( 'utf8', 'utf8' );
+ #my $conv = Text::Iconv->new( 'UTF-8', 'UTF-8' );
#my $encoding = $conv->convert($content);
#if ($encoding) {
if ($mime =~ /(application\/xml)|(text\/)|(application\/xsl)/)
diff --git a/lib/Mioga2/Magellan/Archive.pm b/lib/Mioga2/Magellan/Archive.pm
index 842ae90..3b9e7f7 100644
--- a/lib/Mioga2/Magellan/Archive.pm
+++ b/lib/Mioga2/Magellan/Archive.pm
@@ -163,7 +163,7 @@ sub Content
return undef;
}
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my %folders;
while(my $file = <PRG>) {
chomp($file);
diff --git a/lib/Mioga2/Magellan/DAV.pm b/lib/Mioga2/Magellan/DAV.pm
index cdf77f1..116ebf4 100644
--- a/lib/Mioga2/Magellan/DAV.pm
+++ b/lib/Mioga2/Magellan/DAV.pm
@@ -106,7 +106,7 @@ sub ExecuteRequest
}
if ($response->code >= 500) {
- warn "Mioga2::Magellan::ExecuteRequest Failed: " . $response->content;
+ warn "Mioga2::Magellan::DAV::ExecuteRequest Failed: " . $response->content;
}
return $response;
diff --git a/lib/Mioga2/MailingList.pm b/lib/Mioga2/MailingList.pm
index c7d631c..3a92466 100644
--- a/lib/Mioga2/MailingList.pm
+++ b/lib/Mioga2/MailingList.pm
@@ -749,11 +749,11 @@ sub MailDeleteMsg
sub CheckUTF8 {
my ($str) = @_;
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_str = $conv->convert($str);
unless ($tmp_str) {
- my $charset = detect($str) || 'iso-8859-15'; # defaults to latin9
- $conv = Text::Iconv->new($charset, "utf8");
+ my $charset = detect($str) || 'ISO-8859-15'; # defaults to latin9
+ $conv = Text::Iconv->new($charset, "UTF-8");
$str = $conv->convert($str);
}
return $str;
diff --git a/lib/Mioga2/MiogaConf.pm b/lib/Mioga2/MiogaConf.pm
index 5b74a96..a46e27c 100644
--- a/lib/Mioga2/MiogaConf.pm
+++ b/lib/Mioga2/MiogaConf.pm
@@ -258,7 +258,7 @@ sub GetFilenameEncoding {
sub GetBinariesDir {
my ($self) = @_;
- return $self->{binaries_dir};
+ return $self->{bin_dir};
}
# ============================================================================
diff --git a/lib/Mioga2/Search.pm b/lib/Mioga2/Search.pm
index 2c20259..292f864 100644
--- a/lib/Mioga2/Search.pm
+++ b/lib/Mioga2/Search.pm
@@ -447,13 +447,13 @@ sub CheckArgs
elsif (exists($context->{args}->{query})) {
$query_string = $context->{args}->{query};
}
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_query = $conv->convert($query_string);
unless ($tmp_query) {
- my $charset = detect($query_string) || 'iso-8859-15'; # defaults to latin9
+ my $charset = detect($query_string) || 'ISO-8859-15'; # defaults to latin9
warn "charset = '$charset' for query_string = '$query_string'" if $debug;
- $conv = Text::Iconv->new($charset, "utf8");
+ $conv = Text::Iconv->new($charset, "UTF-8");
$query_string = $conv->convert($query_string);
warn "==> converted query_string = '$query_string'" if $debug;
}
diff --git a/lib/Mioga2/tools/string_utils.pm b/lib/Mioga2/tools/string_utils.pm
index c56cc3b..36bfa5a 100644
--- a/lib/Mioga2/tools/string_utils.pm
+++ b/lib/Mioga2/tools/string_utils.pm
@@ -46,6 +46,7 @@ use Mioga2::Content::XSLT;
use Mioga2::XML::Simple;
use Exporter;
use Text::Iconv;
+use Encode;
use Encode::Detect::Detector;
use Data::Dumper;
use HTML::TokeParser::Simple;
@@ -715,11 +716,11 @@ Check if string is UTF8 and convert it if needed
sub st_CheckUTF8 {
my ($str) = @_;
- my $conv = Text::Iconv->new('utf8', 'utf8');
+ my $conv = Text::Iconv->new('UTF-8', 'UTF-8');
my $tmp_str = $conv->convert($str);
unless ($tmp_str) {
- my $charset = detect($str) || 'iso-8859-15'; # defaults to latin9
- $conv = Text::Iconv->new($charset, "utf8");
+ my $charset = detect($str) || 'ISO-8859-15'; # defaults to latin9
+ $conv = Text::Iconv->new($charset, "UTF-8");
$str = $conv->convert($str);
utf8::decode ($str);
}
diff --git a/lib/MiogaConf.pm b/lib/MiogaConf.pm
index 0870174..bbcc80a 100644
--- a/lib/MiogaConf.pm
+++ b/lib/MiogaConf.pm
@@ -836,6 +836,10 @@ sub CheckDepends
my @missing;
my @missing_clib;
+ # Some modules rewrite $ENV{PATH} without hesitation when "require"d,
+ # we need to put the old one back in place.
+ my $oldpath = $ENV{PATH};
+
foreach my $dep (@{$self->{CONFIG}->{dependencies}->[0]->{dep}}) {
my $version;
if(exists $dep->{version}) {
@@ -858,6 +862,7 @@ sub CheckDepends
}
}
+ $ENV{PATH} = $oldpath;
foreach my $dep (@{$self->{CONFIG}->{dependencies}->[0]->{clib}}) {
my $version;
diff --git a/sql/Makefile b/sql/Makefile
index 07b26f5..3d79b1b 100644
--- a/sql/Makefile
+++ b/sql/Makefile
@@ -15,19 +15,21 @@ install:
cp upgradeMiogletDesc.pl $(DESTDIR)$(INSTALL_DIR)/conf/Config.hook.d
chmod a+x $(DESTDIR)$(INSTALL_DIR)/conf/Config.hook.d/upgradeMiogletDesc.pl
- if [ $(INIT_SQL) = 'yes' ] ; \
+ DB_STATE=`perl -w testdb.pl`; \
+ if [ "$$DB_STATE" = "nodb" -o "$$DB_STATE" = "empty" ] ; \
then \
echo "Initialize database"; \
- su - $(POSTGRES_USER) -c "dropdb $(DB_NAME)" ; \
- su - $(POSTGRES_USER) -c "createdb --encoding UTF8 $(DB_NAME)" && \
- su $(POSTGRES_USER) -c "psql $(DB_NAME) < create_lang.sql" && \
- perl -w -I../lib initdb.pl ; \
- elif [ $(INIT_SQL) != 'noupdate' ]; then \
+ dropdb -h $(DB_HOST) -p $(DB_PORT) -U $(DBI_LOGIN) $(DB_NAME) ; \
+ createdb --encoding UTF8 -h $(DB_HOST) -p $(DB_PORT) -U $(DBI_LOGIN) $(DB_NAME) && \
+ psql -h $(DB_HOST) -p $(DB_PORT) -U $(DBI_LOGIN) $(DB_NAME) < create_lang.sql && \
+ perl -w -I../lib initdb.pl force_init_sql=1; \
+ elif [ "$$DB_STATE" = "present" ]; then \
echo "Update database"; \
perl -w -I../lib updatedb.pl configxml=$(DESTDIR)$(INSTALL_DIR)/conf/Config.xml; \
+ elif [ "$$DB_STATE" = "noserver" ]; then \
+ echo "ERROR: Cannot connect to the database server!"; \
fi
-
clean:
rm -f *~
diff --git a/sql/schema_base.sql b/sql/schema_base.sql
index b7d8cc2..88d5e2d 100644
--- a/sql/schema_base.sql
+++ b/sql/schema_base.sql
@@ -429,10 +429,6 @@ CREATE OR REPLACE FUNCTION check_group_base_default_profile_id () RETURNS trigge
END;
' LANGUAGE 'plpgsql';
-CREATE TRIGGER m_group_base_default_profile_id_check
- BEFORE DELETE ON m_profile FOR EACH ROW
- EXECUTE PROCEDURE check_group_base_default_profile_id ();
-
CREATE OR REPLACE FUNCTION check_group_base_mioga_id () RETURNS trigger AS '
DECLARE
@@ -839,6 +835,10 @@ create table m_profile (
);
create unique index m_profile_ident_group_index on m_profile (ident, group_id);
+CREATE TRIGGER m_group_base_default_profile_id_check
+ BEFORE DELETE ON m_profile FOR EACH ROW
+ EXECUTE PROCEDURE check_group_base_default_profile_id ();
+
--
-- Add referencial integrity on default_profile_id in m_group_base
diff --git a/sql/testdb.pl b/sql/testdb.pl
new file mode 100755
index 0000000..1952891
--- /dev/null
+++ b/sql/testdb.pl
@@ -0,0 +1,64 @@
+#!/usr/bin/perl -w
+
+# Tests the availability of the Mioga2 database.
+# Returns on stdout one of:
+# 'present' - The Mioga2 database has been found and contains data.
+# 'empty' - The Mioga2 database exists but the table "m_mioga" is empty or nonexistent.
+# 'nodb' - There is no Mioga2 database but the database server is working fine
+# (i.e. one can try to create the database)
+# 'noserver' - Connection to the database server failed.
+
+# This has been written for Mioga2/SlapOS, to avoid overwriting the database
+# when re-instantiating the Apache/mod_perl partition.
+
+use strict;
+use lib "../lib";
+
+use Data::Dumper;
+use DBI;
+use Error qw(:try);
+use Mioga2::Exception::DB;
+use Mioga2::MiogaConf;
+
+# TODO: get this through a parameter?
+# my $configxml = "../conf/Config.xml";
+my $miogaconf = "../web/conf/Mioga.conf";
+
+my $result = 'undefined';
+try {
+ my $config = new Mioga2::MiogaConf($miogaconf);
+ my $dbh = $config->GetDBH();
+ my $sql = 'SELECT COUNT(*) FROM m_mioga';
+ my $sth = $dbh->prepare($sql);
+ my $exec_result = $sth->execute();
+ if (not defined($exec_result)) {
+ throw Mioga2::Exception::DB("testdb.pl 0", $sth->err, $sth->errstr, $sql);
+ }
+ my $res = $sth->fetchrow_arrayref();
+ if (not defined($res)) {
+ throw Mioga2::Exception::DB("testdb.pl 1", $sth->err, $sth->errstr, $sql);
+ }
+ my $count = $res->[0];
+ if (not defined($count)) {
+ throw Mioga2::Exception::DB("testdb.pl 2", $sth->err, $sth->errstr, $sql);
+ } elsif ($count == 0) {
+ $result = 'empty';
+ } else {
+ $result = 'present';
+ }
+} catch Mioga2::Exception::DB with {
+ my $err = shift;
+ my $errstr = $err->getDBerrstr();
+ if ($errstr =~ m#database "mioga2" does not exist#) {
+ $result = 'nodb';
+ } elsif ($errstr =~ m#relation "m_mioga" does not exist#) {
+ $result = 'empty';
+ } else {
+ $result = 'noserver';
+ }
+} otherwise {
+ my $err = shift;
+ $result = 'noserver';
+};
+
+print $result;
diff --git a/sql/updatedb.pl b/sql/updatedb.pl
index 67921e7..bb4db1a 100644
--- a/sql/updatedb.pl
+++ b/sql/updatedb.pl
@@ -25,7 +25,7 @@ foreach my $var qw(configxml miogaconf timezonexml dir) {
my $config = new Mioga2::MiogaConf($miogaconf);
-my $conf = new MiogaConf( dir => $dir, config => $configxml);
+my $conf = new MiogaConf( dir => $dir, config => $configxml, force_init_sql => 0);
$conf->Install($config);
diff --git a/web/Makefile b/web/Makefile
index 66af45f..9649a02 100644
--- a/web/Makefile
+++ b/web/Makefile
@@ -16,11 +16,11 @@ all ::
install ::
- if [ $(INIT_SQL) = 'yes' -a -d $(INSTALL_DIR) ] ; \
- then \
- su - $(POSTGRES_USER) -c "pg_dump -b -Ft --column-inserts $(DB_NAME) | gzip" > ${INSTALL_DIR}/db_dump.tar.gz ; \
- mv $(INSTALL_DIR) $(BACKUP_DIR) ; \
- fi
+# if [ $(INIT_SQL) = 'yes' -a -d $(INSTALL_DIR) ] ; \
+# then \
+# su - $(POSTGRES_USER) -c "pg_dump -b -Ft --column-inserts $(DB_NAME) | gzip" > ${INSTALL_DIR}/db_dump.tar.gz ; \
+# mv $(INSTALL_DIR) $(BACKUP_DIR) ; \
+# fi
mkdir -p $(TMP_DIR)
mkdir -p $(DESTDIR)$(INSTALL_DIR)/$(INSTANCE_IDENT)/$(MIOGA_FILES)
diff --git a/web/conf/Makefile b/web/conf/Makefile
index 1573e5c..189f2fd 100644
--- a/web/conf/Makefile
+++ b/web/conf/Makefile
@@ -14,5 +14,6 @@ install:
# is done in ../Makefile
clean:
+ echo "VHH DEBUG: web/conf make clean, deleting apache/ !"
rm -Rf apache Mioga.conf .memdump
diff --git a/web/conf/startup.pl b/web/conf/startup.pl
index a9dc968..6558061 100644
--- a/web/conf/startup.pl
+++ b/web/conf/startup.pl
@@ -29,6 +29,7 @@ use MIME::Entity ();
use MIME::Parser ();
use MIME::QuotedPrint ();
use MiogaConf ();
+use Net::INET6Glue::INET_is_INET6;
use Net::LDAP ();
use Parse::Yapp::Driver ();
use Storable ();
diff --git a/web/skel/Makefile b/web/skel/Makefile
index e604e7d..fbaad2d 100644
--- a/web/skel/Makefile
+++ b/web/skel/Makefile
@@ -33,7 +33,7 @@ fr_FR : $(SRC_FR_FILES)
install:
rsync $(RSYNC_OPTS) $(SUBDIRS) $(DESTDIR)$(INSTALL_DIR)/conf/skel/
# Update instance default skeletons
- for i in `mioga2_info.pl --conf=$(DESTDIR)$(INSTALL_DIR)/conf/Mioga.conf instances`; do if [ ! -d /var/lib/Mioga2/$$i/MiogaFiles/skel/ ]; then mkdir $(DESTDIR)$(INSTALL_DIR)/$$i/MiogaFiles/skel/; fi; cp -R $(DESTDIR)$(INSTALL_DIR)/conf/skel/* $(DESTDIR)$(INSTALL_DIR)/$$i/MiogaFiles/skel/; done
+# for i in `mioga2_info.pl --conf=$(DESTDIR)$(INSTALL_DIR)/conf/Mioga.conf instances`; do if [ ! -d /var/lib/Mioga2/$$i/MiogaFiles/skel/ ]; then mkdir $(DESTDIR)$(INSTALL_DIR)/$$i/MiogaFiles/skel/; fi; cp -R $(DESTDIR)$(INSTALL_DIR)/conf/skel/* $(DESTDIR)$(INSTALL_DIR)/$$i/MiogaFiles/skel/; done
clean:
rm -rf $(SUBDIRS)
diff --git a/web/skel/src/en_US/group/50-standard.xml b/web/skel/src/en_US/group/50-standard.xml
index 6cf89e5..e1ef140 100644
--- a/web/skel/src/en_US/group/50-standard.xml
+++ b/web/skel/src/en_US/group/50-standard.xml
@@ -12,7 +12,6 @@
<applications>
<application ident="AnimGroup" active="1"/>
<application ident="Magellan" active="1"/>
- <application ident="Mermoz" active="1"/>
<application ident="Organizer"/>
<application ident="Contact"/>
<application ident="Tasks"/>
@@ -38,9 +37,6 @@
<application ident="Magellan">
<all_functions/>
</application>
- <application ident="Mermoz">
- <all_functions/>
- </application>
<application ident="Portal">
<all_functions/>
</application>
@@ -53,10 +49,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- <function>Standard</function>
- </application>
<application ident="Portal">
<function>Portal</function>
</application>
@@ -69,9 +61,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- </application>
<application ident="Portal">
<function>Portal</function>
</application>
diff --git a/web/skel/src/en_US/group/60-admin.xml b/web/skel/src/en_US/group/60-admin.xml
index c73b700..d3f368b 100644
--- a/web/skel/src/en_US/group/60-admin.xml
+++ b/web/skel/src/en_US/group/60-admin.xml
@@ -12,7 +12,6 @@
<applications>
<application ident="AnimGroup" active="1"/>
<application ident="Magellan" active="1"/>
- <application ident="Mermoz" active="1"/>
<application ident="Colbert" active="1"/>
<application ident="Organizer"/>
<application ident="Contact"/>
@@ -39,9 +38,6 @@
<application ident="Magellan">
<all_functions/>
</application>
- <application ident="Mermoz">
- <all_functions/>
- </application>
<application ident="Colbert">
<all_functions/>
</application>
@@ -57,10 +53,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- <function>Standard</function>
- </application>
<application ident="Colbert">
<function>UsersWrite</function>
<function>GroupsWrite</function>
@@ -79,9 +71,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- </application>
<application ident="Colbert">
<function>UsersRead</function>
<function>GroupsRead</function>
diff --git a/web/skel/src/en_US/instance/50-standard.xml b/web/skel/src/en_US/instance/50-standard.xml
index cdb1e88..82a125b 100644
--- a/web/skel/src/en_US/instance/50-standard.xml
+++ b/web/skel/src/en_US/instance/50-standard.xml
@@ -102,10 +102,6 @@
</application>
<application ident="MailingList">
</application>
- <application ident="Mermoz">
- <all_users/>
- <all_groups/>
- </application>
<application ident="Narkissos">
<all_users/>
</application>
diff --git a/web/skel/src/en_US/user/50-standard.xml b/web/skel/src/en_US/user/50-standard.xml
index c8983bb..256400d 100644
--- a/web/skel/src/en_US/user/50-standard.xml
+++ b/web/skel/src/en_US/user/50-standard.xml
@@ -92,7 +92,6 @@
<application ident="FileBrowser"/>
<application ident="Portal"/>
<application ident="Search" active="1"/>
- <application ident="Mermoz"/>
<application ident="RSS" active="1"/>
</applications>
diff --git a/web/skel/src/en_US/user/60-admin.xml b/web/skel/src/en_US/user/60-admin.xml
index 9083b6f..e246d19 100644
--- a/web/skel/src/en_US/user/60-admin.xml
+++ b/web/skel/src/en_US/user/60-admin.xml
@@ -101,7 +101,6 @@
<application ident="FileBrowser"/>
<application ident="Portal"/>
<application ident="Search" active="1"/>
- <application ident="Mermoz"/>
<application ident="RSS" active="1"/>
</applications>
diff --git a/web/skel/src/fr_FR/group/50-standard.xml b/web/skel/src/fr_FR/group/50-standard.xml
index 954f71d..dfc79b1 100644
--- a/web/skel/src/fr_FR/group/50-standard.xml
+++ b/web/skel/src/fr_FR/group/50-standard.xml
@@ -35,9 +35,6 @@
<!-- Le navigateur de fichiers est accessible et actif. -->
<application ident="Magellan" active="1"/>
- <!-- L'application de rédaction d'e-mails est accessible et active. -->
- <application ident="Mermoz" active="1"/>
-
<!-- Le portail est accessible et actif. -->
<application ident="Portal" active="1"/>
@@ -77,11 +74,6 @@
<all_functions/>
</application>
- <!-- Le profil "Animation" donne accès à toutes les fonctions de l'application Mermoz d'envoi d'e-mail. -->
- <application ident="Mermoz">
- <all_functions/>
- </application>
-
<!-- Le profil "Animation" donne accès à toutes les fonctions du portail de groupe. -->
<application ident="Portal">
<all_functions/>
@@ -102,12 +94,6 @@
<function>Read</function>
</application>
- <!-- Le profil "Membre" donne accès uniquement à l'envoi d'e-mail au groupe ou indépendamment aux différentes équipes du groupe via l'application d'envoi d'e-mail Mermoz. -->
- <application ident="Mermoz">
- <function>Base</function>
- <function>Standard</function>
- </application>
-
<!-- Le profil "Membre" donne accès uniquement à la consultation du portail de groupe. -->
<application ident="Portal">
<function>Portal</function>
@@ -128,11 +114,6 @@
<function>Read</function>
</application>
- <!-- Le profil "Invité" donne accès uniquement à l'envoi d'e-mail au groupe via l'application d'envoi d'e-mail Mermoz. -->
- <application ident="Mermoz">
- <function>Base</function>
- </application>
-
<!-- Le profil "Invité" donne accès uniquement à la consultation du portail de groupe. -->
<application ident="Portal">
<function>Portal</function>
@@ -151,6 +132,7 @@
<space type="private">
<!-- La balise "acls" contient les définitions de droits d'accès à la racine de l'espace privé du groupe. -->
+
<acls>
<!-- Les profils "Animation" et "Membre" ont accès en lecture et écriture. -->
diff --git a/web/skel/src/fr_FR/group/60-admin.xml b/web/skel/src/fr_FR/group/60-admin.xml
index d9bd724..1c97ca5 100644
--- a/web/skel/src/fr_FR/group/60-admin.xml
+++ b/web/skel/src/fr_FR/group/60-admin.xml
@@ -12,7 +12,6 @@
<applications>
<application ident="AnimGroup" active="1"/>
<application ident="Magellan" active="1"/>
- <application ident="Mermoz" active="1"/>
<application ident="Colbert" active="1"/>
<application ident="Organizer"/>
<application ident="Contact"/>
@@ -39,9 +38,6 @@
<application ident="Magellan">
<all_functions/>
</application>
- <application ident="Mermoz">
- <all_functions/>
- </application>
<application ident="Colbert">
<all_functions/>
</application>
@@ -57,10 +53,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- <function>Standard</function>
- </application>
<application ident="Colbert">
<function>UsersWrite</function>
<function>GroupsWrite</function>
@@ -79,9 +71,6 @@
<application ident="Magellan">
<function>Read</function>
</application>
- <application ident="Mermoz">
- <function>Base</function>
- </application>
<application ident="Colbert">
<function>UsersRead</function>
<function>GroupsRead</function>
diff --git a/web/skel/src/fr_FR/instance/50-standard.xml b/web/skel/src/fr_FR/instance/50-standard.xml
index 34a8536..f7d1eb1 100644
--- a/web/skel/src/fr_FR/instance/50-standard.xml
+++ b/web/skel/src/fr_FR/instance/50-standard.xml
@@ -215,12 +215,6 @@
<application ident="MailingList">
</application>
- <!-- L'application d'envoi d'e-mail est accessible à tous les utilisateurs et à tous les groupes. -->
- <application ident="Mermoz">
- <all_users/>
- <all_groups/>
- </application>
-
<!-- L'application de gestion des données personnelles est accessible à tous les utilisateurs. -->
<application ident="Narkissos">
<all_users/>
diff --git a/web/skel/src/fr_FR/user/50-standard.xml b/web/skel/src/fr_FR/user/50-standard.xml
index c7da5e8..d07b180 100644
--- a/web/skel/src/fr_FR/user/50-standard.xml
+++ b/web/skel/src/fr_FR/user/50-standard.xml
@@ -138,7 +138,6 @@
<!-- L'afficheur de fichiers, le portail utilisateur et l'application d'envoi d'e-mails Mermoz sont accessible mais ne sont pas actives. L'utilisateur pourra les activer s'il le souhaite. -->
<application ident="FileBrowser"/>
<application ident="Portal"/>
- <application ident="Mermoz"/>
</applications>
diff --git a/web/skel/src/fr_FR/user/60-admin.xml b/web/skel/src/fr_FR/user/60-admin.xml
index afff18d..4d4df04 100644
--- a/web/skel/src/fr_FR/user/60-admin.xml
+++ b/web/skel/src/fr_FR/user/60-admin.xml
@@ -101,7 +101,6 @@
<application ident="FileBrowser"/>
<application ident="Portal"/>
<application ident="Search" active="1"/>
- <application ident="Mermoz"/>
<application ident="RSS" active="1"/>
</applications>
[buildout]
develop =
/srv/slapgrid/slappart9/srv/runner/project/slapos
/opt/slapdev_build
extends =
../../component/rsync/buildout.cfg
../../component/apache-perl/buildout.cfg
../../component/perl-XML-Parser/buildout.cfg
../../component/perl-XML-LibXML/buildout.cfg
../../component/perl-Term-ReadLine-Gnu/buildout.cfg
../../component/perl-Image-Magick/buildout.cfg
../../component/postgresql/buildout.cfg
../../component/libxslt/buildout.cfg
../../component/dcron/buildout.cfg
../../component/dropbear/buildout.cfg
../../component/lxml-python/buildout.cfg
../../stack/slapos.cfg
parts =
eggs
apache-perl
perl-Apache2-Request
perl-Crypt-SSLeay
perl-DBD-Pg
perl-XML-Parser
perl-XML-LibXML
perl-XML-LibXSLT
perl-Term-ReadLine-Gnu
perl-Text-Iconv
perl-Image-Magick
perl-String-Checker-mioga
perl-Search-Xapian
cpan-simple-modules
rsync
mioga
template
template-apacheperl
template-postgres
# These unmaintained components were inlined here.
# slapos.recipe.build:cpan no longer exists; we now use the perl-CPAN-package macro instead.
# ../../component/perl-Crypt-SSLeay/buildout.cfg
[buildout]
extends +=
../../component/openssl/buildout.cfg
../../component/zlib/buildout.cfg
[perl-Crypt-SSLeay]
recipe = slapos.recipe.build:cpan
modules =
G/GA/GAAS/URI-1.60.tar.gz
N/NA/NANIS/Crypt-SSLeay-0.64.tar.gz
cpan-configuration =
make_arg=('OTHERLDFLAGS="-L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${openssl:location}/lib -Wl,-R${openssl:location}/lib"')
makepl_arg=('INC=-I${openssl:location}/include')
environment =
OPENSSL_PREFIX=${openssl:location}
perl = perl
# ../../component/perl-XML-LibXSLT/buildout.cfg
[buildout]
extends +=
../libxslt/buildout.cfg
../libxml2/buildout.cfg
../zlib/buildout.cfg
[perl-XML-LibXSLT]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libxslt:location}/lib -Wl,-R${libxslt:location}/lib -L${zlib:location}/lib -Wl,-R${zlib:location}/lib -L${libxml2:location}/lib -Wl,-R${libxml2:location}/lib" INC="-I${libxslt:location}/include -I${libxml2:location}/include/libxml2"'
modules =
S/SH/SHLOMIF/XML-LibXSLT-1.78.tar.gz
perl = perl
# ../../component/perl-Text-Iconv/buildout.cfg
[buildout]
extends +=
../libiconv/buildout.cfg
[perl-Text-Iconv]
recipe = slapos.recipe.build:cpan
cpan-configuration =
makepl_arg='LIBS="-L${libiconv:location}/lib -Wl,-R${libiconv:location}/lib" INC="-I${libiconv:location}/include"'
modules =
M/MP/MPIOTR/Text-Iconv-1.7.tar.gz
perl = perl
# ../../component/perl-DBD-Pg/buildout.cfg
[buildout]
extends +=
../postgresql/buildout.cfg
[perl-DBD-Pg]
recipe = slapos.recipe.build:cpan
modules =
T/TU/TURNSTEP/DBD-Pg-2.19.3.tar.gz
environment =
POSTGRES_HOME=${postgresql:location}
perl = perl
# ../../component/perl-Search-Xapian/buildout.cfg
[buildout]
extends +=
../xapian/buildout.cfg
[perl-Search-Xapian]
recipe = slapos.recipe.build:cpan
modules =
O/OL/OLLY/Search-Xapian-1.2.10.0.tar.gz
environment =
XAPIAN_CONFIG=${xapian:location}/bin/xapian-config
perl = perl
[eggs]
recipe = zc.recipe.egg
eggs =
${lxml-python:egg}
slapos.cookbook
cns.recipe.symlink
# override perl here to keep using 5.14.x.
[perl]
recipe = hexagonit.recipe.cmmi
version = 5.14.2
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 04a4c5d3c1f9f19d77daff8e8cd19a26
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1
patches =
${perl-keep-linker-flags-in-ldflags.patch:location}/${perl-keep-linker-flags-in-ldflags.patch:filename}
# Viktor has adapted the following commands for AMD64 compilation
# TODO: find out how we can write generic code that suits all architectures
configure-command =
sh Configure -des \
-A ccflags=-fPIC \
-Dprefix=${buildout:parts-directory}/${:_buildout_section_name_} \
-Dsiteprefix=${:siteprefix} \
-Dcflags=-I${gdbm:location}/include \
-Dldflags="-L${gdbm:location}/lib -Wl,-rpath=${gdbm:location}/lib" \
-Ui_db \
-Dnoextensions=ODBM_File \
-Dusethreads
environment =
PATH=${patch:location}/bin:%(PATH)s
CFLAGS='-m64 -mtune=nocona'
post-make-hook = ${perl-postmakehook-download:location}/${perl-postmakehook-download:filename}:post_make_hook
[cpan-simple-modules]
recipe = slapos.recipe.build:cpan
modules =
S/ST/STBEY/Date-Calc-6.3.tar.gz
D/DC/DCOPPIT/Benchmark-Timer-0.7102.tar.gz
R/RB/RBOW/Date-ICal-2.678.tar.gz
S/SB/SBECK/Date-Manip-6.37.tar.gz
G/GB/GBARR/TimeDate-1.20.tar.gz
S/SH/SHLOMIF/Error-0.17018.tar.gz
P/PA/PARDUS/File-MimeInfo/File-MimeInfo-0.16.tar.gz
O/OV/OVID/HTML-TokeParser-Simple-3.15.tar.gz
D/DS/DSKOLL/MIME-tools-5.503.tar.gz
D/DS/DSKOLL/IO-stringy-2.110.tar.gz
C/CO/COSIMO/HTTP-DAV-0.47.tar.gz
M/MA/MARSCHAP/perl-ldap-0.48.tar.gz
F/FD/FDESAR/Parse-Yapp-1.05.tar.gz
S/SN/SNOWHARE/Unicode-MapUTF8-1.11.tar.gz
G/GA/GAAS/Unicode-String-2.09.tar.gz
T/TY/TYEMQ/Algorithm-Diff-1.1902.tar.gz
J/JG/JGMYERS/Encode-Detect-1.01.tar.gz
G/GU/GUIDO/libintl-perl-1.20.tar.gz
K/KE/KEN/XML-XML2JSON-0.06.tar.gz
A/AR/ARISTOTLE/XML-Atom-SimpleFeed-0.86.tar.gz
P/PE/PETDANCE/Test-WWW-Mechanize-1.44.tar.gz
G/GR/GRANTM/XML-Simple-2.20.tar.gz
A/AD/ADAMK/Archive-Zip-1.30.tar.gz
D/DU/DURIST/Proc-ProcessTable-0.45.tar.gz
S/SU/SULLR/Net-INET6Glue-0.5.tar.gz
perl = perl
cpan-configuration =
makepl_arg=''
make_arg=''
[perl-String-Checker-mioga]
recipe = hexagonit.recipe.cmmi
url = http://packages.alixen.org/contribs/String-Checker-0.03.tar.gz
md5sum = c750a33505609544f95eace7a2896c84
configure-command =
${perl:location}/bin/perl Makefile.PL
[mioga]
recipe = hexagonit.recipe.cmmi
version = 2.4.16
# No use re-using "version": the whole URL will change for the next release
url = http://www.alixen.org/attachments/download/89/Mioga2-2.4.16.tar.gz
md5sum = 1d2e76c798ee6d5f233011997200e125
location = ${buildout:parts-directory}/${:_buildout_section_name_}
buildinst = ${mioga:location}/buildinst
static = ${mioga:location}/static
environment =
MIOGA_BASE=${mioga:location}
MIOGA_BUILDINST=${mioga:buildinst}
MIOGA_STATIC=${mioga:static}
MIOGA_SITEPERL=${perl:siteprefix}
PATH=${libxslt:location}/bin:${libxml2:location}/bin:${perl:location}/bin:${perl:siteprefix}/bin:${rsync:location}/bin:%(PATH)s
patch-options = -p1
patches =
${mioga-patch:location}/${mioga-patch:filename}
# post-make-hook = ${mioga-postmakehook:location}/${mioga-postmakehook:filename}:post_make_hook
pre-configure-hook = ${mioga-hooks:location}/${mioga-hooks:filename}:pre_configure_hook
configure-command =
${perl:location}/bin/perl Makefile.PL
make-targets =
slapos-compilation
keep-compile-dir = true
perl-binary = ${perl:location}/bin/perl
[mioga-patch]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
# md5sum = b836ad89902d1ea68b091a5b9800edd8
download-only = true
filename = ${:_buildout_section_name_}
[mioga-hooks]
recipe = hexagonit.recipe.download
url = ${:_profile_base_location_}/${:filename}
# md5sum = c7ceec7788749238cb5fbe09beb647b1
download-only = true
filename = mioga-hooks.py
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg
# md5sum =
output = ${buildout:directory}/template.cfg
mode = 0644
[template-apacheperl]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-apacheperl.cfg
# md5sum =
output = ${buildout:directory}/template-apacheperl.cfg
mode = 0644
compile-directory = ${mioga:compile-directory}/Mioga2-${mioga:version}
[template-postgres]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-postgres.cfg
# md5sum =
output = ${buildout:directory}/template-postgres.cfg
mode = 0644
# Exactly the same as software.cfg, but fetches slapos.cookbook and
# slapos.toolbox from their git repositories instead of the stable releases,
# allowing you to play with a bleeding-edge environment.
# You'll need to run buildout twice for this profile, as sketched below.
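# A minimal sketch of the intended workflow, assuming this profile is saved
# next to software.cfg (the file name software-dev.cfg below is an assumption):
#
#   bin/buildout -c software-dev.cfg   # first run clones the *-repository parts
#   bin/buildout -c software-dev.cfg   # second run builds against the develop eggs
#
# Two runs are typically needed because buildout sets up the develop entries
# before the *-repository parts have been cloned into the parts directory.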
[buildout]
extends =
../../component/git/buildout.cfg
software.cfg
parts +=
# Development parts
slapos.cookbook-repository
slapos.core-repository
slapos.toolbox-repository
check-recipe
develop =
${:parts-directory}/slapos.cookbook-repository
${:parts-directory}/slapos.core-repository
${:parts-directory}/slapos.toolbox-repository
[slapos.toolbox-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.toolbox.git
branch = master
git-executable = ${git:location}/bin/git
[slapos.cookbook-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.git
branch = tt-rss
git-executable = ${git:location}/bin/git
[slapos.core-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.core.git
branch = master
git-executable = ${git:location}/bin/git
[check-recipe]
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command =
grep parts ${buildout:develop-eggs-directory}/slapos.cookbook.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.core.egg-link &&
grep parts ${buildout:develop-eggs-directory}/slapos.toolbox.egg-link
[versions]
slapos.cookbook =
slapos.toolbox =
slapos.core =
[buildout]
# Run a SQL script to populate DB at first run
[tt-rss-init]
recipe = slapos.cookbook:apachephpconfigure
table_name = db.ttrss_users
#constraint = `pn_id`>0
lampconfigure = ${buildout:bin-directory}/lampconfigure
htdocs = $${apache-php:htdocs}
mysql-username = $${apache-php:mysql-username}
mysql-password = $${apache-php:mysql-password}
mysql-database = $${apache-php:mysql-database}
mysql-host = $${apache-php:mysql-host}
mysql-port = $${apache-php:mysql-port}
configureinstall-location = $${basedirectory:scripts}/configureInstall
sql-script = ${sql-script:location}/${sql-script:filename}
# Give the correct information to the apache recipe so that it can put the
# hostname in the tt-rss config file.
[apache-php]
application-url = $${request-frontend:connection-site_url}
SET NAMES utf8;
SET CHARACTER SET utf8;
begin;
create table ttrss_users (id integer primary key not null auto_increment,
login varchar(120) not null unique,
pwd_hash varchar(250) not null,
last_login datetime default null,
access_level integer not null default 0,
theme_id integer default null,
email varchar(250) not null default '',
full_name varchar(250) not null default '',
email_digest bool not null default false,
last_digest_sent datetime default null,
salt varchar(250) not null default '',
created datetime default null,
twitter_oauth longtext default null,
otp_enabled boolean not null default false,
index (theme_id)) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_users (login,pwd_hash,access_level) values ('admin',
'SHA1:5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8', 10);
create table ttrss_feed_categories(id integer not null primary key auto_increment,
owner_uid integer not null,
title varchar(200) not null,
collapsed bool not null default false,
order_id integer not null default 0,
parent_cat integer,
index(parent_cat),
foreign key (parent_cat) references ttrss_feed_categories(id) ON DELETE SET NULL,
index(owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_archived_feeds (id integer not null primary key,
owner_uid integer not null,
title varchar(200) not null,
feed_url text not null,
site_url varchar(250) not null default '',
index(owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_counters_cache (
feed_id integer not null,
owner_uid integer not null,
value integer not null default 0,
updated datetime not null,
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE
);
create index ttrss_counters_cache_feed_id_idx on ttrss_counters_cache(feed_id);
create index ttrss_counters_cache_owner_uid_idx on ttrss_counters_cache(owner_uid);
create index ttrss_counters_cache_value_idx on ttrss_counters_cache(value);
create table ttrss_cat_counters_cache (
feed_id integer not null,
owner_uid integer not null,
value integer not null default 0,
updated datetime not null,
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE
);
create index ttrss_cat_counters_cache_owner_uid_idx on ttrss_cat_counters_cache(owner_uid);
create table ttrss_feeds (id integer not null auto_increment primary key,
owner_uid integer not null,
title varchar(200) not null,
cat_id integer default null,
feed_url text not null,
icon_url varchar(250) not null default '',
update_interval integer not null default 0,
purge_interval integer not null default 0,
last_updated datetime default 0,
last_error varchar(250) not null default '',
site_url varchar(250) not null default '',
auth_login varchar(250) not null default '',
auth_pass varchar(250) not null default '',
parent_feed integer default null,
private bool not null default false,
rtl_content bool not null default false,
hidden bool not null default false,
include_in_digest boolean not null default true,
cache_images boolean not null default false,
hide_images boolean not null default false,
cache_content boolean not null default false,
auth_pass_encrypted boolean not null default false,
last_viewed datetime default null,
last_update_started datetime default null,
always_display_enclosures boolean not null default false,
update_method integer not null default 0,
order_id integer not null default 0,
mark_unread_on_update boolean not null default false,
update_on_checksum_change boolean not null default false,
strip_images boolean not null default false,
pubsub_state integer not null default 0,
favicon_last_checked datetime default null,
index(owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE,
index(cat_id),
foreign key (cat_id) references ttrss_feed_categories(id) ON DELETE SET NULL,
index(parent_feed),
foreign key (parent_feed) references ttrss_feeds(id) ON DELETE SET NULL) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_feeds_owner_uid_index on ttrss_feeds(owner_uid);
create index ttrss_feeds_cat_id_idx on ttrss_feeds(cat_id);
insert into ttrss_feeds (owner_uid, title, feed_url) values
(1, 'Tiny Tiny RSS: New Releases', 'http://tt-rss.org/releases.rss');
insert into ttrss_feeds (owner_uid, title, feed_url) values
(1, 'Tiny Tiny RSS: Forum', 'http://tt-rss.org/forum/rss.php');
create table ttrss_entries (id integer not null primary key auto_increment,
title text not null,
guid varchar(255) not null unique,
link text not null,
updated datetime not null,
content longtext not null,
content_hash varchar(250) not null,
cached_content longtext,
no_orig_date bool not null default 0,
date_entered datetime not null,
date_updated datetime not null,
num_comments integer not null default 0,
plugin_data longtext,
comments varchar(250) not null default '',
author varchar(250) not null default '') ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_entries_date_entered_index on ttrss_entries(date_entered);
create index ttrss_entries_guid_index on ttrss_entries(guid);
create index ttrss_entries_updated_idx on ttrss_entries(updated);
create table ttrss_user_entries (
int_id integer not null primary key auto_increment,
ref_id integer not null,
uuid varchar(200) not null,
feed_id int,
orig_feed_id int,
owner_uid integer not null,
marked bool not null default 0,
published bool not null default 0,
tag_cache text not null,
label_cache text not null,
last_read datetime,
score int not null default 0,
note longtext,
last_marked datetime,
last_published datetime,
unread bool not null default 1,
index (ref_id),
foreign key (ref_id) references ttrss_entries(id) ON DELETE CASCADE,
index (feed_id),
foreign key (feed_id) references ttrss_feeds(id) ON DELETE CASCADE,
index (orig_feed_id),
foreign key (orig_feed_id) references ttrss_archived_feeds(id) ON DELETE SET NULL,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_user_entries_owner_uid_index on ttrss_user_entries(owner_uid);
create index ttrss_user_entries_ref_id_index on ttrss_user_entries(ref_id);
create index ttrss_user_entries_feed_id on ttrss_user_entries(feed_id);
create index ttrss_user_entries_unread_idx on ttrss_user_entries(unread);
create table ttrss_entry_comments (id integer not null primary key,
ref_id integer not null,
owner_uid integer not null,
private bool not null default 0,
date_entered datetime not null,
index (ref_id),
foreign key (ref_id) references ttrss_entries(id) ON DELETE CASCADE,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_filter_types (id integer primary key,
name varchar(120) unique not null,
description varchar(250) not null unique) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_filter_types (id,name,description) values (1, 'title', 'Title');
insert into ttrss_filter_types (id,name,description) values (2, 'content', 'Content');
insert into ttrss_filter_types (id,name,description) values (3, 'both',
'Title or Content');
insert into ttrss_filter_types (id,name,description) values (4, 'link',
'Link');
insert into ttrss_filter_types (id,name,description) values (5, 'date',
'Article Date');
insert into ttrss_filter_types (id,name,description) values (6, 'author', 'Author');
insert into ttrss_filter_types (id,name,description) values (7, 'tag', 'Article Tags');
create table ttrss_filter_actions (id integer not null primary key,
name varchar(120) unique not null,
description varchar(250) not null unique) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_filter_actions (id,name,description) values (1, 'filter',
'Delete article');
insert into ttrss_filter_actions (id,name,description) values (2, 'catchup',
'Mark as read');
insert into ttrss_filter_actions (id,name,description) values (3, 'mark',
'Set starred');
insert into ttrss_filter_actions (id,name,description) values (4, 'tag',
'Assign tags');
insert into ttrss_filter_actions (id,name,description) values (5, 'publish',
'Publish article');
insert into ttrss_filter_actions (id,name,description) values (6, 'score',
'Modify score');
insert into ttrss_filter_actions (id,name,description) values (7, 'label',
'Assign label');
create table ttrss_filters (id integer not null primary key auto_increment,
owner_uid integer not null,
feed_id integer default null,
filter_type integer not null,
reg_exp varchar(250) not null,
filter_param varchar(250) not null default '',
inverse bool not null default false,
enabled bool not null default true,
cat_filter bool not null default false,
cat_id integer default null,
action_id integer not null default 1,
action_param varchar(250) not null default '',
index (filter_type),
foreign key (filter_type) references ttrss_filter_types(id) ON DELETE CASCADE,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE,
index (feed_id),
foreign key (feed_id) references ttrss_feeds(id) ON DELETE CASCADE,
index (cat_id),
foreign key (cat_id) references ttrss_feed_categories(id) ON DELETE CASCADE,
index (action_id),
foreign key (action_id) references ttrss_filter_actions(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_filters2(id integer primary key auto_increment,
owner_uid integer not null,
match_any_rule boolean not null default false,
enabled boolean not null default true,
index(owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_filters2_rules(id integer primary key auto_increment,
filter_id integer not null references ttrss_filters2(id) on delete cascade,
reg_exp varchar(250) not null,
filter_type integer not null,
feed_id integer default null,
cat_id integer default null,
cat_filter boolean not null default false,
index (filter_id),
foreign key (filter_id) references ttrss_filters2(id) on delete cascade,
index (filter_type),
foreign key (filter_type) references ttrss_filter_types(id) ON DELETE CASCADE,
index (feed_id),
foreign key (feed_id) references ttrss_feeds(id) ON DELETE CASCADE,
index (cat_id),
foreign key (cat_id) references ttrss_feed_categories(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_filters2_actions(id integer primary key auto_increment,
filter_id integer not null,
action_id integer not null default 1 references ttrss_filter_actions(id) on delete cascade,
action_param varchar(250) not null default '',
index (filter_id),
foreign key (filter_id) references ttrss_filters2(id) on delete cascade,
index (action_id),
foreign key (action_id) references ttrss_filter_actions(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_tags (id integer primary key auto_increment,
owner_uid integer not null,
tag_name varchar(250) not null,
post_int_id integer not null,
index (post_int_id),
foreign key (post_int_id) references ttrss_user_entries(int_id) ON DELETE CASCADE,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_version (schema_version int not null) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_version values (106);
create table ttrss_enclosures (id integer primary key auto_increment,
content_url text not null,
content_type varchar(250) not null,
post_id integer not null,
title text not null,
duration text not null,
index (post_id),
foreign key (post_id) references ttrss_entries(id) ON DELETE cascade) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_enclosures_post_id_idx on ttrss_enclosures(post_id);
create table ttrss_settings_profiles(id integer primary key auto_increment,
title varchar(250) not null,
owner_uid integer not null,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_prefs_types (id integer not null primary key,
type_name varchar(100) not null) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_prefs_types (id, type_name) values (1, 'bool');
insert into ttrss_prefs_types (id, type_name) values (2, 'string');
insert into ttrss_prefs_types (id, type_name) values (3, 'integer');
create table ttrss_prefs_sections (id integer not null primary key,
order_id integer not null,
section_name varchar(100) not null) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
insert into ttrss_prefs_sections (id, section_name, order_id) values (1, 'General', 0);
insert into ttrss_prefs_sections (id, section_name, order_id) values (2, 'Interface', 1);
insert into ttrss_prefs_sections (id, section_name, order_id) values (3, 'Advanced', 3);
insert into ttrss_prefs_sections (id, section_name, order_id) values (4, 'Digest', 2);
create table ttrss_prefs (pref_name varchar(250) not null primary key,
type_id integer not null,
section_id integer not null default 1,
short_desc text not null,
help_text varchar(250) not null default '',
access_level integer not null default 0,
def_value text not null,
index(type_id),
foreign key (type_id) references ttrss_prefs_types(id),
index(section_id),
foreign key (section_id) references ttrss_prefs_sections(id)) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_prefs_pref_name_idx on ttrss_prefs(pref_name);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('PURGE_OLD_DAYS', 3, '60', 'Purge articles after this number of days (0 - disables)',1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('DEFAULT_UPDATE_INTERVAL', 3, '30', 'Default interval between feed updates',1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('DEFAULT_ARTICLE_LIMIT', 3, '30', 'Amount of articles to display at once',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('ALLOW_DUPLICATE_POSTS', 1, 'true', 'Allow duplicate posts',1, 'This option is useful when you are reading several planet-type aggregators with partially colliding userbase. When disabled, it forces same posts from different feeds to appear only once.');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('ENABLE_FEED_CATS', 1, 'true', 'Enable feed categories',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('SHOW_CONTENT_PREVIEW', 1, 'true', 'Show content preview in headlines list',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('SHORT_DATE_FORMAT', 2, 'M d, G:i', 'Short date format',3);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('LONG_DATE_FORMAT', 2, 'D, M d Y - G:i', 'Long date format',3);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('COMBINED_DISPLAY_MODE', 1, 'false', 'Combined feed display',2, 'Display expanded list of feed articles, instead of separate displays for headlines and article content');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('HIDE_READ_FEEDS', 1, 'false', 'Hide feeds with no unread messages',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('ON_CATCHUP_SHOW_NEXT_FEED', 1, 'false', 'On catchup show next feed',2, 'Automatically open next feed with unread articles after marking one as read');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('FEEDS_SORT_BY_UNREAD', 1, 'false', 'Sort feeds by unread articles count',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('REVERSE_HEADLINES', 1, 'false', 'Reverse headline order (oldest first)',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('DIGEST_ENABLE', 1, 'false', 'Enable e-mail digest',4, 'This option enables sending daily digest of new (and unread) headlines on your configured e-mail address');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('CONFIRM_FEED_CATCHUP', 1, 'true', 'Confirm marking feed as read',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('CDM_AUTO_CATCHUP', 1, 'false', 'Automatically mark articles as read',2, 'This option enables marking articles as read automatically while you scroll article list.');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_DEFAULT_VIEW_MODE', 2, 'adaptive', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_DEFAULT_VIEW_LIMIT', 3, '30', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_PREFS_ACTIVE_TAB', 2, '', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('STRIP_UNSAFE_TAGS', 1, 'true', 'Strip unsafe tags from articles', 3, 'Strip all but most common HTML tags when reading articles.');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('BLACKLISTED_TAGS', 2, 'main, generic, misc, uncategorized, blog, blogroll, general, news', 'Blacklisted tags', 3, 'When auto-detecting tags in articles these tags will not be applied (comma-separated list).');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('FRESH_ARTICLE_MAX_AGE', 3, '24', 'Maximum age of fresh articles (in hours)',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('DIGEST_CATCHUP', 1, 'false', 'Mark articles in e-mail digest as read',4);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('CDM_EXPANDED', 1, 'true', 'Automatically expand articles in combined mode',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('PURGE_UNREAD_ARTICLES', 1, 'true', 'Purge unread articles',3);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('HIDE_READ_SHOWS_SPECIAL', 1, 'true', 'Show special feeds when hiding read feeds',2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('VFEED_GROUP_BY_FEED', 1, 'false', 'Group headlines in virtual feeds',2, 'When this option is enabled, headlines in Special feeds and Labels are grouped by feeds');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('STRIP_IMAGES', 1, 'false', 'Do not embed images in articles', 2);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_DEFAULT_VIEW_ORDER_BY', 2, 'default', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('ENABLE_API_ACCESS', 1, 'false', 'Enable external API', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_COLLAPSED_SPECIAL', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_COLLAPSED_LABELS', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_COLLAPSED_UNCAT', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_COLLAPSED_FEEDLIST', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_ENABLE_CATS', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_SHOW_IMAGES', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_HIDE_READ', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_SORT_FEEDS_UNREAD', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_THEME_ID', 2, '0', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('USER_TIMEZONE', 2, 'UTC', 'User timezone', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('USER_STYLESHEET', 2, '', 'Customize stylesheet', 2, 'Customize CSS stylesheet to your liking');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('SORT_HEADLINES_BY_FEED_DATE', 1, 'true', 'Sort headlines by feed date',2, 'Use feed-specified date to sort headlines instead of local import date.');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_BROWSE_CATS', 1, 'true', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('SSL_CERT_SERIAL', 2, '', 'Login with an SSL certificate',3, 'Click to register your SSL client certificate with tt-rss');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id,help_text) values('DIGEST_PREFERRED_TIME', 2, '00:00', 'Try to send digests around specified time', 4, 'Uses UTC timezone');
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_PREFS_SHOW_EMPTY_CATS', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_DEFAULT_INCLUDE_CHILDREN', 1, 'false', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('AUTO_ASSIGN_LABELS', 1, 'true', 'Assign articles to labels automatically', 3);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_ENABLED_PLUGINS', 2, '', '', 1);
insert into ttrss_prefs (pref_name,type_id,def_value,short_desc,section_id) values('_MOBILE_REVERSE_HEADLINES', 1, 'false', '', 1);
update ttrss_prefs set access_level = 1 where pref_name in ('ON_CATCHUP_SHOW_NEXT_FEED',
'SORT_HEADLINES_BY_FEED_DATE',
'VFEED_GROUP_BY_FEED',
'FRESH_ARTICLE_MAX_AGE',
'CDM_EXPANDED',
'SHOW_CONTENT_PREVIEW',
'AUTO_ASSIGN_LABELS',
'HIDE_READ_SHOWS_SPECIAL');
create table ttrss_user_prefs (
owner_uid integer not null,
pref_name varchar(250),
value longtext not null,
profile integer,
index (profile),
foreign key (profile) references ttrss_settings_profiles(id) ON DELETE CASCADE,
index (owner_uid),
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE,
index (pref_name),
foreign key (pref_name) references ttrss_prefs(pref_name) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create index ttrss_user_prefs_owner_uid_index on ttrss_user_prefs(owner_uid);
create index ttrss_user_prefs_pref_name_idx on ttrss_user_prefs(pref_name);
create table ttrss_sessions (id varchar(250) unique not null primary key,
data text,
expire integer not null,
index (id),
index (expire)) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_feedbrowser_cache (
feed_url text not null,
site_url text not null,
title text not null,
subscribers integer not null) DEFAULT CHARSET=UTF8;
create table ttrss_labels2 (id integer not null primary key auto_increment,
owner_uid integer not null,
caption varchar(250) not null,
fg_color varchar(15) not null default '',
bg_color varchar(15) not null default '',
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_user_labels2 (label_id integer not null,
article_id integer not null,
foreign key (label_id) references ttrss_labels2(id) ON DELETE CASCADE,
foreign key (article_id) references ttrss_entries(id) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_access_keys (id integer not null primary key auto_increment,
access_key varchar(250) not null,
feed_id varchar(250) not null,
is_cat bool not null default false,
owner_uid integer not null,
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_linked_instances (id integer not null primary key auto_increment,
last_connected datetime not null,
last_status_in integer not null,
last_status_out integer not null,
access_key varchar(250) not null unique,
access_url text not null) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_linked_feeds (
feed_url text not null,
site_url text not null,
title text not null,
created datetime not null,
updated datetime not null,
instance_id integer not null,
subscribers integer not null,
foreign key (instance_id) references ttrss_linked_instances(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
create table ttrss_plugin_storage (
id integer not null auto_increment primary key,
name varchar(100) not null,
owner_uid integer not null,
content longtext not null,
foreign key (owner_uid) references ttrss_users(id) ON DELETE CASCADE) ENGINE=InnoDB DEFAULT CHARSET=UTF8;
commit;
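The schema above is two-level: ttrss_prefs holds the installation-wide defaults (def_value) keyed by pref_name, while ttrss_user_prefs stores per-user and per-profile overrides that reference it through foreign keys. A preference lookup therefore has to fall back to the default when no override row exists. Below is a minimal Python sketch of that resolution, assuming a MySQLdb connection; the credentials are placeholders and not part of this profile.

# Minimal sketch: resolve a tt-rss preference for a user, falling back to the
# installation default in ttrss_prefs when no per-user override exists.
import MySQLdb

def get_pref(conn, owner_uid, pref_name, profile=None):
    cur = conn.cursor()
    # Look for a per-user (and per-profile) override first.
    cur.execute(
        "SELECT value FROM ttrss_user_prefs"
        " WHERE owner_uid = %s AND pref_name = %s"
        " AND ((%s IS NULL AND profile IS NULL) OR profile = %s)",
        (owner_uid, pref_name, profile, profile))
    row = cur.fetchone()
    if row is not None:
        return row[0]
    # Fall back to the installation-wide default.
    cur.execute("SELECT def_value FROM ttrss_prefs WHERE pref_name = %s",
                (pref_name,))
    row = cur.fetchone()
    return row[0] if row else None

conn = MySQLdb.connect(host='127.0.0.1', user='ttrss', passwd='secret',
                       db='ttrss')  # placeholder credentials
print(get_pref(conn, 1, 'DEFAULT_UPDATE_INTERVAL'))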
[buildout]
extends =
../../stack/lamp/buildout.cfg
[versions]
slapos.cookbook =
[application]
recipe = slapos.recipe.build:download-unpacked
url = https://github.com/gothfox/Tiny-Tiny-RSS/archive/1.7.8.tar.gz
md5sum = efd7eec1629db379896fb7e74bba400e
strip-top-level-dir = true
[application-template]
recipe = slapos.recipe.build:download
url = ${:_profile_base_location_}/template/config.php.in
#md5sum = Students may put the md5sum of this file here; doing so is a good idea
filename = template.in
mode = 0644
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[application-configuration]
location = config.php
[sql-script]
recipe = slapos.recipe.build:download
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = ${:_profile_base_location_}/script/tt-rss.sql
#md5sum = c4d5f87d8f02cad3f20e679160195f48
filename = tt-rss.sql
mode = 0744
# XXX Should disappear and be integrated into apachephpconfigure
[configure-script]
recipe = slapos.recipe.build:download
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = ${:_profile_base_location_}/configure-tt-rss.py
#md5sum = c4d5f87d8f02cad3f20e679160195f48
filename = configure-tt-rss.py
mode = 0744
[custom-application-deployment-template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-custom.cfg.in
output = ${buildout:directory}/instance-custom.cfg
#md5sum = 283cb53ff8cd34635703e771062db919
mode = 0644
[custom-application-deployment]
path = ${custom-application-deployment-template:output}
part-list = tt-rss-init
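The [sql-script] and [configure-script] parts above only download tt-rss.sql and configure-tt-rss.py; loading the schema into the instance's MySQL server happens later, at instantiation time. The real configure-tt-rss.py is only referenced by URL here, so the following is a rough, hedged sketch of that step; the mysql binary path, port and credentials are placeholders.

# Rough sketch (not the actual configure-tt-rss.py): pipe the downloaded
# tt-rss.sql into the instance MySQL server using the mysql command-line
# client. All paths and credentials below are placeholders.
import subprocess

def load_schema(mysql_bin, host, port, user, password, database, sql_path):
    with open(sql_path, 'rb') as sql_file:
        subprocess.check_call(
            [mysql_bin,
             '--host=%s' % host,
             '--port=%d' % port,
             '--user=%s' % user,
             '--password=%s' % password,
             database],
            stdin=sql_file)

load_schema('/path/to/parts/mariadb/bin/mysql', '127.0.0.1', 2099,
            'ttrss', 'secret', 'ttrss', 'parts/sql-script/tt-rss.sql')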
<?php
// *******************************************
// *** Database configuration (important!) ***
// *******************************************
define('DB_TYPE', "mysql"); // or pgsql
define('DB_HOST', "%(mysql_host)s");
define('DB_USER', "%(mysql_user)s");
define('DB_NAME', "%(mysql_database)s");
define('DB_PASS', "%(mysql_password)s");
define('MYSQL_CHARSET', 'UTF8');
// Connection charset for MySQL. If you have a legacy database and/or experience
// garbage unicode characters with this option, try setting it to a blank string.
// ***********************************
// *** Basic settings (important!) ***
// ***********************************
define('SELF_URL_PATH', '%(url)s');
// Full URL of your tt-rss installation. This should be set to the
// location of tt-rss directory, e.g. http://yourserver/tt-rss/
// You need to set this option correctly otherwise several features
// including PUSH, bookmarklets and browser integration will not work properly.
define('SINGLE_USER_MODE', true);
// Operate in single-user mode; this disables all functionality related to
// multiple users.
define('SIMPLE_UPDATE_MODE', false);
// Enables fallback update mode where tt-rss tries to update feeds in
// background while tt-rss is open in your browser.
// If you don't have a lot of feeds and don't want to or can't run
// background processes while not running tt-rss, this method is generally
// viable to keep your feeds up to date.
// Still, there are more robust (and recommended) updating methods
// available, you can read about them here: http://tt-rss.org/wiki/UpdatingFeeds
// *****************************
// *** Files and directories ***
// *****************************
//FIXME
define('PHP_EXECUTABLE', '/usr/bin/php');
// Path to PHP executable, used for various command-line tt-rss programs
define('LOCK_DIRECTORY', 'lock');
// Directory for lockfiles; it must be writable by the user you run the
// daemon process or cron jobs under.
define('CACHE_DIR', 'cache');
// Local cache directory for RSS feed content.
define('ICONS_DIR', "feed-icons");
define('ICONS_URL', "feed-icons");
// Local and URL path to the directory, where feed favicons are stored.
// Unless you really know what you're doing, please keep those relative
// to tt-rss main directory.
// **********************
// *** Authentication ***
// **********************
// Please see PLUGINS below to configure various authentication modules.
define('AUTH_AUTO_CREATE', true);
// Allow authentication modules to auto-create users in tt-rss internal
// database when authenticated successfully.
define('AUTH_AUTO_LOGIN', true);
// Automatically log the user in on remote or other externally supplied
// authentication; otherwise redirect to the login form as usual.
// If set to true, users won't be able to set application language
// and settings profile.
// *********************
// *** Feed settings ***
// *********************
define('FORCE_ARTICLE_PURGE', 0);
// When this option is not 0, users' ability to control feed purging
// intervals is disabled and all articles (except starred ones)
// older than this number of days are purged.
// *** PubSubHubbub settings ***
define('PUBSUBHUBBUB_HUB', '');
// URL to a PubSubHubbub-compatible hub server. If defined, "Published
// articles" generated feed would automatically become PUSH-enabled.
define('PUBSUBHUBBUB_ENABLED', false);
// Enable client PubSubHubbub support in tt-rss. When disabled, tt-rss
// won't try to subscribe to PUSH feed updates.
// *********************
// *** Sphinx search ***
// *********************
define('SPHINX_ENABLED', false);
// Enable fulltext search using Sphinx (http://www.sphinxsearch.com)
// Please see http://tt-rss.org/wiki/SphinxSearch for more information.
define('SPHINX_INDEX', 'ttrss');
// Index name in Sphinx configuration. You can specify multiple indexes
// as a comma-separated string.
// ***********************************
// *** Self-registrations by users ***
// ***********************************
define('ENABLE_REGISTRATION', false);
// Allow users to register themselves. Please be aware that allowing
// random people to access your tt-rss installation is a security risk
// that could potentially lead to data loss or a server exploit. Disabled
// by default.
define('REG_NOTIFY_ADDRESS', 'user@your.domain.dom');
// Email address to send new user notifications to.
define('REG_MAX_USERS', 10);
// Maximum number of users allowed to register on this
// system. 0 - no limit.
// **********************************
// *** Cookies and login sessions ***
// **********************************
define('SESSION_COOKIE_LIFETIME', 86400*30);
// Default lifetime of a session (e.g. login) cookie, in seconds.
// 0 means the cookie will be deleted when the browser closes.
// Note that setting this to zero will prevent some user preferences,
// such as widescreen mode, from being saved.
define('SESSION_EXPIRE_TIME', 86400*30);
// Hard expiration limit for sessions. Should be
// greater than or equal to SESSION_COOKIE_LIFETIME.
define('SESSION_CHECK_ADDRESS', 1);
// Check client IP address when validating session:
// 0 - disable checking
// 1 - check first 3 octets of an address (recommended)
// 2 - check first 2 octets of an address
// 3 - check entire address
// *********************************
// *** Email and digest settings ***
// *********************************
define('SMTP_FROM_NAME', 'Tiny Tiny RSS');
define('SMTP_FROM_ADDRESS', 'noreply@your.domain.dom');
// Name, address and subject for sending outgoing mail. This applies
// to password reset notifications, digest emails and any other mail.
define('DIGEST_SUBJECT', '[tt-rss] New headlines for last 24 hours');
// Subject line for email digests
define('SMTP_HOST', '');
// SMTP Host to send outgoing mail. Blank - use system MTA.
define('SMTP_PORT','');
// SMTP port to send outgoing mail. Default is 25.
define('SMTP_LOGIN', '');
define('SMTP_PASSWORD', '');
// These two options enable SMTP authentication when sending
// outgoing mail. Only used with SMTP_HOST
// ***************************************
// *** Other settings (less important) ***
// ***************************************
define('CHECK_FOR_NEW_VERSION', false);
// Check for new versions of tt-rss automatically.
define('ENABLE_GZIP_OUTPUT', false);
// Selectively gzip output to improve wire performance. This requires
// PHP Zlib extension on the server.
// Enabling this can break tt-rss in several httpd/php configurations;
// if you experience weird errors such as tt-rss failing to start, blank pages
// after login, or content encoding errors, disable it.
define('PLUGINS', 'auth_remote, auth_internal, note');
// Comma-separated list of plugins to load automatically for all users.
// System plugins have to be specified here. Please enable at least one
// authentication plugin here (auth_*).
// Users may enable other user plugins from Preferences/Plugins but may not
// disable plugins specified in this list.
define('CONFIG_VERSION', 26);
// Expected config version. Please update this option in config.php
// if necessary (after migrating all new options from this file).
// vim:ft=php
?>
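The config.php.in template above uses Python %-style placeholders: %(mysql_host)s, %(mysql_user)s, %(mysql_database)s, %(mysql_password)s and %(url)s are filled in when the instance is configured, producing the config.php named in [application-configuration]. A minimal sketch of that substitution follows; the parameter values are hypothetical, as the real ones come from the deployed MariaDB and Apache partitions.

# Minimal sketch: render config.php from the %(...)s placeholders in the
# template downloaded by [application-template]. The values below are
# hypothetical; real ones are provided by the instance at configuration time.
parameters = {
    'mysql_host': '127.0.0.1',        # hypothetical
    'mysql_user': 'ttrss',            # hypothetical
    'mysql_database': 'ttrss',        # hypothetical
    'mysql_password': 'secret',       # hypothetical
    'url': 'http://[2001:db8::1]/',   # hypothetical
}

with open('parts/application-template/template.in') as template_file:
    rendered = template_file.read() % parameters

with open('config.php', 'w') as config_file:
    config_file.write(rendered)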
@@ -37,4 +37,7 @@ depends = ${caucase-jinja2-library-eggs:eggs}
 [versions]
 caucase = 0.9.12
 pem = 21.1.0
+PyJWT = 2.4.0
+
+[versions:python2]
 PyJWT = 1.7.1
@@ -59,6 +59,8 @@ extends =
 ../../component/wendelin.core/buildout.cfg
 ../../component/jupyter-py2/buildout.cfg
 ../../component/pygolang/buildout.cfg
+../../component/bcrypt/buildout.cfg
+../../component/python-pynacl/buildout.cfg
 ../../stack/caucase/buildout.cfg
 ../../software/neoppod/software-common.cfg
 # keep neoppod extends last
@@ -475,6 +477,8 @@ eggs = ${neoppod:eggs}
 mock
 oauthlib
 objgraph
+${python-pynacl:egg}
+${bcrypt:egg}
 paramiko
 ply
 pyflakes
@@ -701,7 +705,6 @@ pprofile = 2.0.4
 pyasn1-modules = 0.0.8
 pycountry = 17.1.8
 pycrypto = 2.6.1
-pycurl = 7.43.0
 pyflakes = 1.5.0
 python-memcached = 1.58
 pytracemalloc = 1.2
@@ -726,7 +729,6 @@ dask = 0.18.1
 toolz = 0.9.0
 zope.globalrequest = 1.5
 waitress = 1.4.4
-xlrd = 1.1.0
 Products.ZSQLMethods = 2.13.5
 fpconst = 0.7.2
@@ -736,11 +738,8 @@ python-libmilter = 1.0.3
 zope.app.debug = 3.4.1
 zope.app.dependable = 3.5.1
 zope.app.form = 4.0.2
-et-xmlfile = 1.0.1
-h5py = 2.7.1
 mpmath = 0.19
 openpyxl = 2.4.8
-sympy = 1.1.1
 jdcal = 1.3
 deepdiff = 3.3.0
 unidiff = 0.5.5
......
@@ -167,7 +167,7 @@ importlib-metadata = 1.7.0:whl
 inotify-simple = 1.1.1
 itsdangerous = 0.24
 lock-file = 2.0
-lxml = 4.6.5
+lxml = 4.9.1
 meld3 = 1.0.2
 mock = 3.0.5
 more-itertools = 5.0.0
@@ -179,7 +179,7 @@ plone.recipe.command = 1.1
 prettytable = 0.7.2
 psutil = 5.8.0
 pluggy = 0.13.1:whl
-py = 1.9.0:whl
+py = 1.11.0:whl
 pyOpenSSL = 19.1.0
 pyparsing = 3.0.9:whl
 py-mld = 1.0.3
@@ -208,12 +208,14 @@ unicodecsv = 0.14.1
 wcwidth = 0.2.5
 wheel = 0.35.1:whl
 xml-marshaller = 1.0.2
-zc.lockfile = 1.0.2
+zc.lockfile = 1.4
 zdaemon = 4.2.0
 zipp = 1.2.0:whl
 zodburi = 2.5.0
 zope.event = 3.5.2
-paramiko = 2.1.3
+paramiko = 2.11.0
+PyNaCl = 1.3.0
+bcrypt = 3.1.4
 CacheControl = 0.12.6:whl
 msgpack = 0.6.2
 Flask = 1.1.2
@@ -235,7 +237,6 @@ pytest-runner = 5.2:whl
 ipaddress = 1.0.23
 jsonschema = 3.0.2:whl
 lockfile = 0.12.2:whl
-# XXX 'slapos node format' raises an exception with netifaces 0.10.5.
 netifaces = 0.10.7
 packaging = 16.8
 passlib = 1.7.1
......