Commit 7c84696d authored by Łukasz Nowak

caddy-frontend: Expose backend log files to slaves

Backend logs are exposed to slaves in the same way as the usual access and error logs.

By using rsyslogd templates and regex filtering, rsyslogd itself creates the
needed log files for each slave that accesses it. Thanks to this, its
configuration is static from the point of view of SlapOS profiles, and can be
generated once.

As the rsyslogd configuration has become specific to backend-haproxy, the
rsyslogd template filename and its references have been renamed accordingly.
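
As an illustration of the mechanism (a minimal Python sketch, not part of the
commit): the rsyslogd template applies a regex to each haproxy log line to
extract the slave reference from the backend name, and derives the per-slave
backend log filename from it. The sample log line below is hypothetical, but
follows the haproxy log format asserted in the tests of this commit.

    import re

    # The same extended regex as used in templates/backend-haproxy-rsyslogd.conf.in
    REGEX = r".*-backend (.*)-http.*"

    # Hypothetical haproxy log line: the frontend is "http-backend" and the
    # backend section is named "<slave_reference>-http".
    line = ('203.0.113.7:54321 [01/Jan/2020:00:00:00.000] '
            'http-backend _Url-http/backend 0/0/0/1/1 200 512 - - ---- '
            '1/1/0/0/0 0/0 "GET /test-path HTTP/1.1"')

    match = re.match(REGEX, line)
    if match:
        slave_name = match.group(1)  # '_Url'
        # rsyslogd's dynaFile template then writes the line to
        # <caddy-log-directory>/<slave_name>_backend_log
        print('%s_backend_log' % slave_name)  # prints: _Url_backend_log
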
parent d26aa96f
......@@ -22,7 +22,7 @@ md5sum = c801b7f9f11f0965677c22e6bbe9281b
[template-apache-frontend]
filename = instance-apache-frontend.cfg.in
md5sum = 23237969bbd9e974ac674b2052e8d67c
md5sum = 57401c2ad08e3de286be343fa4913fdc
[template-caddy-replicate]
filename = instance-apache-replicate.cfg.in
......@@ -30,7 +30,7 @@ md5sum = 19debfbc27c464f451b1eb5bb5ce3c84
[template-slave-list]
_update_hash_filename_ = templates/apache-custom-slave-list.cfg.in
md5sum = 5f1d2fa5529e3ee8c6a99834e8374c48
md5sum = 6246537516c3240e4fd1784c54c03760
[template-replicate-publish-slave-information]
_update_hash_filename_ = templates/replicate-publish-slave-information.cfg.in
......@@ -116,6 +116,6 @@ md5sum = 38792c2dceae38ab411592ec36fff6a8
filename = instance-kedifa.cfg.in
md5sum = 9d6111a5d6bc07e708116ca331925241
[template-rsyslogd-conf]
_update_hash_filename_ = templates/rsyslogd.conf.in
md5sum = a4135eec94d9febaf6fb51d885886175
[template-backend-haproxy-rsyslogd-conf]
_update_hash_filename_ = templates/backend-haproxy-rsyslogd.conf.in
md5sum = be899b04e1aa652ed510f20d4ea523dd
......@@ -113,7 +113,7 @@ xz_location = ${xz-utils:location}
monitor_template = ${monitor-template:output}
template_backend_haproxy_configuration = ${template-backend-haproxy-configuration:target}
template_rsyslogd_conf = ${template-rsyslogd-conf:target}
template_backend_haproxy_rsyslogd_conf = ${template-backend-haproxy-rsyslogd-conf:target}
template_caddy_frontend_configuration = ${template-caddy-frontend-configuration:target}
template_graceful_script = ${template-graceful-script:target}
template_validate_script = ${template-validate-script:target}
......@@ -228,5 +228,5 @@ mode = 0644
[template-configuration-state-script]
<=download-template
[template-rsyslogd-conf]
[template-backend-haproxy-rsyslogd-conf]
<=download-template
......@@ -43,12 +43,14 @@ parts =
monitor-caddy-server-status-wrapper
monitor-verify-re6st-connectivity
backend-haproxy-rsyslogd-configuration
backend-haproxy-rsyslogd
logrotate-entry-backend-haproxy
backend-haproxy
backend-haproxy-graceful
promise-backend-haproxy-http
promise-backend-haproxy-https
promise-backend-haproxy-configuration
logrotate-entry-backend-haproxy
# Create all needed directories
[directory]
......@@ -157,7 +159,7 @@ context =
template-empty = {{ parameter_dict['template_empty'] }}
template-default-slave-virtualhost = {{ parameter_dict['template_default_slave_virtualhost'] }}
template-backend-haproxy-configuration = {{ parameter_dict['template_backend_haproxy_configuration'] }}
template-rsyslogd-conf = {{ parameter_dict['template_rsyslogd_conf'] }}
template-backend-haproxy-rsyslogd-conf = {{ parameter_dict['template_backend_haproxy_rsyslogd_conf'] }}
caddy-location = {{ parameter_dict['caddy_location'] }}
[kedifa-login-config]
......@@ -698,7 +700,7 @@ config-port = ${backend-haproxy-configuration:https-port}
[backend-haproxy-configuration]
file = ${directory:etc}/backend-haproxy.cfg
pid-file = ${directory:run}/backend-haproxy.pid
log-socket = ${backend-haproxy-rsyslogd:log-socket}
log-socket = ${backend-haproxy-rsyslogd-config:log-socket}
graceful-command = ${backend-haproxy-validate:rendered} && kill -USR2 $(cat ${:pid-file})
http-port = ${configuration:backend-haproxy-http-port}
https-port = ${configuration:backend-haproxy-https-port}
......@@ -716,12 +718,29 @@ command-line = {{ parameter_dict['haproxy_executable'] }} -f ${backend-haproxy-c
wrapper-path = ${directory:service}/backend-haproxy
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
[backend-haproxy-rsyslogd-lazy-graceful]
< = jinja2-template-base
template = {{ parameter_dict['template_caddy_lazy_script_call'] }}
rendered = ${directory:bin}/backend-haproxy-rsyslogd-lazy-graceful
mode = 0700
pid-file = ${directory:run}/backend-haproxy-rsyslogd-lazy-graceful.pid
wait_time = 60
extra-context =
key pid_file :pid-file
key wait_time :wait_time
key lazy_command backend-haproxy-rsyslogd-config:graceful-command
[logrotate-entry-backend-haproxy]
<= logrotate-entry-base
name = backend-haproxy
log = ${backend-haproxy-rsyslogd-configuration:log-file}
log = ${backend-haproxy-rsyslogd-config:log-file}
rotate-num = 30
post = kill -HUP $(cat ${backend-haproxy-rsyslogd-configuration:pid-file})
# Note: Slaves do not define their own reload, as it would be executed
# repeatedly, because sharedscripts works per logrotate entry and each
# slave needs its own olddir.
# Here we trust that the error or access log will always have something to
# rotate, and that this will trigger the postrotate script.
post = ${backend-haproxy-rsyslogd-lazy-graceful:rendered} &
[backend-haproxy-configuration-state]
<= jinja2-template-base
......@@ -767,18 +786,6 @@ extra-context =
key configuration_state_command backend-haproxy-configuration-state-validate:rendered
key last_state_file :last_state_file
[backend-haproxy-lazy-graceful]
< = jinja2-template-base
template = {{ parameter_dict['template_caddy_lazy_script_call'] }}
rendered = ${directory:bin}/backend-haproxy-lazy-graceful
mode = 0700
pid-file = ${directory:run}/backend-haproxy-lazy-graceful.pid
wait_time = 60
extra-context =
key pid_file :pid-file
key wait_time :wait_time
key lazy_command backend-haproxy-configuration:graceful-command
[promise-backend-haproxy-configuration]
<= monitor-promise-base
module = validate_frontend_configuration
......@@ -796,27 +803,26 @@ content =
context =
key content :content
[backend-haproxy-rsyslogd-configuration]
<= jinja2-template-base
template = {{ parameter_dict['template_rsyslogd_conf'] }}
rendered = ${directory:etc}/backend-haproxy-rsyslogd.conf
# Note: log-socket should be named backend-haproxy-rsyslogd.sock to match the
# part name, but that would exceed the socket path length limit
[backend-haproxy-rsyslogd-config]
log-socket = ${directory:run}/bhlog.sck
log-file = ${directory:log}/backend-haproxy.log
pid-file = ${directory:run}/backend-haproxy-rsyslogd.pid
spool-directory = ${directory:backend-haproxy-rsyslogd-spool}
graceful-command = kill -HUP $(cat ${:pid-file})
caddy-log-directory = ${caddy-directory:slave-log}
[backend-haproxy-rsyslogd-configuration]
<= jinja2-template-base
template = ${software-release-path:template-backend-haproxy-rsyslogd-conf}
rendered = ${directory:etc}/backend-haproxy-rsyslogd.conf
extra-context =
key socket :log-socket
key log_file :log-file
key spool_directory directory:backend-haproxy-rsyslogd-spool
section configuration backend-haproxy-rsyslogd-config
[backend-haproxy-rsyslogd]
recipe = slapos.cookbook:wrapper
command-line = {{ parameter_dict['rsyslogd_executable'] }} -i ${backend-haproxy-rsyslogd-configuration:pid-file} -n -f ${backend-haproxy-rsyslogd-configuration:rendered}
command-line = {{ parameter_dict['rsyslogd_executable'] }} -i ${backend-haproxy-rsyslogd-config:pid-file} -n -f ${backend-haproxy-rsyslogd-configuration:rendered}
wrapper-path = ${directory:service}/backend-haproxy-rsyslogd
hash-existing-files = ${buildout:directory}/software_release/buildout.cfg
log-socket = ${backend-haproxy-rsyslogd-configuration:log-socket}
log-file = ${backend-haproxy-rsyslogd-configuration:log-file}
#######
# Monitoring sections
......
......@@ -119,8 +119,10 @@ create = true
{#- Set Up log files #}
{%- do slave_parameter_dict.__setitem__('access_log', '/'.join([caddy_log_directory, '%s_access_log' % slave_reference])) %}
{%- do slave_parameter_dict.__setitem__('error_log', '/'.join([caddy_log_directory, '%s_error_log' % slave_reference])) %}
{%- do slave_parameter_dict.__setitem__('backend_log', '/'.join([caddy_log_directory, '%s_backend_log' % slave_reference])) %}
{%- do slave_instance.__setitem__('access_log', slave_parameter_dict.get('access_log')) %}
{%- do slave_instance.__setitem__('error_log', slave_parameter_dict.get('error_log')) %}
{%- do slave_instance.__setitem__('backend_log', slave_parameter_dict.get('backend_log')) %}
{#- Add slave log directory to the slave log access dict #}
{%- do slave_log_dict.__setitem__(slave_reference, slave_log_folder) %}
{%- set slave_log_access_url = 'https://' + slave_reference.lower() + ':${'+ slave_password_section +':passwd}@[' + frontend_configuration.get('caddy-ipv6') + ']:' + frontend_configuration.get('caddy-https-port') + '/' + slave_reference.lower() + '/' %}
......@@ -148,7 +150,7 @@ create = true
[{{slave_logrotate_section}}]
<= logrotate-entry-base
name = ${:_buildout_section_name_}
log = {{slave_parameter_dict.get('access_log')}} {{slave_parameter_dict.get('error_log')}}
log = {{slave_parameter_dict.get('access_log')}} {{slave_parameter_dict.get('error_log')}} {{slave_parameter_dict.get('backend_log')}}
backup = {{ slave_log_folder }}
{#- integrate current logs inside #}
......@@ -157,7 +159,7 @@ backup = {{ slave_log_folder }}
recipe = plone.recipe.command
stop-on-error = false
update-command = ${:command}
command = ln -sf {{slave_parameter_dict.get('error_log')}} {{ slave_log_folder }}/error.log && ln -sf {{slave_parameter_dict.get('access_log')}} {{ slave_log_folder }}/access.log
command = ln -sf {{slave_parameter_dict.get('error_log')}} {{ slave_log_folder }}/error.log && ln -sf {{slave_parameter_dict.get('access_log')}} {{ slave_log_folder }}/access.log && ln -sf {{slave_parameter_dict.get('backend_log')}} {{ slave_log_folder }}/backend.log
{#- Set password for slave #}
......
module(
load="imuxsock"
SysSock.Name="{{ socket }}")
SysSock.Name="{{ configuration['log-socket'] }}")
# Simply output the raw line without any additional information, as
# haproxy emits enough information by itself
......@@ -13,6 +13,17 @@ $FileCreateMode 0600
$DirCreateMode 0700
$Umask 0022
$WorkDirectory {{ spool_directory }}
$WorkDirectory {{ configuration['spool-directory'] }}
*.* {{ log_file }}
# Set up per-slave logging by extracting the slave name from the log stream
{%- set regex = ".*-backend (.*)-http.*" %}
template(name="extract_slave_name" type="string" string="%msg:R,ERE,1,FIELD:{{ regex }}--end%")
set $!slave_name = exec_template("extract_slave_name");
template(name="slave_output" type="string" string="{{ configuration['caddy-log-directory'] }}/%$!slave_name%_backend_log")
if (re_match($msg, "{{ regex }}")) then {
action(type="omfile" dynaFile="slave_output")
stop
}
{#- emit all uncaught messages to the full log file #}
*.* {{ configuration['log-file'] }}
......@@ -719,6 +719,14 @@ class HttpFrontendTestCase(SlapOSInstanceTestCase):
httplib.OK,
requests.get(url + 'error.log', verify=False).status_code
)
# assert only for a few tests, as the backend log is not available for many
# of them, since it is created on the fly
for test_name in ['test_url', 'test_auth_to_backend', 'test_compressed_result']:
if self.id().endswith(test_name):
self.assertEqual(
httplib.OK,
requests.get(url + 'backend.log', verify=False).status_code
)
def assertKedifaKeysWithPop(self, parameter_dict, prefix=''):
generate_auth_url = parameter_dict.pop('%skey-generate-auth-url' % (
......@@ -1625,20 +1633,6 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
self.assertEqual(httplib.SERVICE_UNAVAILABLE, result.status_code)
# check that log file contains verbose log
log_file = glob.glob(
os.path.join(
self.instance_path, '*', 'var', 'log', 'httpd', '_empty_access_log'
))[0]
log_regexp = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} - - ' \
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2} \+\d{4}\] ' \
r'"GET \/test-path HTTP\/1.1" \d{3} \d+ "-" '\
r'"python-requests.*" \d+'
self.assertRegexpMatches(
open(log_file, 'r').readlines()[-1],
log_regexp)
result_http = fakeHTTPResult(
parameter_dict['domain'], parameter_dict['public-ipv4'], 'test-path')
self.assertEqual(
......@@ -1737,6 +1731,39 @@ class TestSlave(SlaveHttpFrontendTestCase, TestDataMixin):
result.headers['Set-Cookie']
)
# check access log
log_file = glob.glob(
os.path.join(
self.instance_path, '*', 'var', 'log', 'httpd', '_Url_access_log'
))[0]
log_regexp = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} - - ' \
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2} \+\d{4}\] ' \
r'"GET \/test-path\/deep\/..\/.\/deeper HTTP\/1.1" \d{3} ' \
r'\d+ "-" "python-requests.*" \d+'
self.assertRegexpMatches(
open(log_file, 'r').readlines()[-1],
log_regexp)
# check backend log
log_file = glob.glob(
os.path.join(
self.instance_path, '*', 'var', 'log', 'httpd', '_Url_backend_log'
))[0]
log_regexp = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+ ' \
r'\[\d{2}\/.{3}\/\d{4}\:\d{2}\:\d{2}\:\d{2}.\d{3}\] ' \
r'http-backend _Url-http\/backend ' \
r'\d+/\d+\/\d+\/\d+\/\d+ ' \
r'200 \d+ - - ---- ' \
r'\d\/\d\/\d\/\d\/\d \d\/\d ' \
r'"GET /test-path/deeper HTTP/1.1"'
self.assertRegexpMatches(
open(log_file, 'r').readlines()[-1],
log_regexp)
result_http = fakeHTTPResult(
parameter_dict['domain'], parameter_dict['public-ipv4'],
'test-path/deep/.././deeper')
......