Commit 76a7aa96 authored by Joanne Hugé

WIP

parent 38f7e51b
import errno
import os
import time

from .util import JSONPromise
from .util import get_json_log_latest_timestamp

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


@implementer(interface.IPromise)
class RunPromise(GenericPromise):

  def __init__(self, config):
    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=1)
    self.amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
    self.stats_period = int(self.getConfig('stats-period'))

  def sense(self):
    latest_timestamp = get_json_log_latest_timestamp(self.amarisoft_stats_log)
    delta = time.time() - latest_timestamp
    if delta > self.stats_period * 2:
      self.logger.error("Latest entry from amarisoft statistics log too "
                        "old (%s seconds old)" % (delta,))
    else:
      self.logger.info("Latest entry from amarisoft statistics is "
                       "%s seconds old" % (delta,))

  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)
  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if the previous sensor result is negative.
    """
    return self._anomaly(result_count=1, failure_amount=1)
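For reference, a minimal sketch (not part of the diff) of the JSON log line format these promises consume and of the staleness check that sense() performs above; the sample line follows the formatter configured further down in this commit and the fixtures used by the test file at the end.

# Illustrative only, not part of the commit.
import json
import time
from dateutil import parser

sample_line = '{"time": "2023-03-01 12:00:00,000", "log_level": "INFO", "message": "Samples stats", "data": {}}'
latest_timestamp = parser.parse(json.loads(sample_line)['time']).timestamp()
stats_period = 10
delta = time.time() - latest_timestamp
too_old = delta > stats_period * 2  # sense() logs an error in this case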
import errno
import json
import logging
import os

from dateutil import parser

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


# Get all data in the last "interval" seconds from JSON log
def get_data_interval(log, interval):

  log_number = 0
  latest_timestamp = 0
  data_list = []

  while True:
    # Open the current log first, then its rotated copies (XX.log.1, XX.log.2, ...)
    try:
      f = open("{}.{}".format(log, log_number) if log_number else log, "rb")
    except OSError:
      return data_list
    try:
      f.seek(0, os.SEEK_END)
      while True:
        try:
          # Walk backwards to the beginning of the previous line
          while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
            pass
        except OSError:
          # Reached the beginning of the file
          break
        pos = f.tell()
        l = json.loads(f.readline().decode().replace("'", '"'))
        timestamp = parser.parse(l['time'])
        data_list.append(l['data'])
        if not latest_timestamp:
          latest_timestamp = timestamp
        if (latest_timestamp - timestamp).total_seconds() > interval:
          return data_list
        f.seek(pos, os.SEEK_SET)
    finally:
      f.close()
    log_number += 1
@implementer(interface.IPromise)
class RunPromise(GenericPromise):

  def __init__(self, config):

    self.__name = config.get('name', None)
    self.__log_folder = config.get('log-folder', None)

    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=1)

    self.__title = os.path.splitext(self.__name)[0]
    self.__log_file = os.path.join(self.__log_folder, '%s.json.log' % self.__title)

    self.json_logger = logging.getLogger('json_logger')
    self.json_logger.setLevel(logging.INFO)
    handler = logging.FileHandler(self.__log_file)
    formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
    handler.setFormatter(formatter)
    self.json_logger.addHandler(handler)
  def sense(self):

    testing = self.getConfig('testing') == "True"

    if testing:
      self.logger.info("skipping promise")
      return

    amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
    stats_period = int(self.getConfig('stats-period'))
    min_rxtx_delay_threshold = float(self.getConfig('min-rxtx-delay', 5))
    avg_rxtx_delay_threshold = float(self.getConfig('avg-rxtx-delay', 7))

    data_list = get_data_interval(amarisoft_stats_log, stats_period * 2)

    # Collect the reported min/avg rxtx delay values over the interval
    min_rxtx_delay_list = [float(x['rf']['rxtx_delay_min']) for x in data_list]
    avg_rxtx_delay_list = [float(x['rf']['rxtx_delay_avg']) for x in data_list]

    if not min_rxtx_delay_list or not avg_rxtx_delay_list:
      self.logger.error("No TX/RX diff data available")
    else:
      min_rxtx_delay = min(min_rxtx_delay_list)
      avg_rxtx_delay = min(avg_rxtx_delay_list)
      if min_rxtx_delay > min_rxtx_delay_threshold:
        self.logger.error("The minimum available time %s (ms) for radio front end "
                          "processing is higher than the threshold %s (ms), which "
                          "depends on the radio front end." % (min_rxtx_delay, min_rxtx_delay_threshold))
      if avg_rxtx_delay > avg_rxtx_delay_threshold:
        self.logger.error("The average available time %s (ms) for radio front end "
                          "processing is higher than the threshold %s (ms), which "
                          "depends on the radio front end." % (avg_rxtx_delay, avg_rxtx_delay_threshold))
      else:
        self.logger.info("The minimum %s (ms) and average %s (ms) available time "
                         "for radio front end processing are OK" % (min_rxtx_delay, avg_rxtx_delay))
      self.json_logger.info("Minimum and average available time for radio front end processing (ms)",
                            extra={'data': {'min_rxtx_delay': min_rxtx_delay, 'avg_rxtx_delay': avg_rxtx_delay}})
  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)
  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if the previous sensor result is negative.
    """
    return self._anomaly(result_count=1, failure_amount=1)
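As a rough guide (not part of the diff), the keys read by sense() above would be passed through extra_config_dict the same way the test file at the end of this commit does for the stats-log promise; the values below are placeholders.

# Illustrative only: placeholder configuration mirroring the getConfig()
# calls in sense() above (the path and numbers are made up).
extra_config_dict = {
  'amarisoft-stats-log': '/path/to/amarisoft_stats.json.log',
  'stats-period': 60,      # seconds between statistics samples
  'min-rxtx-delay': 5,     # ms, sense() falls back to 5 when unset
  'avg-rxtx-delay': 7,     # ms, sense() falls back to 7 when unset
  'testing': 'False',      # "True" makes sense() skip entirely
}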
@@ -50,7 +50,7 @@ class RunPromise(JSONPromise):
          t = 0
        if (time.time() - t) > avg_computation_period:
          open(self.avg_flag_file, 'w').close()
-         temp_list = self.getJsonLogDataInterval(self.avg_temp_duration)
+         temp_list = self.get_json_log_data_interval(self.avg_temp_duration)
          if temp_list:
            avg_temp = sum(x['cpu_temperature'] for x in temp_list) / len(temp_list)
            if avg_temp > self.max_avg_temp:
import socket
import errno

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


@implementer(interface.IPromise)
class RunPromise(GenericPromise):

  def __init__(self, config):
    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=2)

  def sense(self):

    ifname = self.getConfig('ifname')
    testing = self.getConfig('testing') == "True"

    if testing:
      self.logger.info("skipping promise")
      return

    f = open('/sys/class/net/%s/operstate' % ifname, 'r')
    if f.read() == 'up\n':
      self.logger.info("%s is up", ifname)
    else:
      self.logger.error("%s is down", ifname)
    f.close()

  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)

  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if two out of the last three results are negative.
    """
    return self._anomaly(result_count=3, failure_amount=2)
@@ -39,7 +39,7 @@ class RunPromise(JSONPromise):
        # can be heavy in computation
        if (time.time() - t) > self.transit_period / 4:
          open(self.last_transit_file, 'w').close()
-         temp_list = self.getJsonLogDataInterval(self.transit_period)
+         temp_list = self.get_json_log_data_interval(self.transit_period)
          if temp_list:
            # If no previous data in log
            if len(temp_list) == 1:
@@ -43,7 +43,7 @@ class RunPromise(JSONPromise):
        # Get last available RAM from log file since avg_ram_period / 4
        if (time.time() - t) > self.avg_ram_period / 4:
          open(self.last_avg_ram_file, 'w').close()
-         temp_list = self.getJsonLogDataInterval(self.avg_ram_period)
+         temp_list = self.get_json_log_data_interval(self.avg_ram_period)
          if temp_list:
            avg_ram = sum(map(lambda x: x['available_ram'], temp_list)) / len(temp_list)
            if avg_ram < self.min_avg_ram:
import errno
import json
import logging
import os

from dateutil import parser

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


# Get all data in the last "interval" seconds from JSON log
def get_data_interval(log, interval):

  log_number = 0
  latest_timestamp = 0
  data_list = []

  while True:
    # Open the current log first, then its rotated copies (XX.log.1, XX.log.2, ...)
    try:
      f = open("{}.{}".format(log, log_number) if log_number else log, "rb")
    except OSError:
      return data_list
    try:
      f.seek(0, os.SEEK_END)
      while True:
        try:
          # Walk backwards to the beginning of the previous line
          while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
            pass
        except OSError:
          # Reached the beginning of the file
          break
        pos = f.tell()
        l = json.loads(f.readline().decode().replace("'", '"'))
        timestamp = parser.parse(l['time'])
        data_list.append(l['data'])
        if not latest_timestamp:
          latest_timestamp = timestamp
        if (latest_timestamp - timestamp).total_seconds() > interval:
          return data_list
        f.seek(pos, os.SEEK_SET)
    finally:
      f.close()
    log_number += 1
@implementer(interface.IPromise)
class RunPromise(GenericPromise):

  def __init__(self, config):

    self.__name = config.get('name', None)
    self.__log_folder = config.get('log-folder', None)

    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=1)

    self.__title = os.path.splitext(self.__name)[0]
    self.__log_file = os.path.join(self.__log_folder, '%s.json.log' % self.__title)

    self.json_logger = logging.getLogger('json_logger')
    self.json_logger.setLevel(logging.INFO)
    handler = logging.FileHandler(self.__log_file)
    formatter = logging.Formatter('{"time": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", "data": %(data)s}')
    handler.setFormatter(formatter)
    self.json_logger.addHandler(handler)
  def sense(self):

    testing = self.getConfig('testing') == "True"

    if testing:
      self.logger.info("skipping promise")
      return

    amarisoft_stats_log = self.getConfig('amarisoft-stats-log')
    stats_period = int(self.getConfig('stats-period'))
    max_rx_sample_db = float(self.getConfig('max-rx-sample-db'))

    data_list = get_data_interval(amarisoft_stats_log, stats_period * 2)

    # Track the highest sample value seen on each RX channel
    max_rx = []
    saturated = False
    for data in data_list:
      rx_list = data['samples']['rx']
      if not max_rx:
        max_rx = [-99.9 for x in rx_list]
      for i, rx in enumerate(rx_list):
        max_rx[i] = max(max_rx[i], float(rx['max']))
        if max_rx[i] >= max_rx_sample_db:
          saturated = True

    self.json_logger.info("RX maximum sample values (dB)",
                          extra={'data': max_rx})

    if not max_rx:
      self.logger.error("No RX samples data available")
    elif saturated:
      self.logger.error("RX antenna saturated, please lower rx_gain")
    else:
      self.logger.info("No saturation detected on RX antenna")
  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)
  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if the previous sensor result is negative.
    """
    return self._anomaly(result_count=1, failure_amount=1)
import os
import errno
import subprocess

from zope.interface import implementer
from slapos.grid.promise import interface
from slapos.grid.promise.generic import GenericPromise


@implementer(interface.IPromise)
class RunPromise(GenericPromise):

  def __init__(self, config):
    super(RunPromise, self).__init__(config)
    self.setPeriodicity(minute=1)

  def sense(self):

    testing = self.getConfig('testing') == "True"
    sdr = self.getConfig('sdr')

    if testing:
      self.logger.info("skipping promise")
      return

    try:
      out = subprocess.check_output([
        sdr + '/sdr_util', '-c', '0', 'version'], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
      if e.returncode == 1 and \
         ("DMA channel is already opened" in e.output.decode() or \
          "Device or resource busy" in e.output.decode()):
        self.logger.info("eNB is using /dev/sdr0")
        return
    self.logger.error("eNB is not using /dev/sdr0")

  def test(self):
    """
    Called after sense() if the instance is still converging.
    Returns success or failure based on sense results.
    In this case, fail if the previous sensor result is negative.
    """
    return self._test(result_count=1, failure_amount=1)
  def anomaly(self):
    """
    Called after sense() if the instance has finished converging.
    Returns success or failure based on sense results.
    Failure signals the instance has diverged.
    In this case, fail if the previous sensor result is negative.
    """
    return self._anomaly(result_count=1, failure_amount=1)
@@ -7,6 +7,7 @@ import textwrap
from dateutil import parser as dateparser
from datetime import datetime
from slapos.grid.promise.generic import GenericPromise
def iter_reverse_lines(f):
@@ -38,6 +39,33 @@ def iter_logrotate_file_handle(path, mode='r'):
    except OSError:
      break
def get_json_log_data_interval(json_log_file, interval):
  """
  Get all data in the last "interval" seconds from JSON log
  Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
  """
  current_time = datetime.now()
  data_list = []
  for f in iter_logrotate_file_handle(json_log_file, 'rb'):
    for line in iter_reverse_lines(f):
      l = json.loads(line.decode().replace("'", '"'))
      timestamp = dateparser.parse(l['time'])
      if (current_time - timestamp).total_seconds() > interval:
        return data_list
      data_list.append(l['data'])
  return data_list


def get_json_log_latest_timestamp(json_log_file):
  """
  Get latest timestamp from JSON log
  Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
  """
  for f in iter_logrotate_file_handle(json_log_file, 'rb'):
    for line in iter_reverse_lines(f):
      l = json.loads(line.decode().replace("'", '"'))
      return dateparser.parse(l['time']).timestamp()
  return 0
class JSONPromise(GenericPromise):
  def __init__(self, config):
@@ -47,9 +75,9 @@ class JSONPromise(GenericPromise):
    super(JSONPromise, self).__init__(config)
    json_log_name = os.path.splitext(self.__name)[0] + '.json.log'
    self.__json_log_file = os.path.join(self.__log_folder, json_log_name)
-   self.json_logger = self.__makeJsonLogger(self.__json_log_file)
+   self.json_logger = self.__make_json_logger(self.__json_log_file)

- def __makeJsonLogger(self, json_log_file):
+ def __make_json_logger(self, json_log_file):
    logger = logging.getLogger('json-logger')
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(json_log_file)
@@ -61,35 +89,8 @@ class JSONPromise(GenericPromise):
    logger.addHandler(handler)
    return logger
-  def getJsonLogDataInterval(self, interval):
-    """
-    Get all data in the last "interval" seconds from JSON log
-    Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
-    """
-    current_time = datetime.now()
-    data_list = []
-    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
-      for line in iter_reverse_lines(f):
-        l = json.loads(line.decode().replace("'", '"'))
-        timestamp = dateparser.parse(l['time'])
-        if (current_time - timestamp).total_seconds() > interval:
-          return data_list
-        data_list.append(l['data'])
-    return data_list
-  def getJsonLogLatestTimestamp(log):
-    """
-    Get latest timestamp from JSON log
-    Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
-    """
-    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
-      for line in iter_reverse_lines(f):
-        l = json.loads(line.decode().replace("'", '"'))
-        return dateparser.parse(l['time'])
-    return 0
from dateutil import parser
from slapos.grid.promise.generic import GenericPromise
+  def get_json_log_data_interval(self, interval):
+    return get_json_log_data_interval(self.__json_log_file, interval)


def tail_file(file_path, line_count=10):
  """
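To make the util.py refactoring above concrete, here is a hedged sketch (not part of the diff) of how a JSONPromise subclass combines json_logger with the renamed get_json_log_data_interval() helper, following the pattern visible in the cpu-temperature, transit and RAM hunks earlier in this commit; the class name and data fields are invented for illustration.

# Illustrative only: a hypothetical JSONPromise subclass using the helpers
# introduced above (class name and data fields are examples).
from zope.interface import implementer
from slapos.grid.promise import interface
from .util import JSONPromise

@implementer(interface.IPromise)
class ExampleAvgPromise(JSONPromise):

  def __init__(self, config):
    super(ExampleAvgPromise, self).__init__(config)
    self.setPeriodicity(minute=1)

  def sense(self):
    # Write one structured sample per run into <promise name>.json.log ...
    self.json_logger.info("Example sample", extra={'data': {'value': 42.0}})
    # ... and read back every sample from the last 600 seconds,
    # rotated files included, through the new module-level helper.
    data_list = self.get_json_log_data_interval(600)
    if not data_list:
      self.logger.error("No data available in the last 600 seconds")
    else:
      avg = sum(x['value'] for x in data_list) / len(data_list)
      self.logger.info("Average value over the interval: %s", avg)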
# -*- coding: utf-8 -*-
##############################################################################
# Copyright (c) 2018 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
##############################################################################
import mock
import os
import time
import unittest

from datetime import datetime
from datetime import timedelta

from slapos.grid.promise import PromiseError
from slapos.promise.plugin.check_amarisoft_stats_log import RunPromise
from . import TestPromisePluginMixin
class TestCheckAmarisoftStatsLog(TestPromisePluginMixin):

  promise_name = "check-amarisoft-stats-log.py"

  def setUp(self):
    super(TestCheckAmarisoftStatsLog, self).setUp()
    self.amarisoft_stats_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'amarisoft_stats.json.log')
    with open(self.amarisoft_stats_log, 'w+') as f:
      f.write("""{"time": "%s", "log_level": "INFO", "message": "Samples stats", "data": {}}
{"time": "%s", "log_level": "INFO", "message": "Samples stats", "data": {}}
{"time": "%s", "log_level": "INFO", "message": "Samples stats", "data": {}}""" % (
        (datetime.now() - timedelta(seconds=25)).strftime("%Y-%m-%d %H:%M:%S,%f")[:-3],
        (datetime.now() - timedelta(seconds=15)).strftime("%Y-%m-%d %H:%M:%S,%f")[:-3],
        (datetime.now() - timedelta(seconds=5)).strftime("%Y-%m-%d %H:%M:%S,%f")[:-3],
      ))
  def writePromise(self, **kw):
    super(TestCheckAmarisoftStatsLog, self).writePromise(self.promise_name,
      "from %s import %s\nextra_config_dict = %r\n"
      % (RunPromise.__module__, RunPromise.__name__, kw))
  def test_promise_success(self):
    self.writePromise(**{
      'amarisoft-stats-log': self.amarisoft_stats_log,
      'stats-period': 10,
    })
    self.configureLauncher()
    self.launcher.run()

  def test_promise_fail(self):
    self.writePromise(**{
      'amarisoft-stats-log': self.amarisoft_stats_log,
      'stats-period': 1,
    })
    self.configureLauncher()
    with self.assertRaises(PromiseError):
      self.launcher.run()


if __name__ == '__main__':
  unittest.main()