# Merged check_free_disk_space and monitor_partition_space promise plugins
# (originally contributed by lisa.casino & xavier_thompson).
import itertools | ||
import json | ||
import logging | ||
import os | ||
import textwrap | ||
from dateutil import parser as dateparser | ||
from slapos.grid.promise.generic import GenericPromise | ||
def iter_reverse_lines(f):
  """
  Read lines from the end of the file

  Yields the lines of ``f`` one by one, last line first.  ``f`` must be
  seekable and opened in binary mode (the ``b'\\n'`` comparison and the
  non-zero SEEK_CUR seeks require it).
  """
  # Start scanning from the very end of the file.
  f.seek(0, os.SEEK_END)
  while True:
    try:
      # Walk backwards one byte per iteration (seek -2, read +1) until the
      # byte just read is a newline: the cursor then sits at the start of
      # the current line.  seek() returns the new absolute offset, so
      # reaching offset 0 (start of file) also ends the scan.
      while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
        pass
    except OSError:
      # Seeking before the start of the file: every line has been yielded.
      return
    pos = f.tell()
    yield f.readline()
    # readline() moved the cursor past the line just yielded; jump back to
    # its start so the next iteration continues scanning backwards.
    f.seek(pos, os.SEEK_SET)
def iter_logrotate_file_handle(path, mode='r'):
  """
  Yield successive file handles for rotated logs
  (XX.log, XX.log.1, XX.log.2, ...)

  Opens ``path`` first, then ``path`` with an increasing numeric suffix,
  and stops at the first candidate that cannot be opened.
  """
  suffix = 0
  while True:
    # The current log has no suffix; rotated ones get 1, 2, ...
    candidate = path + (str(suffix) if suffix else '')
    try:
      with open(candidate, mode) as handle:
        yield handle
    except OSError:
      # No such rotation exists (or it is unreadable): iteration is over.
      return
    suffix += 1
class JSONPromise(GenericPromise):
  """
  GenericPromise variant that mirrors its activity into a JSON log file
  (``<promise name>.json.log`` in the configured log folder) and provides
  helpers to read entries back from that log, rotated files included.
  """

  def __init__(self, config):
    # 'name' and 'log-folder' come from the promise configuration; both
    # are required to build the JSON log path below.
    self.__name = config.get('name', None)
    self.__log_folder = config.get('log-folder', None)
    super(JSONPromise, self).__init__(config)
    json_log_name = os.path.splitext(self.__name)[0] + '.json.log'
    self.__json_log_file = os.path.join(self.__log_folder, json_log_name)
    self.json_logger = self.__makeJsonLogger(self.__json_log_file)

  def __makeJsonLogger(self, json_log_file):
    """
    Return a logger writing INFO records as one JSON object per line to
    json_log_file.  Callers must log with ``extra={'data': ...}`` since
    the formatter references ``%(data)s``.
    """
    # NOTE(review): the 'json-logger' name is shared by every JSONPromise
    # instance, so each instantiation adds one more FileHandler to the
    # same logger and records may be duplicated across files.  Kept as-is
    # to preserve behavior; consider a per-file logger name.
    logger = logging.getLogger('json-logger')
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(json_log_file)
    formatter = logging.Formatter(
      '{"time": "%(asctime)s", "log_level": "%(levelname)s"'
      ', "message": "%(message)s", "data": %(data)s}'
    )
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger

  def getJsonLogDataInterval(self, interval):
    """
    Get all data in the last "interval" seconds from JSON log
    Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)

    The window is measured backwards from the newest log entry, not from
    the current time; entries are returned newest first.
    """
    latest_timestamp = None
    data_list = []
    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
      for line in iter_reverse_lines(f):
        # Presumably 'data' may be a single-quoted Python repr; normalize
        # the quotes so the line parses as JSON — TODO confirm upstream.
        l = json.loads(line.decode().replace("'", '"'))
        timestamp = dateparser.parse(l['time'])
        if latest_timestamp is None:
          # First decoded line is the newest entry: it anchors the window.
          latest_timestamp = timestamp
        # Fixed: check BEFORE appending — the previous version appended
        # first, letting one entry older than the window into the result.
        if (latest_timestamp - timestamp).total_seconds() > interval:
          return data_list
        data_list.append(l['data'])
    return data_list

  def getJsonLogLatestTimestamp(self):
    """
    Get latest timestamp from JSON log
    Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)

    Returns a datetime parsed from the newest entry, or 0 when no entry
    exists.  (Fixed: the parameter was mistakenly named ``log`` instead
    of ``self``, so every call failed with NameError.)
    """
    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
      for line in iter_reverse_lines(f):
        l = json.loads(line.decode().replace("'", '"'))
        return dateparser.parse(l['time'])
    return 0
from dateutil import parser | ||
from slapos.grid.promise.generic import GenericPromise | ||
def tail_file(file_path, line_count=10): | ||
""" | ||
Returns the last lines of file. | ||
""" | ||
line_list = [] | ||
with open(file_path) as f: | ||
BUFSIZ = 1024 | ||
... | ... | @@ -25,5 +116,4 @@ def tail_file(file_path, line_count=10): |
size -= line_len | ||
bytes -= BUFSIZ | ||
block -= 1 | ||
return '\n'.join(''.join(line_list).splitlines()[-line_count:]) | ||
\ No newline at end of file | ||
return '\n'.join(''.join(line_list).splitlines()[-line_count:]) |