Commit c0655534 authored by Rafael Monnerat

monitor: Logrotate can call this script multiple times

  Consider the last start date so that the same entry is not included multiple times
parent c1673418
@@ -15,35 +15,47 @@ def parseArguments():
   return parser
 def buildStatistic(history_folder):
+  now = time.time()
   for p in glob.glob("%s/*.history.json" % history_folder):
     result = {}
     stats_list = []
     promise_name = p.split("/")[-1].replace(".history.json", "")
+    stat_file_path = p.replace(".history.json", ".stats.json")
+    if os.path.exists(stat_file_path):
+      with open(stat_file_path) as f:
+        stats_dict = json.load(f)
+        f.close()
+    else:
+      stats_dict = {"date": now, "data": []}
+    last_date = None
+    if stats_dict["data"]:
+      last_date = stats_dict["data"][-1]["start-date"]
     with open(p) as f:
       j = json.load(f)
+      j_last_date = j['data'][-1]["start-date"]
+      if last_date == j_last_date:
+        # This file was already loaded, so skip
+        continue
  • @rafael What happens if last_date == j_last_date is TRUE and continue is called here?

    I can understand your intention: it is supposed to skip to the next p. But doesn't this cause a dead-lock on the file opened by open(p)?

    The reason I am asking/investigating this is that I found a heavy load from this script on a vifib front-end server. This process (monitor.statistics) runs continuously, and the server CPU load is at 100% almost all the time due to this script.

    I am not really sure of the reason for the continuous running; continue is just my guess.

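A side note on the semantics the question raises (a minimal standalone sketch, not part of the commit; the function name skip_already_processed and its return value are made up for illustration): continue inside a with block exits the block first, so the file opened by open(p) is closed before the loop moves on to the next p, and no handle is left open.

import glob
import json

def skip_already_processed(history_folder, last_date):
  processed = []
  for p in glob.glob("%s/*.history.json" % history_folder):
    with open(p) as f:
      # the with block closes f on any exit, including an exit via continue
      j = json.load(f)
      if j['data'][-1]["start-date"] == last_date:
        # already recorded in the stats file: jump straight to the next p
        continue
      processed.append(p)
  return processed

The hunk from the commit continues below.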
       for entry in j['data']:
         day = entry["start-date"].split(" ")[0]
         result.setdefault(day, {"ERROR": 0, "OK": 0})
         result[day][str(entry["status"])] += 1
       f.close()
     for date, stat in result.iteritems():
       stats_list.append(
         {"status": "ERROR" if stat["ERROR"] > 0 else "OK",
-         "change-time": 0,
-         "start-date": "%s 00:00:00" % date,
+         "change-time": now,
+         "start-date": j_last_date,
          "message": stat})
-    stat_file_path = p.replace(".history.json", ".stats.json")
-    if os.path.exists(stat_file_path):
-      with open(stat_file_path) as f:
-        stats_dict = json.load(f)
-        f.close()
-    else:
-      stats_dict = {"date": time.time(), "data": []}
     stats_dict["data"].extend(stats_list)
...
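For readability, this is how buildStatistic reads after this commit, reconstructed from the visible part of the hunk above. It is a sketch: the module-level imports and the indentation are assumptions, the code targets Python 2 (dict.iteritems), and the hunk is truncated, so the real function continues beyond what is shown here.

import glob
import json
import os
import time

def buildStatistic(history_folder):
  now = time.time()
  for p in glob.glob("%s/*.history.json" % history_folder):
    result = {}
    stats_list = []
    promise_name = p.split("/")[-1].replace(".history.json", "")
    stat_file_path = p.replace(".history.json", ".stats.json")
    if os.path.exists(stat_file_path):
      with open(stat_file_path) as f:
        stats_dict = json.load(f)
        f.close()
    else:
      stats_dict = {"date": now, "data": []}
    last_date = None
    if stats_dict["data"]:
      last_date = stats_dict["data"][-1]["start-date"]
    with open(p) as f:
      j = json.load(f)
      j_last_date = j['data'][-1]["start-date"]
      if last_date == j_last_date:
        # This file was already loaded, so skip
        continue
      for entry in j['data']:
        day = entry["start-date"].split(" ")[0]
        result.setdefault(day, {"ERROR": 0, "OK": 0})
        result[day][str(entry["status"])] += 1
      f.close()
    for date, stat in result.iteritems():
      stats_list.append(
        {"status": "ERROR" if stat["ERROR"] > 0 else "OK",
         "change-time": now,
         "start-date": j_last_date,
         "message": stat})
    stats_dict["data"].extend(stats_list)
    # ... the hunk continues past this point (not visible in the excerpt above)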