Skip to content
Snippets Groups Projects
Verified Commit 9e10d561 authored by Tamas Gal's avatar Tamas Gal :speech_balloon:
Browse files

Improve and clean up log analyser

parent 058cd92d
No related branches found
No related tags found
No related merge requests found
Pipeline #32456 passed
......@@ -20,8 +20,9 @@ import time
# Event names and colours
EVENTS = dict(ERROR="red", WARNING="orange", Died="deeppink", Born="steelblue")
BUFFER_SIZE = 16*1024**2 # buffer size for the lines when parsing the log
BUFFER_SIZE = 16 * 1024**2 # buffer size for the lines when parsing the log
REGEX_LOG_LINE = re.compile(".+ ([A-Za-z]+) \[([A-Za-z]+)\]: .+")
REGEX_LEGACY_LOG_LINE = re.compile("^.+ \[([A-Za-z]+)\]: .+")
def plot_log_statistics(out_file, summary, title):
......@@ -32,9 +33,15 @@ def plot_log_statistics(out_file, summary, title):
fig, ax = plt.subplots()
for idx, (event, color) in enumerate(EVENTS.items()):
x_offset = idx*w + w/2 - w*len(EVENTS)/2
ax.bar(xs + x_offset, [summary[process][event] for process in processes], width=w, color=color, label=event)
ax.set_xticks(xs, processes);
x_offset = idx * w + w / 2 - w * len(EVENTS) / 2
ax.bar(
xs + x_offset,
[summary[process][event] for process in processes],
width=w,
color=color,
label=event,
)
ax.set_xticks(xs, processes)
ax.set_ylabel("count")
ax.legend()
ax.set_ylim(1e-1, 1e6)
......@@ -67,8 +74,6 @@ def process_log_file(log_file):
The returned dictionary has the structure dict[PROCESS][EVENT] => count.
"""
summary = defaultdict(lambda: defaultdict(int))
# for event in EVENTS:
# summary[event] = defaultdict(int)
n_lines_parsed = 0
n_lines_unparsed = 0
......@@ -80,22 +85,32 @@ def process_log_file(log_file):
if m is not None:
tag = m[1]
process = m[2]
if tag in ("Born", "Died"):
summary[process][tag] += 1
for severity in ("WARNING", "ERROR"):
if severity in line:
summary[process][severity] += 1
n_lines_parsed += 1
else:
n_lines_unparsed += 1
m = REGEX_LEGACY_LOG_LINE.match(line)
if m is not None:
tag = "MSG"
process = m[1]
else:
n_lines_unparsed += 1
continue
if tag in ("Born", "Died"):
summary[process][tag] += 1
for severity in ("WARNING", "ERROR"):
if (
severity in line
or severity.lower() in line
or severity.lower().capitalize() in line
):
summary[process][severity] += 1
n_lines_parsed += 1
lines_chunk = fobj.readlines(BUFFER_SIZE)
print(f"Successfully parsed {n_lines_parsed} lines")
print(f"A total of {n_lines_unparsed} could not be parsed.")
print(f" parsed lines: {n_lines_parsed}")
print(f" unparsed lines: {n_lines_unparsed}")
for process, stats in summary.items():
print(f"{process}:")
print(f" {process}:")
for event, n_lines in stats.items():
print(f" {event}: {n_lines}")
print(f" {event}: {n_lines}")
return summary
......@@ -114,7 +129,7 @@ def main():
summary = process_log_file(log_fpath)
title = os.path.basename(fname)
plot_log_statistics(plot_fpath, summary, title)
time.sleep(seconds_to_UTC_midnight() + 5*60)
time.sleep(seconds_to_UTC_midnight() + 5 * 60)
if __name__ == "__main__":
......
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment