diff --git a/backend/scripts/log_analyser.py b/backend/scripts/log_analyser.py
index 4184c3e835bfb4bb076a105b1b062751f30a04c8..cd74b17bdc9a89f49a713290caa6019243518912 100755
--- a/backend/scripts/log_analyser.py
+++ b/backend/scripts/log_analyser.py
@@ -20,8 +20,10 @@ import time
 
 # Event names and colours
 EVENTS = dict(ERROR="red", WARNING="orange", Died="deeppink", Born="steelblue")
-BUFFER_SIZE = 16*1024**2  # buffer size for the lines when parsing the log
+BUFFER_SIZE = 16 * 1024**2  # buffer size for the lines when parsing the log
 REGEX_LOG_LINE = re.compile(".+ ([A-Za-z]+) \[([A-Za-z]+)\]: .+")
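+# legacy log lines carry no event tag before the bracketed process name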
+REGEX_LEGACY_LOG_LINE = re.compile(r"^.+ \[([A-Za-z]+)\]: .+")
 
 
 def plot_log_statistics(out_file, summary, title):
@@ -32,9 +34,16 @@ def plot_log_statistics(out_file, summary, title):
 
     fig, ax = plt.subplots()
     for idx, (event, color) in enumerate(EVENTS.items()):
-        x_offset = idx*w + w/2 - w*len(EVENTS)/2
-        ax.bar(xs + x_offset, [summary[process][event] for process in processes], width=w, color=color, label=event)
-    ax.set_xticks(xs, processes);
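+        # centre the group of len(EVENTS) bars around each x tick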
+        x_offset = idx * w + w / 2 - w * len(EVENTS) / 2
+        ax.bar(
+            xs + x_offset,
+            [summary[process][event] for process in processes],
+            width=w,
+            color=color,
+            label=event,
+        )
+    ax.set_xticks(xs, processes)
     ax.set_ylabel("count")
     ax.legend()
     ax.set_ylim(1e-1, 1e6)
@@ -67,8 +76,6 @@ def process_log_file(log_file):
     The returned dictionary has the structure dict[PROCESS][EVENT] => count.
     """
     summary = defaultdict(lambda: defaultdict(int))
-    # for event in EVENTS:
-    #     summary[event] = defaultdict(int)
 
     n_lines_parsed = 0
     n_lines_unparsed = 0
@@ -80,22 +87,35 @@ def process_log_file(log_file):
                 if m is not None:
                     tag = m[1]
                     process = m[2]
-                    if tag in ("Born", "Died"):
-                        summary[process][tag] += 1
-                    for severity in ("WARNING", "ERROR"):
-                        if severity in line:
-                            summary[process][severity] += 1
-                    n_lines_parsed += 1
                 else:
-                    n_lines_unparsed += 1
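+                    # fall back to the legacy format, which has no event tag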
+                    m = REGEX_LEGACY_LOG_LINE.match(line)
+                    if m is not None:
+                        tag = "MSG"  # placeholder; not in EVENTS, so only severities get tallied
+                        process = m[1]
+                    else:
+                        n_lines_unparsed += 1
+                        continue
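+                # only "Born" and "Died" events are tallied by tag; severities are counted below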
+                if tag in ("Born", "Died"):
+                    summary[process][tag] += 1
+                for severity in ("WARNING", "ERROR"):
+                    # count "ERROR", "error", and "Error" spellings alike
+                    if any(
+                        form in line
+                        for form in (severity, severity.lower(), severity.capitalize())
+                    ):
+                        summary[process][severity] += 1
+                n_lines_parsed += 1
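+            # read the next chunk; readlines(BUFFER_SIZE) keeps peak memory bounded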
             lines_chunk = fobj.readlines(BUFFER_SIZE)
 
-    print(f"Successfully parsed {n_lines_parsed} lines")
-    print(f"A total of {n_lines_unparsed} could not be parsed.")
+    print(f"  parsed lines: {n_lines_parsed}")
+    print(f"  unparsed lines: {n_lines_unparsed}")
     for process, stats in summary.items():
-        print(f"{process}:")
+        print(f"    {process}:")
         for event, n_lines in stats.items():
-            print(f"  {event}: {n_lines}")
+            print(f"      {event}: {n_lines}")
 
     return summary
 
@@ -114,7 +134,7 @@ def main():
                 summary = process_log_file(log_fpath)
                 title = os.path.basename(fname)
                 plot_log_statistics(plot_fpath, summary, title)
-        time.sleep(seconds_to_UTC_midnight() + 5*60)
+        time.sleep(seconds_to_UTC_midnight() + 5 * 60)  # wake five minutes past UTC midnight
 
 
 if __name__ == "__main__":