Skip to content

Commit 24d7e34

Browse files
authored
Merge pull request #18 from alex-1001/enhanced-division-metrics
added fp, tp, fn, gt annotation division metrics
2 parents a80e9c1 + e623692 commit 24d7e34

File tree

3 files changed

+45
-19
lines changed

3 files changed

+45
-19
lines changed

ctc_metrics/metrics/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from ctc_metrics.metrics.validation.valid import valid
2-
from ctc_metrics.metrics.biological.bc import bc
2+
from ctc_metrics.metrics.biological.bc import bc, raw_division_metrics
33
from ctc_metrics.metrics.biological.ct import ct
44
from ctc_metrics.metrics.biological.cca import cca
55
from ctc_metrics.metrics.biological.tf import tf

ctc_metrics/metrics/biological/bc.py

Lines changed: 34 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -91,19 +91,16 @@ def is_matching(
9191
return False
9292
return True
9393

94-
95-
def bc(
94+
def raw_division_metrics(
9695
comp_tracks: np.ndarray,
9796
ref_tracks: np.ndarray,
9897
mapped_ref: list,
9998
mapped_comp: list,
10099
i: int
101100
):
102101
"""
103-
Computes the branching correctness metric. As described in the paper,
104-
"An objective comparison of cell-tracking algorithms."
105-
- Vladimir Ulman et al., Nature methods 2017
106-
102+
Computes number of true positives, false positives, and false negatives for divisions.
103+
107104
Args:
108105
comp_tracks: The result tracks. A (n,4) numpy ndarray with columns:
109106
- label
@@ -128,21 +125,26 @@ def bc(
128125
i: The maximal allowed error in frames.
129126
130127
Returns:
131-
The branching correctness metric.
128+
Tuple of true positives, false positives, and false negatives.
132129
"""
133-
134130
# Extract relevant tracks with children in reference
135131
ends_with_split_ref = get_ids_that_ends_with_split(ref_tracks)
136132
t_ref = np.array([ref_tracks[ref_tracks[:, 0] == ref][0, 2]
137133
for ref in ends_with_split_ref])
138-
if len(ends_with_split_ref) == 0:
139-
return None
134+
140135
# Extract relevant tracks with children in computed result
141136
ends_with_split_comp = get_ids_that_ends_with_split(comp_tracks)
142137
t_comp = np.asarray([comp_tracks[comp_tracks[:, 0] == comp][0, 2]
143138
for comp in ends_with_split_comp])
139+
140+
# If there are no divisions in the reference
141+
if len(ends_with_split_ref) == 0:
142+
return (0, len(ends_with_split_comp), 0)
143+
144+
# If there are no divisions in the computed result
144145
if len(ends_with_split_comp) == 0:
145-
return 0
146+
return (0, 0, len(ends_with_split_ref))
147+
146148
# Find all matches between reference and computed branching events (mitosis)
147149
matches = []
148150
for comp, tc in zip(ends_with_split_comp, t_comp):
@@ -162,8 +164,25 @@ def bc(
162164
comp_children, tr, tc
163165
):
164166
matches.append((ref, comp))
167+
return (len(matches), len(ends_with_split_comp) - len(matches), len(ends_with_split_ref) - len(matches))
168+
169+
def bc(
170+
tp: int,
171+
fp: int,
172+
fn: int
173+
):
174+
"""
175+
Computes the branching correctness metric. As described in the paper,
176+
"An objective comparison of cell-tracking algorithms."
177+
- Vladimir Ulman et al., Nature methods 2017
178+
179+
Args:
180+
tp: The number of true positives.
181+
fp: The number of false positives.
182+
fn: The number of false negatives.
183+
184+
Returns:
185+
The branching correctness metric.
186+
"""
165187
# Calculate BC(i)
166-
return calculate_f1_score(
167-
len(matches),
168-
len(ends_with_split_comp) - len(matches),
169-
len(ends_with_split_ref) - len(matches))
188+
return calculate_f1_score(tp, fp, fn)

ctc_metrics/scripts/evaluate.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import numpy as np
55

66
from ctc_metrics.metrics import (
7-
valid, det, seg, tra, ct, tf, bc, cca, mota, hota, idf1, chota, mtml, faf,
7+
valid, det, seg, tra, ct, tf, bc, raw_division_metrics, cca, mota, hota, idf1, chota, mtml, faf,
88
op_ctb, op_csb, bio, op_clb, lnk
99
)
1010
from ctc_metrics.metrics import ALL_METRICS
@@ -226,10 +226,17 @@ def calculate_metrics(
226226

227227
if "BC" in metrics:
228228
for i in range(4):
229-
results[f"BC({i})"] = bc(
230-
comp_tracks, ref_tracks,
229+
tp, fp, fn = raw_division_metrics(comp_tracks, ref_tracks,
231230
traj["mapped_ref"], traj["mapped_comp"],
232231
i=i)
232+
233+
if "gt_divisions" not in results:
234+
results["gt_divisions"] = tp + fn
235+
236+
results[f"tp_div({i})"] = tp
237+
results[f"fp_div({i})"] = fp
238+
results[f"fn_div({i})"] = fn
239+
results[f"BC({i})"] = bc(tp, fp, fn)
233240

234241
if "CCA" in metrics:
235242
results["CCA"] = cca(comp_tracks, ref_tracks)

0 commit comments

Comments (0)