Fix incorrect metrics when nothing is detected (#19362)

Signed-off-by: Mohammed Yasin <32206511+Y-T-G@users.noreply.github.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
pull/19311/head
Mohammed Yasin authored 2 weeks ago, committed by GitHub
parent c83c3506fa
commit e96404c919
  1. ultralytics/models/yolo/detect/val.py (2 changed lines)
  2. ultralytics/utils/metrics.py (2 changed lines)

@@ -186,7 +186,7 @@ class DetectionValidator(BaseValidator):
         self.nt_per_class = np.bincount(stats["target_cls"].astype(int), minlength=self.nc)
         self.nt_per_image = np.bincount(stats["target_img"].astype(int), minlength=self.nc)
         stats.pop("target_img", None)
-        if len(stats) and stats["tp"].any():
+        if len(stats):
             self.metrics.process(**stats)
         return self.metrics.results_dict
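
The old guard only called `self.metrics.process(**stats)` when at least one prediction was a true positive, so a validation run with no detections never updated the metrics object and its results dictionary reported stale or default values instead of zeros. A minimal sketch of the two guards (hypothetical `metrics_would_update` helper and assumed stats shapes, not the actual Ultralytics code path):

```python
# Minimal sketch of the old vs. new guard, assuming a stats dict shaped like
# the validator's: "tp" is a (num_preds, num_iou_thresholds) boolean array.
import numpy as np

def metrics_would_update(stats, require_tp):
    """Hypothetical helper: return True if metrics.process(**stats) would run."""
    if require_tp:
        # Old guard: skipped when no prediction matched a ground truth,
        # so precision/recall/mAP were never recomputed for this run.
        return bool(len(stats)) and bool(stats["tp"].any())
    # New guard: process whatever was collected, so all-zero metrics
    # are reported correctly when nothing is detected.
    return bool(len(stats))

# A run where the model produced no detections at all.
stats = {
    "tp": np.zeros((0, 10), dtype=bool),  # no predictions -> no true positives
    "conf": np.zeros(0),
    "pred_cls": np.zeros(0),
    "target_cls": np.array([3.0, 7.0]),   # ground truth still exists
}
print(metrics_would_update(stats, require_tp=True))   # False (old behaviour)
print(metrics_would_update(stats, require_tp=False))  # True  (after the fix)
```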

@@ -604,7 +604,7 @@ def ap_per_class(
             if j == 0:
                 prec_values.append(np.interp(x, mrec, mpre))  # precision at mAP@0.5
-    prec_values = np.array(prec_values)  # (nc, 1000)
+    prec_values = np.array(prec_values) if prec_values else np.zeros((1, 1000))  # (nc, 1000)
     # Compute F1 (harmonic mean of precision and recall)
     f1_curve = 2 * p_curve * r_curve / (p_curve + r_curve + eps)
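
Because `metrics.process` can now run with empty predictions, `ap_per_class` may finish its class loop without appending a single precision curve; `np.array([])` then yields a 1-D, zero-length array instead of the `(nc, 1000)` array the rest of the function expects. A short sketch of the shape difference (assumed shapes only, not the full `ap_per_class` logic):

```python
# Minimal sketch, assuming only the (nc, 1000) shape convention from the diff.
import numpy as np

prec_values = []  # no class had predictions, so the loop appended nothing

old = np.array(prec_values)  # shape (0,): 1-D and empty, breaks 2-D use later
new = np.array(prec_values) if prec_values else np.zeros((1, 1000))  # shape (1, 1000)

print(old.shape, new.shape)  # (0,) (1, 1000)
print(new.mean(0).shape)     # (1000,) -> an all-zero curve can still be averaged/plotted
```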
