Use `find_unused_parameters=True` in `BaseTrainer` when wrapping the model with `DistributedDataParallel()` (#14323)

pull/14329/head
Chia-Hsiang Tsai 5 months ago committed by GitHub
parent 997f2c92cd
commit 470b120a1b
1 changed file with 1 addition and 1 deletion
ultralytics/engine/trainer.py

@@ -266,7 +266,7 @@ class BaseTrainer:
         self.amp = bool(self.amp)  # as boolean
         self.scaler = torch.cuda.amp.GradScaler(enabled=self.amp)
         if world_size > 1:
-            self.model = nn.parallel.DistributedDataParallel(self.model, device_ids=[RANK])
+            self.model = nn.parallel.DistributedDataParallel(self.model, device_ids=[RANK], find_unused_parameters=True)
         # Check imgsz
         gs = max(int(self.model.stride.max() if hasattr(self.model, "stride") else 32), 32)  # grid size (max stride)
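Context for the change: by default, DDP's reducer expects every registered parameter to receive a gradient on each backward pass; if part of the model is skipped in `forward()`, training can fail with "RuntimeError: Expected to have finished reduction in the prior iteration before starting a new one." Passing `find_unused_parameters=True` makes DDP traverse the autograd graph after each forward pass and mark parameters that got no gradients, at some extra overhead. Below is a minimal standalone sketch of this pattern, not Ultralytics code: the `ToyModel` class and its unused `head_b` branch are hypothetical, and it runs a single CPU process with the gloo backend purely to construct DDP (a real run would launch one process per GPU, e.g. via `torchrun`, with nccl).

```python
"""Minimal sketch of why find_unused_parameters=True is needed (assumptions noted above)."""
import os

import torch
import torch.distributed as dist
import torch.nn as nn


class ToyModel(nn.Module):
    """Hypothetical model where head_b is never called, so its params get no grads."""

    def __init__(self):
        super().__init__()
        self.backbone = nn.Linear(8, 8)
        self.head_a = nn.Linear(8, 1)
        self.head_b = nn.Linear(8, 1)  # unused in forward() -> "unused parameters"

    def forward(self, x):
        return self.head_a(self.backbone(x))


def main():
    # Single-process "world" just to be able to construct DDP on CPU.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    dist.init_process_group(backend="gloo", rank=0, world_size=1)

    model = nn.parallel.DistributedDataParallel(
        ToyModel(),
        find_unused_parameters=True,  # let DDP skip reduction for grad-less params
    )

    # Without the flag, the second iteration is where DDP's reducer would
    # complain that the prior iteration's reduction never finished.
    for _ in range(2):
        model.zero_grad()
        loss = model(torch.randn(4, 8)).sum()
        loss.backward()

    dist.destroy_process_group()


if __name__ == "__main__":
    main()
```

The trade-off is the per-iteration graph traversal, which is why PyTorch leaves the flag off by default; it is worth enabling here because detection models can leave branches unused depending on the task and head configuration.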
