Merge branch 'main' into afpn

Branch: afpn
Glenn Jocher (committed by GitHub), 1 year ago
commit dbba2203ef
14 changed files (lines changed in parentheses):

  1. docs/models/yolov8.md (12)
  2. requirements.txt (1)
  3. ultralytics/__init__.py (2)
  4. ultralytics/cfg/__init__.py (2)
  5. ultralytics/engine/exporter.py (2)
  6. ultralytics/engine/model.py (2)
  7. ultralytics/engine/trainer.py (2)
  8. ultralytics/engine/validator.py (2)
  9. ultralytics/models/fastsam/model.py (2)
 10. ultralytics/models/nas/model.py (2)
 11. ultralytics/models/rtdetr/model.py (4)
 12. ultralytics/models/yolo/classify/train.py (2)
 13. ultralytics/utils/__init__.py (29)
 14. ultralytics/utils/benchmarks.py (4)

docs/models/yolov8.md

@@ -21,12 +21,12 @@ YOLOv8 is the latest iteration in the YOLO series of real-time object detectors,

 ## Supported Tasks

 | Model Type  | Pre-trained Weights                                                                                                 | Task                  |
 |-------------|---------------------------------------------------------------------------------------------------------------------|-----------------------|
 | YOLOv8      | `yolov8n.pt`, `yolov8s.pt`, `yolov8m.pt`, `yolov8l.pt`, `yolov8x.pt`                                                | Detection             |
 | YOLOv8-seg  | `yolov8n-seg.pt`, `yolov8s-seg.pt`, `yolov8m-seg.pt`, `yolov8l-seg.pt`, `yolov8x-seg.pt`                            | Instance Segmentation |
-| YOLOv8-pose | `yolov8n-pose.pt`, `yolov8s-pose.pt`, `yolov8m-pose.pt`, `yolov8l-pose.pt`, `yolov8x-pose.pt` ,`yolov8x-pose-p6`    | Pose/Keypoints        |
+| YOLOv8-pose | `yolov8n-pose.pt`, `yolov8s-pose.pt`, `yolov8m-pose.pt`, `yolov8l-pose.pt`, `yolov8x-pose.pt`, `yolov8x-pose-p6.pt` | Pose/Keypoints        |
 | YOLOv8-cls  | `yolov8n-cls.pt`, `yolov8s-cls.pt`, `yolov8m-cls.pt`, `yolov8l-cls.pt`, `yolov8x-cls.pt`                            | Classification        |

 ## Supported Modes

requirements.txt

@@ -3,6 +3,7 @@
 # Base ----------------------------------------
 matplotlib>=3.2.2
+numpy>=1.22.2  # pinned by Snyk to avoid a vulnerability
 opencv-python>=4.6.0
 pillow>=7.1.2
 pyyaml>=5.3.1

ultralytics/__init__.py

@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license

-__version__ = '8.0.145'
+__version__ = '8.0.146'

 from ultralytics.hub import start
 from ultralytics.models import RTDETR, SAM, YOLO

ultralytics/cfg/__init__.py

@@ -253,7 +253,7 @@ def handle_yolo_settings(args: List[str]) -> None:
             SETTINGS_YAML.unlink()  # delete the settings file
             SETTINGS.reset()  # create new settings
             LOGGER.info('Settings reset successfully')  # inform the user that settings have been reset
-        else:
+        else:  # save a new setting
             new = dict(parse_key_value_pair(a) for a in args)
             check_dict_alignment(SETTINGS, new)
             SETTINGS.update(new)

ultralytics/engine/exporter.py

@@ -177,7 +177,7 @@ class Exporter:
         im = torch.zeros(self.args.batch, 3, *self.imgsz).to(self.device)
         file = Path(
             getattr(model, 'pt_path', None) or getattr(model, 'yaml_file', None) or model.yaml.get('yaml_file', ''))
-        if file.suffix == '.yaml':
+        if file.suffix in ('.yaml', '.yml'):
             file = Path(file.name)

         # Update model

ultralytics/engine/model.py

@@ -88,7 +88,7 @@ class Model:
         suffix = Path(model).suffix
         if not suffix and Path(model).stem in GITHUB_ASSET_STEMS:
             model, suffix = Path(model).with_suffix('.pt'), '.pt'  # add suffix, i.e. yolov8n -> yolov8n.pt
-        if suffix == '.yaml':
+        if suffix in ('.yaml', '.yml'):
             self._new(model, task)
         else:
             self._load(model, task)
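Note (not part of the diff): the Model constructor hunk above now routes both `.yaml` and `.yml` configs to `_new()` and everything else to `_load()`. A minimal sketch of that routing in isolation, assuming a hypothetical `route_model` helper rather than the real class:

from pathlib import Path

def route_model(model: str) -> str:
    """Hypothetical helper mirroring the suffix check in the hunk above."""
    suffix = Path(model).suffix
    if suffix in ('.yaml', '.yml'):
        return 'new'   # build a fresh model from a YAML definition, e.g. yolov8n.yaml
    return 'load'      # load pre-trained weights, e.g. yolov8n.pt

assert route_model('yolov8n.yml') == 'new'  # .yml is now treated like .yaml
assert route_model('yolov8n.pt') == 'load'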

ultralytics/engine/trainer.py

@@ -119,7 +119,7 @@ class BaseTrainer:
         try:
             if self.args.task == 'classify':
                 self.data = check_cls_dataset(self.args.data)
-            elif self.args.data.endswith('.yaml') or self.args.task in ('detect', 'segment'):
+            elif self.args.data.split('.')[-1] in ('yaml', 'yml') or self.args.task in ('detect', 'segment'):
                 self.data = check_det_dataset(self.args.data)
                 if 'yaml_file' in self.data:
                     self.args.data = self.data['yaml_file']  # for validating 'yolo train data=url.zip' usage

ultralytics/engine/validator.py

@@ -126,7 +126,7 @@ class BaseValidator:
                 self.args.batch = 1  # export.py models default to batch-size 1
                 LOGGER.info(f'Forcing batch=1 square inference (1,3,{imgsz},{imgsz}) for non-PyTorch models')

-            if isinstance(self.args.data, str) and self.args.data.endswith('.yaml'):
+            if isinstance(self.args.data, str) and self.args.data.split('.')[-1] in ('yaml', 'yml'):
                 self.data = check_det_dataset(self.args.data)
             elif self.args.task == 'classify':
                 self.data = check_cls_dataset(self.args.data, split=self.args.split)
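Note (not part of the diff): both the trainer and validator hunks replace `endswith('.yaml')` with a split-based extension check so `.yml` dataset files are also recognized. A minimal sketch, with a hypothetical `is_yaml_name` helper and example file names:

def is_yaml_name(data: str) -> bool:
    """Hypothetical helper using the same check as the hunks above."""
    return data.split('.')[-1] in ('yaml', 'yml')

assert not 'coco128.yml'.endswith('.yaml')  # the old check missed the short extension
assert is_yaml_name('coco128.yml')
assert is_yaml_name('coco128.yaml')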

ultralytics/models/fastsam/model.py

@@ -23,7 +23,7 @@ class FastSAM(Model):
         """Call the __init__ method of the parent class (YOLO) with the updated default model"""
         if model == 'FastSAM.pt':
             model = 'FastSAM-x.pt'
-        assert Path(model).suffix != '.yaml', 'FastSAM models only support pre-trained models.'
+        assert Path(model).suffix not in ('.yaml', '.yml'), 'FastSAM models only support pre-trained models.'
         super().__init__(model=model, task='segment')

     @property

ultralytics/models/nas/model.py

@@ -23,7 +23,7 @@ from .val import NASValidator

 class NAS(Model):

     def __init__(self, model='yolo_nas_s.pt') -> None:
-        assert Path(model).suffix != '.yaml', 'YOLO-NAS models only support pre-trained models.'
+        assert Path(model).suffix not in ('.yaml', '.yml'), 'YOLO-NAS models only support pre-trained models.'
         super().__init__(model, task='detect')

     @smart_inference_mode()

ultralytics/models/rtdetr/model.py

@@ -16,8 +16,8 @@ class RTDETR(Model):
     """

     def __init__(self, model='rtdetr-l.pt') -> None:
-        if model and not model.endswith('.pt') and not model.endswith('.yaml'):
-            raise NotImplementedError('RT-DETR only supports creating from pt file or yaml file.')
+        if model and not model.split('.')[-1] in ('pt', 'yaml', 'yml'):
+            raise NotImplementedError('RT-DETR only supports creating from *.pt file or *.yaml file.')
         super().__init__(model=model, task='detect')

     @property
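Note (not part of the diff): the RT-DETR constructor now accepts `.pt`, `.yaml`, and `.yml` sources and rejects anything else. A minimal sketch of just that validation, using a hypothetical `check_rtdetr_source` function rather than the real class:

def check_rtdetr_source(model: str) -> str:
    """Hypothetical stand-in for the constructor check in the hunk above."""
    if model and model.split('.')[-1] not in ('pt', 'yaml', 'yml'):
        raise NotImplementedError('RT-DETR only supports creating from *.pt file or *.yaml file.')
    return model

check_rtdetr_source('rtdetr-l.pt')        # accepted
check_rtdetr_source('rtdetr-l.yml')       # accepted after this change
try:
    check_rtdetr_source('rtdetr-l.onnx')  # still rejected
except NotImplementedError:
    pass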

ultralytics/models/yolo/classify/train.py

@@ -57,7 +57,7 @@ class ClassificationTrainer(BaseTrainer):
             self.model, _ = attempt_load_one_weight(model, device='cpu')
             for p in self.model.parameters():
                 p.requires_grad = True  # for training
-        elif model.endswith('.yaml'):
+        elif model.split('.')[-1] in ('yaml', 'yml'):
             self.model = self.get_model(cfg=model)
         elif model in torchvision.models.__dict__:
             self.model = torchvision.models.__dict__[model](weights='IMAGENET1K_V1' if self.args.pretrained else None)

ultralytics/utils/__init__.py

@@ -714,24 +714,6 @@ def set_sentry():
         logging.getLogger(logger).setLevel(logging.CRITICAL)


-def update_dict_recursive(d, u):
-    """
-    Recursively updates the dictionary `d` with the key-value pairs from the dictionary `u` without overwriting
-    entire sub-dictionaries. Note that function recursion is intended and not a problem, as this allows for updating
-    nested dictionaries at any arbitrary depth.
-
-    Args:
-        d (dict): The dictionary to be updated.
-        u (dict): The dictionary to update `d` with.
-
-    Returns:
-        (dict): The recursively updated dictionary.
-    """
-    for k, v in u.items():
-        d[k] = update_dict_recursive(d.get(k, {}), v) if isinstance(v, dict) else v
-    return d
-
-
 class SettingsManager(dict):
     """
     Manages Ultralytics settings stored in a YAML file.
@@ -792,20 +774,15 @@ class SettingsManager(dict):

     def load(self):
         """Loads settings from the YAML file."""
-        self.update(yaml_load(self.file))
+        super().update(yaml_load(self.file))

     def save(self):
         """Saves the current settings to the YAML file."""
         yaml_save(self.file, dict(self))

     def update(self, *args, **kwargs):
-        """Updates a setting value in the current settings and saves the settings."""
-        new = dict(*args, **kwargs)
-        if any(isinstance(v, dict) for v in new.values()):
-            update_dict_recursive(self, new)
-        else:
-            # super().update(*args, **kwargs)
-            super().update(new)
+        """Updates a setting value in the current settings."""
+        super().update(*args, **kwargs)
         self.save()

     def reset(self):
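Note (not part of the diff): with `update_dict_recursive` removed, `SettingsManager.update` is now a plain shallow `dict.update` followed by a save, so nested values are replaced rather than merged. A minimal in-memory sketch of that behavior (a hypothetical stand-in, not the real YAML-backed class):

class ShallowSettings(dict):
    """Hypothetical in-memory stand-in for the simplified SettingsManager."""

    def __init__(self, defaults=None):
        super().__init__(defaults or {})
        self.saved = None  # stands in for yaml_save(self.file, dict(self))

    def save(self):
        self.saved = dict(self)

    def update(self, *args, **kwargs):
        super().update(*args, **kwargs)  # shallow update, no recursive merge
        self.save()

s = ShallowSettings({'runs_dir': 'runs', 'sync': True})  # example keys, not the full settings schema
s.update(sync=False)
assert s.saved == {'runs_dir': 'runs', 'sync': False}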

ultralytics/utils/benchmarks.py

@@ -192,7 +192,7 @@ class ProfileModels:
         output = []
         for file in files:
             engine_file = file.with_suffix('.engine')
-            if file.suffix in ('.pt', '.yaml'):
+            if file.suffix in ('.pt', '.yaml', '.yml'):
                 model = YOLO(str(file))
                 model.fuse()  # to report correct params and GFLOPs in model.info()
                 model_info = model.info()

@@ -229,7 +229,7 @@ class ProfileModels:
             if path.is_dir():
                 extensions = ['*.pt', '*.onnx', '*.yaml']
                 files.extend([file for ext in extensions for file in glob.glob(str(path / ext))])
-            elif path.suffix in {'.pt', '.yaml'}:  # add non-existing
+            elif path.suffix in ('.pt', '.yaml', '.yml'):  # add non-existing
                 files.append(str(path))
             else:
                 files.extend(glob.glob(str(path)))
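Note (not part of the diff): `ProfileModels` now also accepts `.yml` paths when collecting files to profile. A short sketch of the same path-filtering logic in isolation, assuming a hypothetical `collect_files` helper; the directory glob patterns are taken from the hunk above:

import glob
from pathlib import Path

def collect_files(paths):
    """Hypothetical helper mirroring the file-gathering logic shown above."""
    files = []
    for p in paths:
        path = Path(p)
        if path.is_dir():
            extensions = ['*.pt', '*.onnx', '*.yaml']
            files.extend(f for ext in extensions for f in glob.glob(str(path / ext)))
        elif path.suffix in ('.pt', '.yaml', '.yml'):  # keep explicit model/config paths even if absent
            files.append(str(path))
        else:
            files.extend(glob.glob(str(path)))
    return sorted(set(files))

print(collect_files(['yolov8n.yaml', 'yolov8n.yml']))  # both now pass the suffix filter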
