@@ -791,6 +791,8 @@ class BaseTrainer:
                 else:  # weight (with decay)
                     g[0].append(param)
 
+        optimizers = {"Adam", "Adamax", "AdamW", "NAdam", "RAdam", "RMSProp", "SGD", "auto"}
+        name = {x.lower(): x for x in optimizers}.get(name.lower(), None)
         if name in {"Adam", "Adamax", "AdamW", "NAdam", "RAdam"}:
             optimizer = getattr(optim, name, optim.Adam)(g[2], lr=lr, betas=(momentum, 0.999), weight_decay=0.0)
         elif name == "RMSProp":
@@ -799,9 +801,8 @@ class BaseTrainer:
             optimizer = optim.SGD(g[2], lr=lr, momentum=momentum, nesterov=True)
         else:
             raise NotImplementedError(
-                f"Optimizer '{name}' not found in list of available optimizers "
-                f"[Adam, AdamW, NAdam, RAdam, RMSProp, SGD, auto]."
-                "Request support for addition optimizers at https://github.com/ultralytics/ultralytics."
+                f"Optimizer '{name}' not found in list of available optimizers {optimizers}. "
+                "To request support for addition optimizers please visit https://github.com/ultralytics/ultralytics."
             )
 
         optimizer.add_param_group({"params": g[0], "weight_decay": decay})  # add g0 with weight_decay
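
The net effect of the first hunk is that optimizer names become case-insensitive: the added dict comprehension maps each lowercased name back to its canonical spelling, and any unknown name resolves to None so the NotImplementedError in the second hunk fires with the full set of supported optimizers in the message. A minimal standalone sketch of that lookup, assuming the same optimizers set; the resolve_optimizer_name helper is hypothetical and not part of the diff:

optimizers = {"Adam", "Adamax", "AdamW", "NAdam", "RAdam", "RMSProp", "SGD", "auto"}

def resolve_optimizer_name(name):
    # Map a user-supplied name to its canonical spelling regardless of case,
    # mirroring the lookup added in the diff; returns None for unsupported names.
    return {x.lower(): x for x in optimizers}.get(name.lower(), None)

print(resolve_optimizer_name("adamw"))    # AdamW
print(resolve_optimizer_name("RMSPROP"))  # RMSProp
print(resolve_optimizer_name("lion"))     # None -> would raise NotImplementedError downstream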