diff --git a/src/apex/optimizers/npu_fused_adadelta.py b/src/apex/optimizers/npu_fused_adadelta.py
index 3dedec9b7eaf7d64145d7424615eba4af87a134a..247f3215bc77cf88069ea81e05dd481de49b5096 100644
--- a/src/apex/optimizers/npu_fused_adadelta.py
+++ b/src/apex/optimizers/npu_fused_adadelta.py
@@ -69,6 +69,8 @@ class NpuFusedAdadelta(Optimizer):
             raise ValueError("Invalid epsilon value: {}".format(eps))
         if weight_decay < 0.0:
             raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
+        if lr < 0.0:
+            raise ValueError("Invalid learning rate: {}".format(lr))
         defaults = dict(lr=lr, rho=rho, eps=eps, weight_decay=weight_decay)
         self.is_npu_fused_optimizer = True
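
A minimal sketch of how the added validation would behave, assuming `NpuFusedAdadelta` is exported from `apex.optimizers` in an NPU-enabled apex build (the import path and usage below are illustrative, not part of this diff):

```python
import torch
from apex.optimizers import NpuFusedAdadelta  # assumed import path

# A dummy parameter list so the optimizer can be constructed.
params = [torch.nn.Parameter(torch.zeros(2, 2))]

try:
    # A negative learning rate should now be rejected by the new check.
    NpuFusedAdadelta(params, lr=-0.1)
except ValueError as e:
    print(e)  # expected: "Invalid learning rate: -0.1"
```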