jettify / pytorch-optimizer

Use of an unneeded not detected PYL-C0113
Anti-pattern
Major
4 years ago
Consider changing "not 0.0 <= weight_decay" to "0.0 > weight_decay"
 51            raise ValueError("Invalid epsilon value: {}".format(eps))
 52        if not 0.0 <= beta < 1.0:
 53            raise ValueError("Invalid beta parameter: {}".format(beta))
 54        if not 0.0 <= weight_decay:
 55            raise ValueError(
 56                "Invalid weight_decay value: {}".format(weight_decay)
 57            )
Consider changing "not 0.0 <= warmup" to "0.0 > warmup"
 55            raise ValueError(
 56                "Invalid weight_decay value: {}".format(weight_decay)
 57            )
 58        if not 0.0 <= warmup:
 59            raise ValueError("Invalid warmup updates: {}".format(warmup))
 60        if not 0.0 <= init_lr <= 1.0:
 61            raise ValueError(
Consider changing "not 0.0 <= lr" to "0.0 > lr"
 49        amsgrad: bool = False,
 50        nesterov: bool = False,
 51    ):
 52        if not 0.0 <= lr:
 53            raise ValueError("Invalid learning rate: {}".format(lr))
 54        if not 0.0 <= eps:
 55            raise ValueError("Invalid epsilon value: {}".format(eps))
Consider changing "not 0.0 <= eps" to "0.0 > eps"
 51    ):
 52        if not 0.0 <= lr:
 53            raise ValueError("Invalid learning rate: {}".format(lr))
 54        if not 0.0 <= eps:
 55            raise ValueError("Invalid epsilon value: {}".format(eps))
 56        if not 0.0 <= betas[0] < 1.0:
 57            raise ValueError(