jettify / pytorch-optimizer

Misplaced comparison constant PYL-C2201
Style
Major
6 occurrences in this check
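PYL-C2201 flags "yoda" comparisons, where the literal constant sits on the left-hand side of the operator. Every occurrence below follows the same pattern; here is a minimal before/after sketch, using lr as a representative hyperparameter (the eps and weight_decay checks are analogous):

    # Flagged form: the constant 0.0 appears on the left of the comparison.
    if not 0.0 <= lr:
        raise ValueError('Invalid learning rate: {}'.format(lr))

    # Suggested form: put the variable first, as the check message recommends.
    if not lr >= 0.0:
        raise ValueError('Invalid learning rate: {}'.format(lr))

Simplifying further to "if lr < 0.0:" would change behaviour for NaN inputs (NaN fails lr >= 0.0 but also fails lr < 0.0), so flipping the operands is the safer mechanical fix.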
Comparison should be weight_decay >= 0.0
208            raise ValueError(
209                'Invalid beta parameter at index 1: {}'.format(betas[1])
210            )
211        if not 0.0 <= weight_decay:
212            raise ValueError(
213                'Invalid weight_decay value: {}'.format(weight_decay)
214            )
Comparison should be eps >= 0.0
198    ):
199        if not 0.0 <= lr:
200            raise ValueError('Invalid learning rate: {}'.format(lr))
201        if not 0.0 <= eps:
202            raise ValueError('Invalid epsilon value: {}'.format(eps))
203        if not 0.0 <= betas[0] < 1.0:
204            raise ValueError(
Comparison should be lr >= 0.0
196        amsgrad=False,
197        adamd_bias_correction: bool = False,
198    ):
199        if not 0.0 <= lr:
200            raise ValueError('Invalid learning rate: {}'.format(lr))
201        if not 0.0 <= eps:
202            raise ValueError('Invalid epsilon value: {}'.format(eps))
Comparison should be weight_decay >= 0.0
399            raise ValueError(
400                'Invalid beta parameter at index 1: {}'.format(betas[1])
401            )
402        if not 0.0 <= weight_decay:
403            raise ValueError(
404                'Invalid weight_decay value: {}'.format(weight_decay)
405            )
Comparison should be eps >= 0.0
389    ):
390        if not 0.0 <= lr:
391            raise ValueError('Invalid learning rate: {}'.format(lr))
392        if not 0.0 <= eps:
393            raise ValueError('Invalid epsilon value: {}'.format(eps))
394        if not 0.0 <= betas[0] < 1.0:
395            raise ValueError(
Comparison should be lr >= 0.0
387        amsgrad=False,
388        adamd_bias_correction: bool = False,
389    ):
390        if not 0.0 <= lr:
391            raise ValueError('Invalid learning rate: {}'.format(lr))
392        if not 0.0 <= eps:
393            raise ValueError('Invalid epsilon value: {}'.format(eps))
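For reference, a hypothetical consolidated sketch of the validation preamble with all six flagged comparisons rewritten. The chained betas range checks (0.0 <= betas[0] < 1.0) are not among the reported occurrences and would stay as they are; the helper name _check_hyperparams is illustrative only, not part of the repository.

    # Sketch only: constants moved to the right-hand side of each comparison.
    def _check_hyperparams(lr, eps, weight_decay):
        if not lr >= 0.0:
            raise ValueError('Invalid learning rate: {}'.format(lr))
        if not eps >= 0.0:
            raise ValueError('Invalid epsilon value: {}'.format(eps))
        if not weight_decay >= 0.0:
            raise ValueError(
                'Invalid weight_decay value: {}'.format(weight_decay)
            )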