jettify / pytorch-optimizer

Consider using f-strings PYL-C0209
Performance
Minor
a year ago
Formatting a regular string which could be a f-string
 48        weight_decay: float = 0,
 49    ) -> None:
 50        if lr <= 0.0:
 51            raise ValueError("Invalid learning rate: {}".format(lr))
 52        if eps < 0.0:
 53            raise ValueError("Invalid epsilon value: {}".format(eps))
 54        if not 0.0 <= betas[0] < 1.0:
Formatting a regular string which could be a f-string
 61            )
 62        if weight_decay < 0:
 63            raise ValueError(
 64                "Invalid weight_decay value: {}".format(weight_decay)
 65            )
 66
 67        defaults = dict(
Formatting a regular string which could be a f-string
 50        if lr <= 0.0:
 51            raise ValueError("Invalid learning rate: {}".format(lr))
 52        if eps < 0.0:
 53            raise ValueError("Invalid epsilon value: {}".format(eps))
 54        if not 0.0 <= betas[0] < 1.0:
 55            raise ValueError(
 56                "Invalid beta parameter at index 0: {}".format(betas[0])
Formatting a regular string which could be a f-string
 53            raise ValueError("Invalid epsilon value: {}".format(eps))
 54        if not 0.0 <= betas[0] < 1.0:
 55            raise ValueError(
 56                "Invalid beta parameter at index 0: {}".format(betas[0])
 57            )
 58        if not 0.0 <= betas[1] < 1.0:
 59            raise ValueError(
Formatting a regular string which could be a f-string
 57            )
 58        if not 0.0 <= betas[1] < 1.0:
 59            raise ValueError(
 60                "Invalid beta parameter at index 1: {}".format(betas[1])
 61            )
 62        if weight_decay < 0:
 63            raise ValueError(