Skip to content

Commit 399fa45

Browse files
committed
Black reformatting
1 parent bad2cba commit 399fa45

File tree

1 file changed

+5
-2
lines changed

1 file changed

+5
-2
lines changed

pytorch_toolbelt/optimization/functional.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@ def get_lr_decay_parameters(model: nn.Module, learning_rate: float, lr_multiplie
1414
groups: {"encoder": 0.1 ,"encoder.layer2": 0.2}
1515
"""
1616
custom_lr_parameters = dict(
17-
(group_name, {"params": [], "lr": learning_rate * lr_factor}) for (group_name, lr_factor) in lr_multipliers.items()
17+
(group_name, {"params": [], "lr": learning_rate * lr_factor})
18+
for (group_name, lr_factor) in lr_multipliers.items()
1819
)
1920
custom_lr_parameters["default"] = {"params": [], "lr": learning_rate}
2021

@@ -45,7 +46,9 @@ def get_optimizable_parameters(model: nn.Module) -> Iterator[nn.Parameter]:
4546
return filter(lambda x: x.requires_grad, model.parameters())
4647

4748

48-
def freeze_model(module: nn.Module, freeze_parameters: Optional[bool] = True, freeze_bn: Optional[bool] = True) -> nn.Module:
49+
def freeze_model(
50+
module: nn.Module, freeze_parameters: Optional[bool] = True, freeze_bn: Optional[bool] = True
51+
) -> nn.Module:
4952
"""
5053
Change 'requires_grad' value for module and it's child modules and
5154
optionally freeze batchnorm modules.

0 commit comments

Comments (0)