Skip to content

Commit f1114e8

Browse files
committed
Black reformatting
1 parent 01c9021 commit f1114e8

File tree

2 files changed

+8
-2
lines changed

2 files changed

+8
-2
lines changed

pytorch_toolbelt/inference/ensembling.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,11 @@ class ApplySoftmaxTo(nn.Module):
1313
dim: int
1414

1515
def __init__(
16-
self, model: nn.Module, output_key: Union[str, int, Iterable[str]] = "logits", dim: int = 1, temperature: float = 1
16+
self,
17+
model: nn.Module,
18+
output_key: Union[str, int, Iterable[str]] = "logits",
19+
dim: int = 1,
20+
temperature: float = 1,
1721
):
1822
"""
1923
Apply softmax activation on given output(s) of the model

pytorch_toolbelt/utils/torch_utils.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,9 @@ def logit(x: torch.Tensor, eps=1e-5) -> torch.Tensor:
8686
return torch.log(x / (1.0 - x))
8787

8888

89-
def count_parameters(model: nn.Module, keys: Optional[Sequence[str]] = None, human_friendly: bool = False) -> Dict[str, int]:
89+
def count_parameters(
90+
model: nn.Module, keys: Optional[Sequence[str]] = None, human_friendly: bool = False
91+
) -> Dict[str, int]:
9092
"""
9193
Count number of total and trainable parameters of a model
9294
:param model: A model

0 commit comments

Comments (0)