
Commit c68334b

craymichael authored and facebook-github-bot committed
Fixes for mypy 1.12.0 in OSS tests (#1371)
Summary:
Pull Request resolved: #1371

Fix or ignore all new type checks due to the release of mypy 1.12.0

Reviewed By: vivekmig

Differential Revision: D64412835

fbshipit-source-id: 9514ed10069c75ed0eb8f257a162174eafb678d4
1 parent 4cb2808 commit c68334b


13 files changed, +35 −20 lines changed

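The changes fall into two recurring patterns: narrowing a value's type explicitly with typing.cast where mypy 1.12.0 no longer infers it, and suppressing a check in place with # type: ignore where the existing pyre-fixme comments already document the gap. A minimal, hypothetical sketch of both patterns (the function names and values are illustrative, not taken from captum):

from typing import List, Optional, Union, cast

import torch
from torch import Tensor


def sum_states(states: List[Union[Tensor, float]]) -> Tensor:
    # Pattern 1: every element is known to be a Tensor in this code path,
    # but the declared element type is a union, so narrow it with cast().
    tensors = cast(List[Tensor], states)
    return torch.stack(tensors).sum()


def scale(x: Tensor, factor: float) -> Tensor:
    return x * factor


def apply_scale(x: Tensor, factor: Optional[float]) -> Tensor:
    # Pattern 2: callers guarantee factor is not None, so the new
    # "Optional[float] vs float" error is silenced rather than restructured.
    return scale(x, factor)  # type: ignore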

captum/attr/_core/dataloader_attr.py

Lines changed: 6 additions & 6 deletions

@@ -3,7 +3,7 @@
 # pyre-strict
 from collections import defaultdict
 from copy import copy
-from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
+from typing import Any, Callable, cast, Dict, Iterable, List, Optional, Tuple, Union

 import torch
 from captum._utils.common import (
@@ -193,8 +193,7 @@ def _forward_with_dataloader(
 feature_mask: Tuple[Tensor, ...],
 # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
 reduce: Callable,
-# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
-to_metric: Optional[Callable],
+to_metric: Optional[Callable[[Tensor], Tensor]],
 show_progress: bool,
 feature_idx_to_mask_idx: Dict[int, List[int]],
 ) -> Tensor:
@@ -243,7 +242,8 @@ def _forward_with_dataloader(

 accum_states[i] = reduce(accum_states[i], output, perturbed_inputs)

-accum_results = [
+accum_states = cast(List[Tensor], accum_states)
+accum_results: List[Tensor] = [
 to_metric(accum) if to_metric else accum for accum in accum_states
 ]

@@ -276,7 +276,7 @@ def attribute(
 Args:

 dataloader (torch.Dataloader): the dataloader to attribute, which should
-return a tuple of consistant size for every iteration
+return a tuple of consistent size for every iteration
 input_roles (tuple[int, ...], optional): a tuple of integers to define the
 role of each element returned from the dataloader. It should
 have the same size as the return of the dataloader.
@@ -326,7 +326,7 @@ def attribute(
 traverses needed is
 ceil(n_perturbations / perturbations_per_pass).

-This arguement offers control of the trade-off between memory
+This argument offers control of the trade-off between memory
 and efficiency. If the dataloader involves slow operations like
 remote request or file I/O, multiple traversals can be
 inefficient. On the other hand, each perturbation needs to
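The hunks above tighten to_metric from a bare Callable to Callable[[Tensor], Tensor] and add a cast on accum_states, so mypy can check both the call and the element type of the list comprehension. A small hypothetical sketch of what the parameterized form buys (names are illustrative, not captum's API):

from typing import Callable, List, Optional

import torch
from torch import Tensor


def finalize(
    accum_states: List[Tensor],
    to_metric: Optional[Callable[[Tensor], Tensor]] = None,
) -> List[Tensor]:
    # With Callable[[Tensor], Tensor], mypy verifies that to_metric is called
    # with exactly one Tensor and that the comprehension yields Tensors.
    return [to_metric(state) if to_metric else state for state in accum_states]


print(finalize([torch.ones(3), torch.zeros(3)], to_metric=lambda t: t * 2))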

captum/attr/_core/lime.py

Lines changed: 4 additions & 1 deletion

@@ -522,7 +522,10 @@ def attribute(
 if show_progress:
 attr_progress.close()

-combined_interp_inps = torch.cat(interpretable_inps).float()
+# Argument 1 to "cat" has incompatible type
+# "list[Tensor | tuple[Tensor, ...]]";
+# expected "tuple[Tensor, ...] | list[Tensor]" [arg-type]
+combined_interp_inps = torch.cat(interpretable_inps).float() # type: ignore
 combined_outputs = (
 torch.cat(outputs)
 if len(outputs[0].shape) > 0
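Here interpretable_inps is declared as a list that may hold Tensors or tuples of Tensors, so torch.cat, which expects a sequence of Tensors, fails the new arg-type check even though every element is a Tensor at runtime in this branch. A hypothetical reproduction of the situation; the scoped form # type: ignore[arg-type] shown below also works and suppresses only that error code:

from typing import List, Tuple, Union

import torch
from torch import Tensor

# Declared as a union because other code paths may store tuples of Tensors.
interpretable_inps: List[Union[Tensor, Tuple[Tensor, ...]]] = [
    torch.ones(1, 4),
    torch.zeros(1, 4),
]

# Every element is a Tensor here, but mypy only sees the declared union,
# so the call needs a suppression (or an explicit cast) to pass.
combined = torch.cat(interpretable_inps).float()  # type: ignore[arg-type]
print(combined.shape)  # torch.Size([2, 4])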

captum/concept/_utils/classifier.py

Lines changed: 3 additions & 1 deletion

@@ -186,7 +186,9 @@ def train_and_eval(
 x_train, x_test, y_train, y_test = _train_test_split(
 torch.cat(inputs), torch.cat(labels), test_split=test_split_ratio
 )
-self.lm.device = device
+# error: Incompatible types in assignment (expression has type "str | Any",
+# variable has type "Tensor | Module") [assignment]
+self.lm.device = device # type: ignore
 self.lm.fit(DataLoader(TensorDataset(x_train, y_train)))

 predict = self.lm(x_test)

captum/log/__init__.py

Lines changed: 3 additions & 3 deletions

@@ -24,7 +24,7 @@
 except ImportError:
 from functools import wraps

-def log(*args: Any, **kwargs: Any) -> None:
+def log(*args: Any, **kwargs: Any) -> None: # type: ignore
 pass

 # bug with mypy: https://github.com/python/mypy/issues/1153
@@ -56,12 +56,12 @@ def wrapper(*args: Any, **kwargs: Any):
 return _log_usage

 # pyre-fixme[2]: Parameter must be annotated.
-def set_environment(env) -> None:
+def set_environment(env) -> None: # type: ignore
 pass

 def disable_detailed_logging() -> None:
 pass

 # pyre-fixme[2]: Parameter must be annotated.
-def patch_methods(tester, patch_log: bool = True) -> None:
+def patch_methods(tester, patch_log: bool = True) -> None: # type: ignore
 pass
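These stubs sit in the except ImportError branch of captum/log/__init__.py, and mypy 1.12.0 now compares them against the names imported in the try branch, reporting signature mismatches as incompatible redefinitions. A minimal, hypothetical reproduction of the pattern (the internal module name is made up):

from typing import Any

try:
    # The preferred implementation; this import fails in OSS environments.
    from some_internal_logging import log  # type: ignore
except ImportError:

    # Fallback stub: mypy flags it if its signature differs from the
    # imported name, so the redefinition is suppressed explicitly.
    def log(*args: Any, **kwargs: Any) -> None:  # type: ignore
        pass


log("this call reaches the no-op fallback in OSS builds")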

captum/module/gaussian_stochastic_gates.py

Lines changed: 3 additions & 3 deletions

@@ -81,7 +81,7 @@ def __init__(
 mask=mask,
 # pyre-fixme[6]: For 3rd argument expected `float` but got
 # `Optional[float]`.
-reg_weight=reg_weight,
+reg_weight=reg_weight, # type: ignore
 reg_reduction=reg_reduction,
 )

@@ -91,7 +91,7 @@ def __init__(

 # pyre-fixme[58]: `<` is not supported for operand types `int` and
 # `Optional[float]`.
-assert 0 < std, f"the standard deviation should be positive, received {std}"
+assert 0 < std, f"the standard deviation should be positive, received {std}" # type: ignore # noqa: E501 line too long
 self.std = std

 def _sample_gate_values(self, batch_size: int) -> Tensor:
@@ -109,7 +109,7 @@ def _sample_gate_values(self, batch_size: int) -> Tensor:
 n = torch.empty(batch_size, self.n_gates, device=self.mu.device)
 # pyre-fixme[6]: For 2nd argument expected `float` but got
 # `Optional[float]`.
-n.normal_(mean=0, std=self.std)
+n.normal_(mean=0, std=self.std) # type: ignore
 return self.mu + n

 return self.mu.expand(batch_size, self.n_gates)

tests/attr/helpers/gen_test_utils.py

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ def parse_test_config(
 baseline_distr = (
 test_config["baseline_distr"] if "baseline_distr" in test_config else False
 )
-return algorithms, model, args, layer, noise_tunnel, baseline_distr
+return algorithms, model, args, layer, noise_tunnel, baseline_distr # type: ignore


 def should_create_generated_test(algorithm: Type[Attribution]) -> bool:

tests/attr/layer/test_layer_gradient_shap.py

Lines changed: 1 addition & 1 deletion

@@ -201,7 +201,7 @@ def _assert_attributions(
 if expected_delta is None:
 assert_attribution_delta(
 # pyre-fixme[6]: For 1st argument expected `FbBaseTest` but got `Test`.
-self,
+self, # type: ignore
 inputs,
 attrs,
 n_samples,

tests/helpers/__init__.py

Lines changed: 4 additions & 1 deletion

@@ -10,4 +10,7 @@
 ]

 except ImportError:
-from tests.helpers.basic import BaseTest
+# tests/helpers/__init__.py:13: error: Incompatible import of "BaseTest"
+# (imported name has type "type[BaseTest]", local name has type
+# "type[FbBaseTest]") [assignment]
+from tests.helpers.basic import BaseTest # type: ignore

tests/helpers/influence/common.py

Lines changed: 1 addition & 0 deletions

@@ -409,6 +409,7 @@ def get_random_model_and_data(
 in_features, out_features, num_samples, use_gpu, unpack_inputs
 )

+net: Union[BasicLinearNet, MultLinearNet, Linear, UnpackLinear]
 if model_type == "random":
 net = (
 BasicLinearNet(in_features, hidden_nodes, out_features)
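The single added line declares net with a union of the possible model classes before the branching assignments, so mypy checks each branch against the same declared type instead of locking onto whichever class the first branch assigns. A small hypothetical sketch of the idea using plain torch modules:

from typing import Union

import torch.nn as nn


def build_layer(wide: bool) -> nn.Module:
    # Without this annotation, mypy 1.12.0 infers the type of `layer` from
    # the first assignment and then rejects the other branch.
    layer: Union[nn.Linear, nn.Bilinear]
    if wide:
        layer = nn.Linear(8, 4)
    else:
        layer = nn.Bilinear(8, 8, 4)
    return layer


print(build_layer(True))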

tests/influence/_core/test_tracin_regression.py

Lines changed: 2 additions & 2 deletions

@@ -31,7 +31,7 @@
 class TestTracInRegression(BaseTest):
 def _test_tracin_regression_setup(
 self, tmpdir: str, features: int, use_gpu: bool = False
-) -> Tuple[RangeDataset, Dict[str, Any]]:
+) -> Tuple[RangeDataset, Dict[str, Any]]: # fixme (return type)
 low = 1
 high = 17
 dataset = RangeDataset(low, high, features, use_gpu)
@@ -49,7 +49,7 @@ def _test_tracin_regression_setup(
 torch.save(net_adjusted.state_dict(), os.path.join(tmpdir, checkpoint_name))

 # pyre-fixme[61]: `net_adjusted` is undefined, or not always defined.
-return dataset, net_adjusted
+return dataset, net_adjusted # type: ignore

 use_gpu_list = (
 [True, False]

tests/influence/_core/test_tracin_xor.py

Lines changed: 1 addition & 1 deletion

@@ -167,7 +167,7 @@ def _test_tracin_xor_setup(

 dataset = BinaryDataset(use_gpu)

-return net_adjusted, dataset
+return net_adjusted, dataset # type: ignore

 parametrized_list: List[
 Tuple[Optional[str], DataInfluenceConstructor, str, bool]

tests/module/test_binary_concrete_stochastic_gates.py

Lines changed: 3 additions & 0 deletions

@@ -18,6 +18,9 @@
 ]
 )
 class TestBinaryConcreteStochasticGates(BaseTest):
+# pyre-fixme[13]: Attribute `testing_device` is never initialized.
+testing_device: str
+
 def setUp(self) -> None:
 super().setUp()
 # pyre-fixme[16]: `TestBinaryConcreteStochasticGates` has no attribute

tests/module/test_gaussian_stochastic_gates.py

Lines changed: 3 additions & 0 deletions

@@ -19,6 +19,9 @@
 ]
 )
 class TestGaussianStochasticGates(BaseTest):
+# pyre-fixme[13]: Attribute `testing_device` is never initialized.
+testing_device: str
+
 def setUp(self) -> None:
 super().setUp()
 # pyre-fixme[16]: `TestGaussianStochasticGates` has no attribute
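Both test classes gain a class-level testing_device: str annotation: the attribute is only populated at runtime (the decorator above each class supplies the values), so the annotation is what lets mypy accept later reads of self.testing_device. A hypothetical, self-contained version of the pattern:

import unittest


class DeviceTest(unittest.TestCase):
    # Declared here so mypy knows the attribute exists, even though it is
    # only assigned at runtime in setUp().
    testing_device: str

    def setUp(self) -> None:
        super().setUp()
        self.testing_device = "cpu"

    def test_reads_device(self) -> None:
        self.assertEqual(self.testing_device, "cpu")


if __name__ == "__main__":
    unittest.main()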
