
Commit 47dc19d

igorsugak authored and facebook-github-bot committed
Upgrade fbcode/pytorch to Python Scientific Stack 2
Differential Revision: D64008689
1 parent d2c6f5e commit 47dc19d

12 files changed: +62 additions, -48 deletions

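The change is mechanical across the files below: keep `import numpy as np` wherever NumPy is used at runtime, add `import numpy.typing as npt`, and switch type annotations from the concrete `np.ndarray` class to the `npt.NDArray` alias. A minimal before/after sketch (illustrative only; the function is hypothetical and not taken from the diff):

import numpy as np
import numpy.typing as npt

# Before: annotations reference the concrete ndarray class.
def norm_before(x: np.ndarray) -> float:
    return float(np.linalg.norm(x))

# After: annotations use the numpy.typing alias; runtime behavior is unchanged.
def norm_after(x: npt.NDArray) -> float:
    return float(np.linalg.norm(x))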

botorch/exceptions/errors.py

Lines changed: 3 additions & 2 deletions
@@ -11,6 +11,7 @@
 from typing import Any
 
 import numpy as np
+import numpy.typing as npt
 
 
 class BotorchError(Exception):
@@ -59,7 +60,7 @@ class OptimizationTimeoutError(BotorchError):
     r"""Exception raised when optimization times out."""
 
     def __init__(
-        self, /, *args: Any, current_x: np.ndarray, runtime: float, **kwargs: Any
+        self, /, *args: Any, current_x: npt.NDArray, runtime: float, **kwargs: Any
     ) -> None:
         r"""
         Args:
@@ -77,7 +78,7 @@ def __init__(
 class OptimizationGradientError(BotorchError, RuntimeError):
     r"""Exception raised when gradient array `gradf` containts NaNs."""
 
-    def __init__(self, /, *args: Any, current_x: np.ndarray, **kwargs: Any) -> None:
+    def __init__(self, /, *args: Any, current_x: npt.NDArray, **kwargs: Any) -> None:
         r"""
         Args:
             *args: Standard args to `BoTorchError`.

botorch/generation/gen.py

Lines changed: 3 additions & 2 deletions
@@ -17,6 +17,7 @@
 from typing import Any, NoReturn
 
 import numpy as np
+import numpy.typing as npt
 import torch
 from botorch.acquisition import AcquisitionFunction
 from botorch.exceptions.errors import OptimizationGradientError
@@ -191,7 +192,7 @@ def gen_candidates_scipy(
     with_grad = options.get("with_grad", True)
     if with_grad:
 
-        def f_np_wrapper(x: np.ndarray, f: Callable):
+        def f_np_wrapper(x: npt.NDArray, f: Callable):
             """Given a torch callable, compute value + grad given a numpy array."""
             if np.isnan(x).any():
                 raise RuntimeError(
@@ -223,7 +224,7 @@ def f_np_wrapper(x: np.ndarray, f: Callable):
 
     else:
 
-        def f_np_wrapper(x: np.ndarray, f: Callable):
+        def f_np_wrapper(x: npt.NDArray, f: Callable):
             X = torch.from_numpy(x).to(initial_conditions).view(shapeX).contiguous()
             with torch.no_grad():
                 X_fix = fix_features(X=X, fixed_features=fixed_features)

botorch/models/pairwise_gp.py

Lines changed: 5 additions & 4 deletions
@@ -26,6 +26,7 @@
 from typing import Any
 
 import numpy as np
+import numpy.typing as npt
 import torch
 from botorch.acquisition.objective import PosteriorTransform
 from botorch.exceptions import UnsupportedError
@@ -397,13 +398,13 @@ def _prior_predict(self, X: Tensor) -> tuple[Tensor, Tensor]:
 
     def _grad_posterior_f(
         self,
-        utility: Tensor | np.ndarray,
+        utility: Tensor | npt.NDArray,
         datapoints: Tensor,
         D: Tensor,
         covar_chol: Tensor,
         covar_inv: Tensor | None = None,
         ret_np: bool = False,
-    ) -> Tensor | np.ndarray:
+    ) -> Tensor | npt.NDArray:
         r"""Compute the gradient of S loss wrt to f/utility in [Chu2005preference]_.
 
         For finding f_map, which is negative of the log posterior, i.e., -log(p(f|D))
@@ -441,13 +442,13 @@ def _grad_posterior_f(
 
     def _hess_posterior_f(
         self,
-        utility: Tensor | np.ndarray,
+        utility: Tensor | npt.NDArray,
         datapoints: Tensor,
         D: Tensor,
         covar_chol: Tensor,
         covar_inv: Tensor,
         ret_np: bool = False,
-    ) -> Tensor | np.ndarray:
+    ) -> Tensor | npt.NDArray:
         r"""Compute the hessian of S loss wrt utility for finding f_map.
 
         which is negative of the log posterior, i.e., -log(p(f|D))

botorch/optim/closures/core.py

Lines changed: 12 additions & 10 deletions
@@ -13,6 +13,8 @@
 from functools import partial
 from typing import Any
 
+import numpy.typing as npt
+
 import torch
 from botorch.optim.utils import (
     _handle_numerical_errors,
@@ -82,10 +84,10 @@ def __init__(
         self,
         closure: Callable[[], tuple[Tensor, Sequence[Tensor | None]]],
         parameters: dict[str, Tensor],
-        as_array: Callable[[Tensor], ndarray] = None,  # pyre-ignore [9]
-        as_tensor: Callable[[ndarray], Tensor] = torch.as_tensor,
-        get_state: Callable[[], ndarray] = None,  # pyre-ignore [9]
-        set_state: Callable[[ndarray], None] = None,  # pyre-ignore [9]
+        as_array: Callable[[Tensor], npt.NDArray] = None,  # pyre-ignore [9]
+        as_tensor: Callable[[npt.NDArray], Tensor] = torch.as_tensor,
+        get_state: Callable[[], npt.NDArray] = None,  # pyre-ignore [9]
+        set_state: Callable[[npt.NDArray], None] = None,  # pyre-ignore [9]
         fill_value: float = 0.0,
         persistent: bool = True,
     ) -> None:
@@ -140,11 +142,11 @@ def __init__(
 
         self.fill_value = fill_value
         self.persistent = persistent
-        self._gradient_ndarray: ndarray | None = None
+        self._gradient_ndarray: npt.NDArray | None = None
 
     def __call__(
-        self, state: ndarray | None = None, **kwargs: Any
-    ) -> tuple[ndarray, ndarray]:
+        self, state: npt.NDArray | None = None, **kwargs: Any
+    ) -> tuple[npt.NDArray, npt.NDArray]:
         if state is not None:
             self.state = state
 
@@ -164,14 +166,14 @@ def __call__(
         return value, grads
 
     @property
-    def state(self) -> ndarray:
+    def state(self) -> npt.NDArray:
         return self._get_state()
 
     @state.setter
-    def state(self, state: ndarray) -> None:
+    def state(self, state: npt.NDArray) -> None:
         self._set_state(state)
 
-    def _get_gradient_ndarray(self, fill_value: float | None = None) -> ndarray:
+    def _get_gradient_ndarray(self, fill_value: float | None = None) -> npt.NDArray:
         if self.persistent and self._gradient_ndarray is not None:
             if fill_value is not None:
                 self._gradient_ndarray.fill(fill_value)
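For context on the signatures above: `as_array` and `as_tensor` are converter callables between torch tensors and NumPy arrays. A hypothetical pair satisfying these `npt.NDArray`-based annotations (illustrative only, not taken from the diff):

import numpy.typing as npt
import torch
from torch import Tensor

def to_array(values: Tensor) -> npt.NDArray:
    # Detach and move to CPU before handing the buffer to NumPy.
    return values.detach().cpu().numpy()

def to_tensor(array: npt.NDArray) -> Tensor:
    # torch.as_tensor shares memory with the NumPy array when possible.
    return torch.as_tensor(array)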

botorch/optim/core.py

Lines changed: 4 additions & 2 deletions
@@ -17,6 +17,8 @@
 from time import monotonic
 from typing import Any
 
+import numpy.typing as npt
+
 from botorch.optim.closures import NdarrayOptimizationClosure
 from botorch.optim.utils.numpy_utils import get_bounds_as_ndarray
 from botorch.optim.utils.timeout import minimize_with_timeout
@@ -60,7 +62,7 @@ def scipy_minimize(
     parameters: dict[str, Tensor],
     bounds: dict[str, tuple[float | None, float | None]] | None = None,
     callback: Callable[[dict[str, Tensor], OptimizationResult], None] | None = None,
-    x0: ndarray | None = None,
+    x0: npt.NDArray | None = None,
     method: str = "L-BFGS-B",
     options: dict[str, Any] | None = None,
     timeout_sec: float | None = None,
@@ -98,7 +100,7 @@ def scipy_minimize(
     else:
         call_counter = count(1)  # callbacks are typically made at the end of each iter
 
-        def wrapped_callback(x: ndarray):
+        def wrapped_callback(x: npt.NDArray):
            result = OptimizationResult(
                step=next(call_counter),
                fval=float(wrapped_closure(x)[0]),

botorch/optim/parameter_constraints.py

Lines changed: 5 additions & 4 deletions
@@ -16,6 +16,7 @@
 from typing import Union
 
 import numpy as np
+import numpy.typing as npt
 import torch
 from botorch.exceptions.errors import CandidateGenerationError, UnsupportedError
 from scipy.optimize import Bounds
@@ -131,7 +132,7 @@ def make_scipy_linear_constraints(
 
 
 def eval_lin_constraint(
-    x: np.ndarray, flat_idxr: list[int], coeffs: np.ndarray, rhs: float
+    x: npt.NDArray, flat_idxr: list[int], coeffs: npt.NDArray, rhs: float
 ) -> np.float64:
     r"""Evaluate a single linear constraint.
 
@@ -148,8 +149,8 @@ def eval_lin_constraint(
 
 
 def lin_constraint_jac(
-    x: np.ndarray, flat_idxr: list[int], coeffs: np.ndarray, n: int
-) -> np.ndarray:
+    x: npt.NDArray, flat_idxr: list[int], coeffs: npt.NDArray, n: int
+) -> npt.NDArray:
     r"""Return the Jacobian associated with a linear constraint.
 
     Args:
@@ -167,7 +168,7 @@ def lin_constraint_jac(
     return jac
 
 
-def _arrayify(X: Tensor) -> np.ndarray:
+def _arrayify(X: Tensor) -> npt.NDArray:
     r"""Convert a torch.Tensor (any dtype or device) to a numpy (double) array.
 
     Args:

botorch/optim/utils/common.py

Lines changed: 3 additions & 2 deletions
@@ -14,12 +14,13 @@
 from warnings import warn_explicit, WarningMessage
 
 import numpy as np
+import numpy.typing as npt
 from linear_operator.utils.errors import NanError, NotPSDError
 
 
 def _handle_numerical_errors(
-    error: RuntimeError, x: np.ndarray, dtype: np.dtype | None = None
-) -> tuple[np.ndarray, np.ndarray]:
+    error: RuntimeError, x: npt.NDArray, dtype: np.dtype | None = None
+) -> tuple[npt.NDArray, npt.NDArray]:
     if isinstance(error, NotPSDError):
         raise error
     error_message = error.args[0] if len(error.args) > 0 else ""

botorch/optim/utils/numpy_utils.py

Lines changed: 8 additions & 7 deletions
@@ -13,6 +13,7 @@
 from itertools import tee
 
 import numpy as np
+import numpy.typing as npt
 import torch
 from numpy import ndarray
 from torch import Tensor
@@ -35,7 +36,7 @@
 
 def as_ndarray(
     values: Tensor, dtype: np.dtype | None = None, inplace: bool = True
-) -> ndarray:
+) -> npt.NDArray:
     r"""Helper for going from torch.Tensor to numpy.ndarray.
 
     Args:
@@ -67,10 +68,10 @@ def as_ndarray(
 
 
 def get_tensors_as_ndarray_1d(
     tensors: Iterator[Tensor] | dict[str, Tensor],
-    out: ndarray | None = None,
+    out: npt.NDArray | None = None,
     dtype: np.dtype | str | None = None,
-    as_array: Callable[[Tensor], ndarray] = as_ndarray,
-) -> ndarray:
+    as_array: Callable[[Tensor], npt.NDArray] = as_ndarray,
+) -> npt.NDArray:
     # Create a pair of iterators, one for setup and one for data transfer
     named_tensors_iter, named_tensors_iter2 = tee(
         iter(tensors.items()) if isinstance(tensors, dict) else enumerate(tensors), 2
@@ -112,8 +113,8 @@ def get_tensors_as_ndarray_1d(
 
 def set_tensors_from_ndarray_1d(
     tensors: Iterator[Tensor] | dict[str, Tensor],
-    array: ndarray,
-    as_tensor: Callable[[ndarray], Tensor] = torch.as_tensor,
+    array: npt.NDArray,
+    as_tensor: Callable[[npt.NDArray], Tensor] = torch.as_tensor,
 ) -> None:
     r"""Sets the values of one more tensors based off of a vector of assignments."""
     named_tensors_iter = (
@@ -137,7 +138,7 @@
 def get_bounds_as_ndarray(
     parameters: dict[str, Tensor],
     bounds: dict[str, tuple[float | Tensor | None, float | Tensor | None]],
-) -> np.ndarray | None:
+) -> npt.NDArray | None:
     r"""Helper method for converting bounds into an ndarray.
 
     Args:

botorch/optim/utils/timeout.py

Lines changed: 6 additions & 5 deletions
@@ -12,13 +12,14 @@
 from typing import Any
 
 import numpy as np
+import numpy.typing as npt
 from botorch.exceptions.errors import OptimizationTimeoutError
 from scipy import optimize
 
 
 def minimize_with_timeout(
-    fun: Callable[[np.ndarray, ...], float],
-    x0: np.ndarray,
+    fun: Callable[[npt.NDArray, ...], float],
+    x0: npt.NDArray,
     args: tuple[Any, ...] = (),
     method: str | None = None,
     jac: str | Callable | bool | None = None,
@@ -45,7 +46,7 @@ def minimize_with_timeout(
     start_time = time.monotonic()
     callback_data = {"num_iterations": 0}  # update from withing callback below
 
-    def timeout_callback(xk: np.ndarray) -> bool:
+    def timeout_callback(xk: npt.NDArray) -> bool:
         runtime = time.monotonic() - start_time
         callback_data["num_iterations"] += 1
         if runtime > timeout_sec:
@@ -63,14 +64,14 @@ def timeout_callback(xk: np.ndarray) -> bool:
     elif method == "trust-constr":  # special signature
 
         def wrapped_callback(
-            xk: np.ndarray, state: optimize.OptimizeResult
+            xk: npt.NDArray, state: optimize.OptimizeResult
         ) -> bool:
             # order here is important to make sure base callback gets executed
             return callback(xk, state) or timeout_callback(xk=xk)
 
     else:
 
-        def wrapped_callback(xk: np.ndarray) -> None:
+        def wrapped_callback(xk: npt.NDArray) -> None:
             timeout_callback(xk=xk)
             callback(xk)

botorch/utils/sampling.py

Lines changed: 6 additions & 5 deletions
@@ -24,6 +24,7 @@
 from typing import Any, TYPE_CHECKING
 
 import numpy as np
+import numpy.typing as npt
 import scipy
 import torch
 from botorch.exceptions.errors import BotorchError
@@ -374,11 +375,11 @@ def _convert_bounds_to_inequality_constraints(bounds: Tensor) -> tuple[Tensor, T
 
 
 def find_interior_point(
-    A: np.ndarray,
-    b: np.ndarray,
-    A_eq: np.ndarray | None = None,
-    b_eq: np.ndarray | None = None,
-) -> np.ndarray:
+    A: npt.NDArray,
+    b: npt.NDArray,
+    A_eq: npt.NDArray | None = None,
+    b_eq: npt.NDArray | None = None,
+) -> npt.NDArray:
     r"""Find an interior point of a polytope via linear programming.
 
     Args:

test/optim/test_parameter_constraints.py

Lines changed: 3 additions & 2 deletions
@@ -8,6 +8,7 @@
 from itertools import product
 
 import numpy as np
+import numpy.typing as npt
 import torch
 from botorch.exceptions.errors import CandidateGenerationError, UnsupportedError
 from botorch.optim.parameter_constraints import (
@@ -55,7 +56,7 @@ def test_make_nonlinear_constraints(self):
         def nlc(x):
             return 4 - x.sum()
 
-        def f_np_wrapper(x: np.ndarray, f: Callable):
+        def f_np_wrapper(x: npt.NDArray, f: Callable):
             """Given a torch callable, compute value + grad given a numpy array."""
             X = (
                 torch.from_numpy(x)
@@ -114,7 +115,7 @@ def test_make_scipy_nonlinear_inequality_constraints(self):
         def nlc(x):
             return 4 - x.sum()
 
-        def f_np_wrapper(x: np.ndarray, f: Callable):
+        def f_np_wrapper(x: npt.NDArray, f: Callable):
             """Given a torch callable, compute value + grad given a numpy array."""
             X = (
                 torch.from_numpy(x)
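One property of the new annotations worth noting (an observation about `numpy.typing`, not something stated in the commit): `npt.NDArray` is a generic alias, so any of these signatures can later be narrowed to a concrete dtype without changing the imports again. A hypothetical sketch:

import numpy as np
import numpy.typing as npt

def as_float64(x: npt.NDArray) -> npt.NDArray[np.float64]:
    # The unparameterized NDArray accepts any dtype; the return annotation is
    # narrowed to float64, which np.asarray(..., dtype=np.float64) guarantees.
    return np.asarray(x, dtype=np.float64)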

0 commit comments
