diff --git a/.pylintdict b/.pylintdict
index 34bef92c3..5942e917c 100644
--- a/.pylintdict
+++ b/.pylintdict
@@ -253,6 +253,7 @@ izaac
izz
jac
jacobian
+johnson
jm
jonathan
jones
@@ -487,6 +488,7 @@ sanjiv
sashank
satisfiability
satyen
+sbplx
scalability
schroediger
schroedinger
@@ -529,6 +531,7 @@ stdout
stefano
steppable
stepsize
+steven
str
stratifications
stratification
@@ -536,6 +539,7 @@ subcircuits
subclassed
subclasses
subcomponents
+subplex
submodules
subobjects
subseteq
@@ -627,4 +631,4 @@ zz
ω
φ_i
φ_ij
-Δ
\ No newline at end of file
+Δ
diff --git a/qiskit_machine_learning/optimizers/__init__.py b/qiskit_machine_learning/optimizers/__init__.py
index 65102d931..df5653478 100644
--- a/qiskit_machine_learning/optimizers/__init__.py
+++ b/qiskit_machine_learning/optimizers/__init__.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -32,56 +32,57 @@
----------------------
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
+ :toctree: ../stubs/
+ :nosignatures:
- OptimizerResult
- Optimizer
- Minimizer
+ OptimizerResult
+ Optimizer
+ Minimizer
Steppable optimization
----------------------
.. autosummary::
- :toctree: ../stubs/
+ :toctree: ../stubs/
- optimizer_utils
+ optimizer_utils
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
+ :toctree: ../stubs/
+ :nosignatures:
- SteppableOptimizer
- AskData
- TellData
- OptimizerState
+ SteppableOptimizer
+ AskData
+ TellData
+ OptimizerState
Local optimizers
----------------
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
-
- ADAM
- AQGD
- CG
- COBYLA
- L_BFGS_B
- GSLS
- GradientDescent
- GradientDescentState
- NELDER_MEAD
- NFT
- P_BFGS
- POWELL
- SLSQP
- SPSA
- QNSPSA
- TNC
- SciPyOptimizer
- UMDA
+ :toctree: ../stubs/
+ :nosignatures:
+
+ ADAM
+ AQGD
+ CG
+ COBYLA
+ L_BFGS_B
+ GSLS
+ GradientDescent
+ GradientDescentState
+ NELDER_MEAD
+ NFT
+ P_BFGS
+ POWELL
+ SLSQP
+ SPSA
+ QNSPSA
+ TNC
+ SciPyOptimizer
+ UMDA
+
The optimizers from
`scikit-quant `_ are not included in the
@@ -91,6 +92,17 @@
https://github.com/qiskit-community/qiskit-algorithms/issues/84.
+Qiskit also provides local optimizers based on
+`NLOpt <https://nlopt.readthedocs.io/>`_.
+See the Global optimizers section below for the optional NLOpt installation instructions.
+
+.. autosummary::
+ :toctree: ../stubs/
+ :nosignatures:
+
+ SBPLX
+
+
Global optimizers
-----------------
The global optimizers here all use `NLOpt `_ for their
@@ -98,14 +110,14 @@
To install the ``NLOpt`` dependent package you can use ``pip install nlopt``.
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
+ :toctree: ../stubs/
+ :nosignatures:
- CRS
- DIRECT_L
- DIRECT_L_RAND
- ESCH
- ISRES
+ CRS
+ DIRECT_L
+ DIRECT_L_RAND
+ ESCH
+ ISRES
"""
@@ -123,6 +135,7 @@
from .nlopts.direct_l_rand import DIRECT_L_RAND
from .nlopts.esch import ESCH
from .nlopts.isres import ISRES
+from .nlopts.sbplx import SBPLX
from .steppable_optimizer import SteppableOptimizer, AskData, TellData, OptimizerState
from .optimizer import Minimizer, Optimizer, OptimizerResult, OptimizerSupportLevel
from .p_bfgs import P_BFGS
@@ -165,5 +178,6 @@
"DIRECT_L_RAND",
"ESCH",
"ISRES",
+ "SBPLX",
"UMDA",
]
diff --git a/qiskit_machine_learning/optimizers/nlopts/__init__.py b/qiskit_machine_learning/optimizers/nlopts/__init__.py
index 59bf5f86c..4a2be93dd 100644
--- a/qiskit_machine_learning/optimizers/nlopts/__init__.py
+++ b/qiskit_machine_learning/optimizers/nlopts/__init__.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -10,11 +10,12 @@
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
-"""NLopt based global optimizers"""
+"""NLopt-based global and local optimizers"""
from .crs import CRS
from .direct_l import DIRECT_L
from .direct_l_rand import DIRECT_L_RAND
from .esch import ESCH
from .isres import ISRES
+from .sbplx import SBPLX
from .nloptimizer import NLoptOptimizer
diff --git a/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py b/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
index d04e1d4d1..3c977fe89 100644
--- a/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
+++ b/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -33,12 +33,13 @@ class NLoptOptimizerType(Enum):
GN_DIRECT_L = 3
GN_ESCH = 4
GN_ISRES = 5
+ LN_SBPLX = 6
@_optionals.HAS_NLOPT.require_in_instance
class NLoptOptimizer(Optimizer):
"""
- NLopt global optimizer base class
+ NLopt local and global optimizer base class
"""
_OPTIONS = ["max_evals"]
@@ -64,6 +65,7 @@ def __init__(self, max_evals: int = 1000) -> None: # pylint: disable=unused-arg
NLoptOptimizerType.GN_DIRECT_L: nlopt.GN_DIRECT_L,
NLoptOptimizerType.GN_ESCH: nlopt.GN_ESCH,
NLoptOptimizerType.GN_ISRES: nlopt.GN_ISRES,
+ NLoptOptimizerType.LN_SBPLX: nlopt.LN_SBPLX,
}
@abstractmethod
diff --git a/qiskit_machine_learning/optimizers/nlopts/sbplx.py b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
new file mode 100644
index 000000000..d67991292
--- /dev/null
+++ b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
@@ -0,0 +1,36 @@
+# This code is part of a Qiskit project.
+#
+# (C) Copyright IBM 2025.
+#
+# This code is licensed under the Apache License, Version 2.0. You may
+# obtain a copy of this license in the LICENSE.txt file in the root directory
+# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
+#
+# Any modifications or derivative works of this code must retain this
+# copyright notice, and modified files need to carry a notice indicating
+# that they have been altered from the originals.
+
+"""Sbplx (Subplex) optimizer."""
+
+from .nloptimizer import NLoptOptimizer, NLoptOptimizerType
+
+
+class SBPLX(NLoptOptimizer):
+ """
+ Subplex optimizer.
+
+ 'Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
+ is claimed to be much more efficient and robust than the original Nelder-Mead,
+ while retaining the latter's facility with discontinuous objectives.
+ While these claims seem to be true in many cases, we could not find any proof that
+ Subplex is globally convergent, and perhaps it may fail for some objective functions
+ like Nelder-Mead; YMMV.', by Steven G. Johnson, author of the NLopt library.
+
+ NLopt local optimizer, derivative-free.
+ For further detail, please refer to
+ https://nlopt.readthedocs.io/en/latest/NLopt_Algorithms/#sbplx-based-on-subplex
+ """
+
+ def get_nlopt_optimizer(self) -> NLoptOptimizerType:
+ """Return NLopt optimizer type."""
+ return NLoptOptimizerType.LN_SBPLX
diff --git a/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml b/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml
new file mode 100644
index 000000000..4790ec6f6
--- /dev/null
+++ b/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml
@@ -0,0 +1,8 @@
+---
+features:
+ - |
+ Support for the :class:`.SBPLX` optimizer from the NLopt library has been added.
+ SBPLX is a local gradient-free optimizer based on Nelder-Mead and
+ is expected to show better convergence behavior.
+ Further information about this optimizer and the others can be found in
+ the API reference of the :mod:`~qiskit_machine_learning.optimizers` module.
diff --git a/test/optimizers/test_optimizers.py b/test/optimizers/test_optimizers.py
index 0ad5975f4..719dc76a1 100644
--- a/test/optimizers/test_optimizers.py
+++ b/test/optimizers/test_optimizers.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -39,6 +39,7 @@
Optimizer,
P_BFGS,
POWELL,
+ SBPLX,
SLSQP,
SPSA,
QNSPSA,
@@ -221,6 +222,7 @@ def test_scipy_optimizer_parse_bounds(self):
(CRS, False),
(DIRECT_L, False),
(DIRECT_L_RAND, False),
+ (SBPLX, True),
)
@unpack
def test_nlopt(self, optimizer_cls, use_bound):