From 359cbaf517055fa9de89d7bdd743c1c5718b439d Mon Sep 17 00:00:00 2001
From: Edoardo Altamura <38359901+edoaltamura@users.noreply.github.com>
Date: Thu, 15 May 2025 14:47:12 +0100
Subject: [PATCH 1/4] Port optimizer update from Algorithms
---
.pylintdict | 6 ++-
.../optimizers/__init__.py | 13 ++++++-
.../optimizers/nlopts/nloptimizer.py | 6 ++-
.../optimizers/nlopts/sbplx.py | 37 +++++++++++++++++++
.../add-sbplx-optimizer-d23fb5925879fdae.yaml | 8 ++++
test/optimizers/test_optimizers.py | 4 +-
6 files changed, 69 insertions(+), 5 deletions(-)
create mode 100644 qiskit_machine_learning/optimizers/nlopts/sbplx.py
create mode 100644 releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml
diff --git a/.pylintdict b/.pylintdict
index 34bef92c3..5942e917c 100644
--- a/.pylintdict
+++ b/.pylintdict
@@ -253,6 +253,7 @@ izaac
izz
jac
jacobian
+johnson
jm
jonathan
jones
@@ -487,6 +488,7 @@ sanjiv
sashank
satisfiability
satyen
+sbplx
scalability
schroediger
schroedinger
@@ -529,6 +531,7 @@ stdout
stefano
steppable
stepsize
+steven
str
stratifications
stratification
@@ -536,6 +539,7 @@ subcircuits
subclassed
subclasses
subcomponents
+subplex
submodules
subobjects
subseteq
@@ -627,4 +631,4 @@ zz
ω
φ_i
φ_ij
-Δ
\ No newline at end of file
+Δ
diff --git a/qiskit_machine_learning/optimizers/__init__.py b/qiskit_machine_learning/optimizers/__init__.py
index 65102d931..5e989a622 100644
--- a/qiskit_machine_learning/optimizers/__init__.py
+++ b/qiskit_machine_learning/optimizers/__init__.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -91,6 +91,15 @@
https://github.com/qiskit-community/qiskit-algorithms/issues/84.
+Qiskit also provides local optimizers based on
+`NLOpt <https://nlopt.readthedocs.io/en/latest/>`_.
+See Global Optimizers section below for the optional NLOpt installation instructions.
+.. autosummary::
+ :toctree: ../stubs/
+ :nosignatures:
+ SBPLX
+
+
Global optimizers
-----------------
The global optimizers here all use `NLOpt <https://nlopt.readthedocs.io/en/latest/>`_ for their
@@ -123,6 +132,7 @@
from .nlopts.direct_l_rand import DIRECT_L_RAND
from .nlopts.esch import ESCH
from .nlopts.isres import ISRES
+from .nlopts.sbplx import SBPLX
from .steppable_optimizer import SteppableOptimizer, AskData, TellData, OptimizerState
from .optimizer import Minimizer, Optimizer, OptimizerResult, OptimizerSupportLevel
from .p_bfgs import P_BFGS
@@ -165,5 +175,6 @@
"DIRECT_L_RAND",
"ESCH",
"ISRES",
+ "SBPLX",
"UMDA",
]
diff --git a/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py b/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
index d04e1d4d1..3c977fe89 100644
--- a/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
+++ b/qiskit_machine_learning/optimizers/nlopts/nloptimizer.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -33,12 +33,13 @@ class NLoptOptimizerType(Enum):
GN_DIRECT_L = 3
GN_ESCH = 4
GN_ISRES = 5
+ LN_SBPLX = 6
@_optionals.HAS_NLOPT.require_in_instance
class NLoptOptimizer(Optimizer):
"""
- NLopt global optimizer base class
+ NLopt local and global optimizer base class
"""
_OPTIONS = ["max_evals"]
@@ -64,6 +65,7 @@ def __init__(self, max_evals: int = 1000) -> None: # pylint: disable=unused-arg
NLoptOptimizerType.GN_DIRECT_L: nlopt.GN_DIRECT_L,
NLoptOptimizerType.GN_ESCH: nlopt.GN_ESCH,
NLoptOptimizerType.GN_ISRES: nlopt.GN_ISRES,
+ NLoptOptimizerType.LN_SBPLX: nlopt.LN_SBPLX,
}
@abstractmethod
diff --git a/qiskit_machine_learning/optimizers/nlopts/sbplx.py b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
new file mode 100644
index 000000000..96b88e101
--- /dev/null
+++ b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
@@ -0,0 +1,37 @@
+# This code is part of a Qiskit project.
+#
+# (C) Copyright IBM 2025.
+#
+# This code is licensed under the Apache License, Version 2.0. You may
+# obtain a copy of this license in the LICENSE.txt file in the root directory
+# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
+#
+# Any modifications or derivative works of this code must retain this
+# copyright notice, and modified files need to carry a notice indicating
+# that they have been altered from the originals.
+
+"""Sbplx (Subplex) optimizer."""
+
+from .nloptimizer import NLoptOptimizer, NLoptOptimizerType
+
+
+class SBPLX(NLoptOptimizer):
+ """
+ Subplex optimizer.
+
+ "Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
+ is claimed to be much more efficient and robust than the original Nelder-Mead,
+ while retaining the latter's facility with discontinuous objectives,
+ and in my experience these claims seem to be true in many cases.
+ (However, I'm not aware of any proof that Subplex is globally convergent,
+ and perhaps it may fail for some objectives like Nelder-Mead; YMMV.)"
+ Description by Steven G. Johnson, author of NLopt library.
+
+ NLopt local optimizer, derivative-free.
+ For further detail, please refer to
+ https://nlopt.readthedocs.io/en/latest/NLopt_Algorithms/#sbplx-based-on-subplex
+ """
+
+ def get_nlopt_optimizer(self) -> NLoptOptimizerType:
+ """Return NLopt optimizer type."""
+ return NLoptOptimizerType.LN_SBPLX
diff --git a/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml b/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml
new file mode 100644
index 000000000..4790ec6f6
--- /dev/null
+++ b/releasenotes/notes/add-sbplx-optimizer-d23fb5925879fdae.yaml
@@ -0,0 +1,8 @@
+---
+features:
+ - |
+    Support for the :class:`.SBPLX` optimizer from the NLopt library has been added.
+ SBPLX is a local gradient-free optimizer based on Nelder-Mead and
+ is expected to show better convergence behavior.
+ Further information about this optimizer and the others can be found in
+ the API ref for the :mod:`~qiskit_machine_learning.optimizers`.
diff --git a/test/optimizers/test_optimizers.py b/test/optimizers/test_optimizers.py
index 0ad5975f4..719dc76a1 100644
--- a/test/optimizers/test_optimizers.py
+++ b/test/optimizers/test_optimizers.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -39,6 +39,7 @@
Optimizer,
P_BFGS,
POWELL,
+ SBPLX,
SLSQP,
SPSA,
QNSPSA,
@@ -221,6 +222,7 @@ def test_scipy_optimizer_parse_bounds(self):
(CRS, False),
(DIRECT_L, False),
(DIRECT_L_RAND, False),
+ (SBPLX, True),
)
@unpack
def test_nlopt(self, optimizer_cls, use_bound):
From 19eab88176070c8ac68fa5073446924ee63294a0 Mon Sep 17 00:00:00 2001
From: Edoardo Altamura <38359901+edoaltamura@users.noreply.github.com>
Date: Thu, 15 May 2025 15:28:07 +0100
Subject: [PATCH 2/4] Fix indentation html
---
qiskit_machine_learning/optimizers/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/qiskit_machine_learning/optimizers/__init__.py b/qiskit_machine_learning/optimizers/__init__.py
index 5e989a622..6e540c59b 100644
--- a/qiskit_machine_learning/optimizers/__init__.py
+++ b/qiskit_machine_learning/optimizers/__init__.py
@@ -83,6 +83,7 @@
SciPyOptimizer
UMDA
+
The optimizers from
`scikit-quant <https://scikit-quant.readthedocs.io/en/latest/>`_ are not included in the
Qiskit Machine Learning library.
From 26f6bba8bf9cac4bda4ccd86bb49a2d3c1c569e8 Mon Sep 17 00:00:00 2001
From: Edoardo Altamura <38359901+edoaltamura@users.noreply.github.com>
Date: Thu, 15 May 2025 17:25:02 +0100
Subject: [PATCH 3/4] Fix indentation and polish up inits
---
.../optimizers/__init__.py | 92 ++++++++++---------
.../optimizers/nlopts/__init__.py | 3 +-
.../optimizers/nlopts/sbplx.py | 11 +--
3 files changed, 54 insertions(+), 52 deletions(-)
diff --git a/qiskit_machine_learning/optimizers/__init__.py b/qiskit_machine_learning/optimizers/__init__.py
index 6e540c59b..df5653478 100644
--- a/qiskit_machine_learning/optimizers/__init__.py
+++ b/qiskit_machine_learning/optimizers/__init__.py
@@ -32,56 +32,56 @@
----------------------
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
+ :toctree: ../stubs/
+ :nosignatures:
- OptimizerResult
- Optimizer
- Minimizer
+ OptimizerResult
+ Optimizer
+ Minimizer
Steppable optimization
----------------------
.. autosummary::
- :toctree: ../stubs/
+ :toctree: ../stubs/
- optimizer_utils
+ optimizer_utils
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
+ :toctree: ../stubs/
+ :nosignatures:
- SteppableOptimizer
- AskData
- TellData
- OptimizerState
+ SteppableOptimizer
+ AskData
+ TellData
+ OptimizerState
Local optimizers
----------------
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
-
- ADAM
- AQGD
- CG
- COBYLA
- L_BFGS_B
- GSLS
- GradientDescent
- GradientDescentState
- NELDER_MEAD
- NFT
- P_BFGS
- POWELL
- SLSQP
- SPSA
- QNSPSA
- TNC
- SciPyOptimizer
- UMDA
+ :toctree: ../stubs/
+ :nosignatures:
+
+ ADAM
+ AQGD
+ CG
+ COBYLA
+ L_BFGS_B
+ GSLS
+ GradientDescent
+ GradientDescentState
+ NELDER_MEAD
+ NFT
+ P_BFGS
+ POWELL
+ SLSQP
+ SPSA
+ QNSPSA
+ TNC
+ SciPyOptimizer
+ UMDA
The optimizers from
@@ -95,10 +95,12 @@
Qiskit also provides local optimizers based on
`NLOpt <https://nlopt.readthedocs.io/en/latest/>`_.
See Global Optimizers section below for the optional NLOpt installation instructions.
+
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
- SBPLX
+ :toctree: ../stubs/
+ :nosignatures:
+
+ SBPLX
Global optimizers
@@ -108,14 +110,14 @@
To install the ``NLOpt`` dependent package you can use ``pip install nlopt``.
.. autosummary::
- :toctree: ../stubs/
- :nosignatures:
-
- CRS
- DIRECT_L
- DIRECT_L_RAND
- ESCH
- ISRES
+ :toctree: ../stubs/
+ :nosignatures:
+
+ CRS
+ DIRECT_L
+ DIRECT_L_RAND
+ ESCH
+ ISRES
"""
diff --git a/qiskit_machine_learning/optimizers/nlopts/__init__.py b/qiskit_machine_learning/optimizers/nlopts/__init__.py
index 59bf5f86c..b7e287391 100644
--- a/qiskit_machine_learning/optimizers/nlopts/__init__.py
+++ b/qiskit_machine_learning/optimizers/nlopts/__init__.py
@@ -10,11 +10,12 @@
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
-"""NLopt based global optimizers"""
+"""NLopt-based global and local optimizers"""
from .crs import CRS
from .direct_l import DIRECT_L
from .direct_l_rand import DIRECT_L_RAND
from .esch import ESCH
from .isres import ISRES
+from .sbplx import SBPLX
from .nloptimizer import NLoptOptimizer
diff --git a/qiskit_machine_learning/optimizers/nlopts/sbplx.py b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
index 96b88e101..d67991292 100644
--- a/qiskit_machine_learning/optimizers/nlopts/sbplx.py
+++ b/qiskit_machine_learning/optimizers/nlopts/sbplx.py
@@ -19,13 +19,12 @@ class SBPLX(NLoptOptimizer):
"""
Subplex optimizer.
- "Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
+ 'Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
is claimed to be much more efficient and robust than the original Nelder-Mead,
- while retaining the latter's facility with discontinuous objectives,
- and in my experience these claims seem to be true in many cases.
- (However, I'm not aware of any proof that Subplex is globally convergent,
- and perhaps it may fail for some objectives like Nelder-Mead; YMMV.)"
- Description by Steven G. Johnson, author of NLopt library.
+ while retaining the latter's facility with discontinuous objectives.
+ While these claims seem to be true in many cases, we could not find any proof that
+ Subplex is globally convergent, and perhaps it may fail for some objective functions
+    like Nelder-Mead; YMMV.', by Steven G. Johnson, author of NLopt library.
NLopt local optimizer, derivative-free.
For further detail, please refer to
From 17a57355a8e267654fefc29379a1d55083ad6469 Mon Sep 17 00:00:00 2001
From: Edoardo Altamura <38359901+edoaltamura@users.noreply.github.com>
Date: Fri, 16 May 2025 11:51:09 +0100
Subject: [PATCH 4/4] Fix copyright
---
qiskit_machine_learning/optimizers/nlopts/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/qiskit_machine_learning/optimizers/nlopts/__init__.py b/qiskit_machine_learning/optimizers/nlopts/__init__.py
index b7e287391..4a2be93dd 100644
--- a/qiskit_machine_learning/optimizers/nlopts/__init__.py
+++ b/qiskit_machine_learning/optimizers/nlopts/__init__.py
@@ -1,6 +1,6 @@
# This code is part of a Qiskit project.
#
-# (C) Copyright IBM 2018, 2024.
+# (C) Copyright IBM 2018, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory