Add gradient methods to FinDiffGradients (#9104)

* implement gradient methods

* fix docs

* arg order/refactor

* fix docs

* revert central difference

* Update qiskit/algorithms/gradients/finite_diff_estimator_gradient.py

* fix docs

* Update qiskit/algorithms/gradients/finite_diff_sampler_gradient.py

Co-authored-by: Julien Gacon <gaconju@gmail.com>

Co-authored-by: Julien Gacon <gaconju@gmail.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
This commit is contained in:
Ikko Hamamura 2022-11-18 00:50:26 +09:00 committed by GitHub
parent 30f45fe79a
commit 2bd4afacdc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 178 additions and 114 deletions

View File

@ -14,6 +14,7 @@
from __future__ import annotations from __future__ import annotations
import sys
from typing import Sequence from typing import Sequence
import numpy as np import numpy as np
@ -28,14 +29,27 @@ from qiskit.quantum_info.operators.base_operator import BaseOperator
from .base_estimator_gradient import BaseEstimatorGradient from .base_estimator_gradient import BaseEstimatorGradient
from .estimator_gradient_result import EstimatorGradientResult from .estimator_gradient_result import EstimatorGradientResult
if sys.version_info >= (3, 8):
# pylint: disable=no-name-in-module, ungrouped-imports
from typing import Literal
else:
from typing_extensions import Literal
class FiniteDiffEstimatorGradient(BaseEstimatorGradient): class FiniteDiffEstimatorGradient(BaseEstimatorGradient):
""" """
Compute the gradients of the expectation values by finite difference method. Compute the gradients of the expectation values by finite difference method.
""" """
def __init__(self, estimator: BaseEstimator, epsilon: float, options: Options | None = None): def __init__(
""" self,
estimator: BaseEstimator,
epsilon: float,
options: Options | None = None,
*,
method: Literal["central", "forward", "backward"] = "central",
):
r"""
Args: Args:
estimator: The estimator used to compute the gradients. estimator: The estimator used to compute the gradients.
epsilon: The offset size for the finite difference gradients. epsilon: The offset size for the finite difference gradients.
@ -43,14 +57,27 @@ class FiniteDiffEstimatorGradient(BaseEstimatorGradient):
The order of priority is: options in ``run`` method > gradient's The order of priority is: options in ``run`` method > gradient's
default options > primitive's default setting. default options > primitive's default setting.
Higher priority setting overrides lower priority setting Higher priority setting overrides lower priority setting
method: The computation method of the gradients.
- ``central`` computes :math:`\frac{f(x+e)-f(x-e)}{2e}`,
- ``forward`` computes :math:`\frac{f(x+e) - f(x)}{e}`,
- ``backward`` computes :math:`\frac{f(x)-f(x-e)}{e}`
where :math:`e` is epsilon.
Raises: Raises:
ValueError: If ``epsilon`` is not positive. ValueError: If ``epsilon`` is not positive.
TypeError: If ``method`` is invalid.
""" """
if epsilon <= 0: if epsilon <= 0:
raise ValueError(f"epsilon ({epsilon}) should be positive.") raise ValueError(f"epsilon ({epsilon}) should be positive.")
self._epsilon = epsilon self._epsilon = epsilon
self._base_parameter_values_dict = {} self._base_parameter_values_dict = {}
if method not in ("central", "forward", "backward"):
raise TypeError(
f"The argument method should be central, forward, or backward: {method} is given."
)
self._method = method
super().__init__(estimator, options) super().__init__(estimator, options)
def _run( def _run(
@ -74,12 +101,25 @@ class FiniteDiffEstimatorGradient(BaseEstimatorGradient):
metadata_.append({"parameters": [circuit.parameters[idx] for idx in indices]}) metadata_.append({"parameters": [circuit.parameters[idx] for idx in indices]})
offset = np.identity(circuit.num_parameters)[indices, :] offset = np.identity(circuit.num_parameters)[indices, :]
plus = parameter_values_ + self._epsilon * offset if self._method == "central":
minus = parameter_values_ - self._epsilon * offset plus = parameter_values_ + self._epsilon * offset
n = 2 * len(indices) minus = parameter_values_ - self._epsilon * offset
job = self._estimator.run( n = 2 * len(indices)
[circuit] * n, [observable] * n, plus.tolist() + minus.tolist(), **options job = self._estimator.run(
) [circuit] * n, [observable] * n, plus.tolist() + minus.tolist(), **options
)
elif self._method == "forward":
plus = parameter_values_ + self._epsilon * offset
n = len(indices) + 1
job = self._estimator.run(
[circuit] * n, [observable] * n, [parameter_values_] + plus.tolist(), **options
)
elif self._method == "backward":
minus = parameter_values_ - self._epsilon * offset
n = len(indices) + 1
job = self._estimator.run(
[circuit] * n, [observable] * n, [parameter_values_] + minus.tolist(), **options
)
jobs.append(job) jobs.append(job)
# combine the results # combine the results
@ -90,8 +130,13 @@ class FiniteDiffEstimatorGradient(BaseEstimatorGradient):
gradients = [] gradients = []
for result in results: for result in results:
n = len(result.values) // 2 # is always a multiple of 2 if self._method == "central":
gradient_ = (result.values[:n] - result.values[n:]) / (2 * self._epsilon) n = len(result.values) // 2 # is always a multiple of 2
gradient_ = (result.values[:n] - result.values[n:]) / (2 * self._epsilon)
elif self._method == "forward":
gradient_ = (result.values[1:] - result.values[0]) / self._epsilon
elif self._method == "backward":
gradient_ = (result.values[0] - result.values[1:]) / self._epsilon
gradients.append(gradient_) gradients.append(gradient_)
opt = self._get_local_options(options) opt = self._get_local_options(options)
return EstimatorGradientResult(gradients=gradients, metadata=metadata_, options=opt) return EstimatorGradientResult(gradients=gradients, metadata=metadata_, options=opt)

View File

@ -14,6 +14,7 @@
from __future__ import annotations from __future__ import annotations
import sys
from typing import Sequence from typing import Sequence
import numpy as np import numpy as np
@ -26,6 +27,12 @@ from qiskit.providers import Options
from .base_sampler_gradient import BaseSamplerGradient from .base_sampler_gradient import BaseSamplerGradient
from .sampler_gradient_result import SamplerGradientResult from .sampler_gradient_result import SamplerGradientResult
if sys.version_info >= (3, 8):
# pylint: disable=no-name-in-module, ungrouped-imports
from typing import Literal
else:
from typing_extensions import Literal
class FiniteDiffSamplerGradient(BaseSamplerGradient): class FiniteDiffSamplerGradient(BaseSamplerGradient):
"""Compute the gradients of the sampling probability by finite difference method.""" """Compute the gradients of the sampling probability by finite difference method."""
@ -35,8 +42,10 @@ class FiniteDiffSamplerGradient(BaseSamplerGradient):
sampler: BaseSampler, sampler: BaseSampler,
epsilon: float, epsilon: float,
options: Options | None = None, options: Options | None = None,
*,
method: Literal["central", "forward", "backward"] = "central",
): ):
""" r"""
Args: Args:
sampler: The sampler used to compute the gradients. sampler: The sampler used to compute the gradients.
epsilon: The offset size for the finite difference gradients. epsilon: The offset size for the finite difference gradients.
@ -44,13 +53,26 @@ class FiniteDiffSamplerGradient(BaseSamplerGradient):
The order of priority is: options in ``run`` method > gradient's The order of priority is: options in ``run`` method > gradient's
default options > primitive's default setting. default options > primitive's default setting.
Higher priority setting overrides lower priority setting Higher priority setting overrides lower priority setting
method: The computation method of the gradients.
- ``central`` computes :math:`\frac{f(x+e)-f(x-e)}{2e}`,
- ``forward`` computes :math:`\frac{f(x+e) - f(x)}{e}`,
- ``backward`` computes :math:`\frac{f(x)-f(x-e)}{e}`
where :math:`e` is epsilon.
Raises: Raises:
ValueError: If ``epsilon`` is not positive. ValueError: If ``epsilon`` is not positive.
TypeError: If ``method`` is invalid.
""" """
if epsilon <= 0: if epsilon <= 0:
raise ValueError(f"epsilon ({epsilon}) should be positive.") raise ValueError(f"epsilon ({epsilon}) should be positive.")
self._epsilon = epsilon self._epsilon = epsilon
if method not in ("central", "forward", "backward"):
raise TypeError(
f"The argument method should be central, forward, or backward: {method} is given."
)
self._method = method
super().__init__(sampler, options) super().__init__(sampler, options)
def _run( def _run(
@ -70,10 +92,23 @@ class FiniteDiffSamplerGradient(BaseSamplerGradient):
indices = [circuit.parameters.data.index(p) for p in parameters_] indices = [circuit.parameters.data.index(p) for p in parameters_]
metadata_.append({"parameters": [circuit.parameters[idx] for idx in indices]}) metadata_.append({"parameters": [circuit.parameters[idx] for idx in indices]})
offset = np.identity(circuit.num_parameters)[indices, :] offset = np.identity(circuit.num_parameters)[indices, :]
plus = parameter_values_ + self._epsilon * offset if self._method == "central":
minus = parameter_values_ - self._epsilon * offset plus = parameter_values_ + self._epsilon * offset
n = 2 * len(indices) minus = parameter_values_ - self._epsilon * offset
job = self._sampler.run([circuit] * n, plus.tolist() + minus.tolist(), **options) n = 2 * len(indices)
job = self._sampler.run([circuit] * n, plus.tolist() + minus.tolist(), **options)
elif self._method == "forward":
plus = parameter_values_ + self._epsilon * offset
n = len(indices) + 1
job = self._sampler.run(
[circuit] * n, [parameter_values_] + plus.tolist(), **options
)
elif self._method == "backward":
minus = parameter_values_ - self._epsilon * offset
n = len(indices) + 1
job = self._sampler.run(
[circuit] * n, [parameter_values_] + minus.tolist(), **options
)
jobs.append(job) jobs.append(job)
# combine the results # combine the results
@ -84,14 +119,33 @@ class FiniteDiffSamplerGradient(BaseSamplerGradient):
gradients = [] gradients = []
for i, result in enumerate(results): for i, result in enumerate(results):
n = len(result.quasi_dists) // 2 if self._method == "central":
gradient_ = [] n = len(result.quasi_dists) // 2
for dist_plus, dist_minus in zip(result.quasi_dists[:n], result.quasi_dists[n:]): gradient_ = []
grad_dist = np.zeros(2 ** circuits[i].num_qubits) for dist_plus, dist_minus in zip(result.quasi_dists[:n], result.quasi_dists[n:]):
grad_dist[list(dist_plus.keys())] += list(dist_plus.values()) grad_dist = np.zeros(2 ** circuits[i].num_qubits)
grad_dist[list(dist_minus.keys())] -= list(dist_minus.values()) grad_dist[list(dist_plus.keys())] += list(dist_plus.values())
grad_dist /= 2 * self._epsilon grad_dist[list(dist_minus.keys())] -= list(dist_minus.values())
gradient_.append(dict(enumerate(grad_dist))) grad_dist /= 2 * self._epsilon
gradient_.append(dict(enumerate(grad_dist)))
elif self._method == "forward":
gradient_ = []
dist_zero = result.quasi_dists[0]
for dist_plus in result.quasi_dists[1:]:
grad_dist = np.zeros(2 ** circuits[i].num_qubits)
grad_dist[list(dist_plus.keys())] += list(dist_plus.values())
grad_dist[list(dist_zero.keys())] -= list(dist_zero.values())
grad_dist /= self._epsilon
gradient_.append(dict(enumerate(grad_dist)))
elif self._method == "backward":
gradient_ = []
dist_zero = result.quasi_dists[0]
for dist_minus in result.quasi_dists[1:]:
grad_dist = np.zeros(2 ** circuits[i].num_qubits)
grad_dist[list(dist_zero.keys())] += list(dist_zero.values())
grad_dist[list(dist_minus.keys())] -= list(dist_minus.values())
grad_dist /= self._epsilon
gradient_.append(dict(enumerate(grad_dist)))
gradients.append(gradient_) gradients.append(gradient_)
opt = self._get_local_options(options) opt = self._get_local_options(options)

View File

@ -0,0 +1,10 @@
---
features:
- |
:class:`.FiniteDiffEstimatorGradient` and :class:`.FiniteDiffSamplerGradient`
have a new keyword argument, ``method``.
There are three methods: ``"central"``, ``"forward"``, and ``"backward"``.
This option changes the gradient calculation method.
"central" calculates :math:`\frac{f(x+e)-f(x-e)}{2e}`, "forward"
:math:`\frac{f(x+e) - f(x)}{e}`, and "backward" :math:`\frac{f(x)-f(x-e)}{e}` where
:math:`e` is the offset epsilon.

View File

@ -35,14 +35,20 @@ from qiskit.quantum_info import Operator, SparsePauliOp, Pauli
from qiskit.quantum_info.random import random_pauli_list from qiskit.quantum_info.random import random_pauli_list
from qiskit.test import QiskitTestCase from qiskit.test import QiskitTestCase
gradient_factories = [
lambda estimator: FiniteDiffEstimatorGradient(estimator, epsilon=1e-6, method="central"),
lambda estimator: FiniteDiffEstimatorGradient(estimator, epsilon=1e-6, method="forward"),
lambda estimator: FiniteDiffEstimatorGradient(estimator, epsilon=1e-6, method="backward"),
ParamShiftEstimatorGradient,
LinCombEstimatorGradient,
]
@ddt @ddt
class TestEstimatorGradient(QiskitTestCase): class TestEstimatorGradient(QiskitTestCase):
"""Test Estimator Gradient""" """Test Estimator Gradient"""
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_operators(self, grad): def test_gradient_operators(self, grad):
"""Test the estimator gradient for different operators""" """Test the estimator gradient for different operators"""
estimator = Estimator() estimator = Estimator()
@ -51,10 +57,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc.h(0) qc.h(0)
qc.p(a, 0) qc.p(a, 0)
qc.h(0) qc.h(0)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
op = SparsePauliOp.from_list([("Z", 1)]) op = SparsePauliOp.from_list([("Z", 1)])
correct_result = -1 / np.sqrt(2) correct_result = -1 / np.sqrt(2)
param = [np.pi / 4] param = [np.pi / 4]
@ -67,9 +70,7 @@ class TestEstimatorGradient(QiskitTestCase):
value = gradient.run([qc], [op], [param]).result().gradients[0] value = gradient.run([qc], [op], [param]).result().gradients[0]
self.assertAlmostEqual(value[0], correct_result, 3) self.assertAlmostEqual(value[0], correct_result, 3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_p(self, grad): def test_gradient_p(self, grad):
"""Test the estimator gradient for p""" """Test the estimator gradient for p"""
estimator = Estimator() estimator = Estimator()
@ -78,10 +79,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc.h(0) qc.h(0)
qc.p(a, 0) qc.p(a, 0)
qc.h(0) qc.h(0)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
op = SparsePauliOp.from_list([("Z", 1)]) op = SparsePauliOp.from_list([("Z", 1)])
param_list = [[np.pi / 4], [0], [np.pi / 2]] param_list = [[np.pi / 4], [0], [np.pi / 2]]
correct_results = [[-1 / np.sqrt(2)], [0], [-1]] correct_results = [[-1 / np.sqrt(2)], [0], [-1]]
@ -90,9 +88,7 @@ class TestEstimatorGradient(QiskitTestCase):
for j, value in enumerate(gradients): for j, value in enumerate(gradients):
self.assertAlmostEqual(value, correct_results[i][j], 3) self.assertAlmostEqual(value, correct_results[i][j], 3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_u(self, grad): def test_gradient_u(self, grad):
"""Test the estimator gradient for u""" """Test the estimator gradient for u"""
estimator = Estimator() estimator = Estimator()
@ -103,10 +99,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc.h(0) qc.h(0)
qc.u(a, b, c, 0) qc.u(a, b, c, 0)
qc.h(0) qc.h(0)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
op = SparsePauliOp.from_list([("Z", 1)]) op = SparsePauliOp.from_list([("Z", 1)])
param_list = [[np.pi / 4, 0, 0], [np.pi / 4, np.pi / 4, np.pi / 4]] param_list = [[np.pi / 4, 0, 0], [np.pi / 4, np.pi / 4, np.pi / 4]]
@ -116,18 +109,13 @@ class TestEstimatorGradient(QiskitTestCase):
for j, value in enumerate(gradients): for j, value in enumerate(gradients):
self.assertAlmostEqual(value, correct_results[i][j], 3) self.assertAlmostEqual(value, correct_results[i][j], 3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_efficient_su2(self, grad): def test_gradient_efficient_su2(self, grad):
"""Test the estimator gradient for EfficientSU2""" """Test the estimator gradient for EfficientSU2"""
estimator = Estimator() estimator = Estimator()
qc = EfficientSU2(2, reps=1) qc = EfficientSU2(2, reps=1)
op = SparsePauliOp.from_list([("ZI", 1)]) op = SparsePauliOp.from_list([("ZI", 1)])
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
param_list = [ param_list = [
[np.pi / 4 for param in qc.parameters], [np.pi / 4 for param in qc.parameters],
[np.pi / 2 for param in qc.parameters], [np.pi / 2 for param in qc.parameters],
@ -149,9 +137,7 @@ class TestEstimatorGradient(QiskitTestCase):
gradients = gradient.run([qc], [op], [param]).result().gradients[0] gradients = gradient.run([qc], [op], [param]).result().gradients[0]
np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3) np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient],
)
def test_gradient_2qubit_gate(self, grad): def test_gradient_2qubit_gate(self, grad):
"""Test the estimator gradient for 2 qubit gates""" """Test the estimator gradient for 2 qubit gates"""
estimator = Estimator() estimator = Estimator()
@ -165,10 +151,7 @@ class TestEstimatorGradient(QiskitTestCase):
for i, param in enumerate(param_list): for i, param in enumerate(param_list):
a = Parameter("a") a = Parameter("a")
qc = QuantumCircuit(2) qc = QuantumCircuit(2)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
if gate is RZZGate: if gate is RZZGate:
qc.h([0, 1]) qc.h([0, 1])
@ -179,9 +162,7 @@ class TestEstimatorGradient(QiskitTestCase):
gradients = gradient.run([qc], [op], [param]).result().gradients[0] gradients = gradient.run([qc], [op], [param]).result().gradients[0]
np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3) np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_parameter_coefficient(self, grad): def test_gradient_parameter_coefficient(self, grad):
"""Test the estimator gradient for parameter variables with coefficients""" """Test the estimator gradient for parameter variables with coefficients"""
estimator = Estimator() estimator = Estimator()
@ -191,10 +172,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc.u(qc.parameters[0], qc.parameters[1], qc.parameters[3], 1) qc.u(qc.parameters[0], qc.parameters[1], qc.parameters[3], 1)
qc.p(2 * qc.parameters[0] + 1, 0) qc.p(2 * qc.parameters[0] + 1, 0)
qc.rxx(qc.parameters[0] + 2, 0, 1) qc.rxx(qc.parameters[0] + 2, 0, 1)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
param_list = [[np.pi / 4 for _ in qc.parameters], [np.pi / 2 for _ in qc.parameters]] param_list = [[np.pi / 4 for _ in qc.parameters], [np.pi / 2 for _ in qc.parameters]]
correct_results = [ correct_results = [
[-0.7266653, -0.4905135, -0.0068606, -0.9228880], [-0.7266653, -0.4905135, -0.0068606, -0.9228880],
@ -205,9 +183,7 @@ class TestEstimatorGradient(QiskitTestCase):
gradients = gradient.run([qc], [op], [param]).result().gradients[0] gradients = gradient.run([qc], [op], [param]).result().gradients[0]
np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3) np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_parameters(self, grad): def test_gradient_parameters(self, grad):
"""Test the estimator gradient for parameters""" """Test the estimator gradient for parameters"""
estimator = Estimator() estimator = Estimator()
@ -216,10 +192,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc = QuantumCircuit(1) qc = QuantumCircuit(1)
qc.rx(a, 0) qc.rx(a, 0)
qc.rx(b, 0) qc.rx(b, 0)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
param_list = [[np.pi / 4, np.pi / 2]] param_list = [[np.pi / 4, np.pi / 2]]
correct_results = [ correct_results = [
[-0.70710678], [-0.70710678],
@ -229,9 +202,7 @@ class TestEstimatorGradient(QiskitTestCase):
gradients = gradient.run([qc], [op], [param], parameters=[[a]]).result().gradients[0] gradients = gradient.run([qc], [op], [param], parameters=[[a]]).result().gradients[0]
np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3) np.testing.assert_allclose(gradients, correct_results[i], atol=1e-3)
@combine( @combine(grad=gradient_factories)
grad=[FiniteDiffEstimatorGradient, ParamShiftEstimatorGradient, LinCombEstimatorGradient]
)
def test_gradient_multi_arguments(self, grad): def test_gradient_multi_arguments(self, grad):
"""Test the estimator gradient for multiple arguments""" """Test the estimator gradient for multiple arguments"""
estimator = Estimator() estimator = Estimator()
@ -241,10 +212,7 @@ class TestEstimatorGradient(QiskitTestCase):
qc.rx(a, 0) qc.rx(a, 0)
qc2 = QuantumCircuit(1) qc2 = QuantumCircuit(1)
qc2.rx(b, 0) qc2.rx(b, 0)
if grad is FiniteDiffEstimatorGradient: gradient = grad(estimator)
gradient = grad(estimator, epsilon=1e-6)
else:
gradient = grad(estimator)
param_list = [[np.pi / 4], [np.pi / 2]] param_list = [[np.pi / 4], [np.pi / 2]]
correct_results = [ correct_results = [
[-0.70710678], [-0.70710678],

View File

@ -34,12 +34,20 @@ from qiskit.primitives import Sampler
from qiskit.result import QuasiDistribution from qiskit.result import QuasiDistribution
from qiskit.test import QiskitTestCase from qiskit.test import QiskitTestCase
gradient_factories = [
lambda sampler: FiniteDiffSamplerGradient(sampler, epsilon=1e-6, method="central"),
lambda sampler: FiniteDiffSamplerGradient(sampler, epsilon=1e-6, method="forward"),
lambda sampler: FiniteDiffSamplerGradient(sampler, epsilon=1e-6, method="backward"),
ParamShiftSamplerGradient,
LinCombSamplerGradient,
]
@ddt @ddt
class TestSamplerGradient(QiskitTestCase): class TestSamplerGradient(QiskitTestCase):
"""Test Sampler Gradient""" """Test Sampler Gradient"""
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_p(self, grad): def test_gradient_p(self, grad):
"""Test the sampler gradient for p""" """Test the sampler gradient for p"""
sampler = Sampler() sampler = Sampler()
@ -49,10 +57,7 @@ class TestSamplerGradient(QiskitTestCase):
qc.p(a, 0) qc.p(a, 0)
qc.h(0) qc.h(0)
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [[np.pi / 4], [0], [np.pi / 2]] param_list = [[np.pi / 4], [0], [np.pi / 2]]
correct_results = [ correct_results = [
[{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}], [{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}],
@ -65,7 +70,7 @@ class TestSamplerGradient(QiskitTestCase):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_u(self, grad): def test_gradient_u(self, grad):
"""Test the sampler gradient for u""" """Test the sampler gradient for u"""
sampler = Sampler() sampler = Sampler()
@ -77,10 +82,7 @@ class TestSamplerGradient(QiskitTestCase):
qc.u(a, b, c, 0) qc.u(a, b, c, 0)
qc.h(0) qc.h(0)
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [[np.pi / 4, 0, 0], [np.pi / 4, np.pi / 4, np.pi / 4]] param_list = [[np.pi / 4, 0, 0], [np.pi / 4, np.pi / 4, np.pi / 4]]
correct_results = [ correct_results = [
[{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}, {0: 0, 1: 0}, {0: 0, 1: 0}], [{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}, {0: 0, 1: 0}, {0: 0, 1: 0}],
@ -92,16 +94,13 @@ class TestSamplerGradient(QiskitTestCase):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_efficient_su2(self, grad): def test_gradient_efficient_su2(self, grad):
"""Test the sampler gradient for EfficientSU2""" """Test the sampler gradient for EfficientSU2"""
sampler = Sampler() sampler = Sampler()
qc = EfficientSU2(2, reps=1) qc = EfficientSU2(2, reps=1)
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [ param_list = [
[np.pi / 4 for param in qc.parameters], [np.pi / 4 for param in qc.parameters],
[np.pi / 2 for param in qc.parameters], [np.pi / 2 for param in qc.parameters],
@ -189,7 +188,7 @@ class TestSamplerGradient(QiskitTestCase):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_2qubit_gate(self, grad): def test_gradient_2qubit_gate(self, grad):
"""Test the sampler gradient for 2 qubit gates""" """Test the sampler gradient for 2 qubit gates"""
sampler = Sampler() sampler = Sampler()
@ -211,16 +210,13 @@ class TestSamplerGradient(QiskitTestCase):
qc = QuantumCircuit(2) qc = QuantumCircuit(2)
qc.append(gate(a), [qc.qubits[0], qc.qubits[1]], []) qc.append(gate(a), [qc.qubits[0], qc.qubits[1]], [])
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
gradients = gradient.run([qc], [param]).result().gradients[0] gradients = gradient.run([qc], [param]).result().gradients[0]
for j, quasi_dist in enumerate(gradients): for j, quasi_dist in enumerate(gradients):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_parameter_coefficient(self, grad): def test_gradient_parameter_coefficient(self, grad):
"""Test the sampler gradient for parameter variables with coefficients""" """Test the sampler gradient for parameter variables with coefficients"""
sampler = Sampler() sampler = Sampler()
@ -231,10 +227,7 @@ class TestSamplerGradient(QiskitTestCase):
qc.p(2 * qc.parameters[0] + 1, 0) qc.p(2 * qc.parameters[0] + 1, 0)
qc.rxx(qc.parameters[0] + 2, 0, 1) qc.rxx(qc.parameters[0] + 2, 0, 1)
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [[np.pi / 4 for _ in qc.parameters], [np.pi / 2 for _ in qc.parameters]] param_list = [[np.pi / 4 for _ in qc.parameters], [np.pi / 2 for _ in qc.parameters]]
correct_results = [ correct_results = [
[ [
@ -297,7 +290,7 @@ class TestSamplerGradient(QiskitTestCase):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 2) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 2)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_parameters(self, grad): def test_gradient_parameters(self, grad):
"""Test the sampler gradient for parameters""" """Test the sampler gradient for parameters"""
sampler = Sampler() sampler = Sampler()
@ -307,10 +300,7 @@ class TestSamplerGradient(QiskitTestCase):
qc.rx(a, 0) qc.rx(a, 0)
qc.rz(b, 0) qc.rz(b, 0)
qc.measure_all() qc.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [[np.pi / 4, np.pi / 2]] param_list = [[np.pi / 4, np.pi / 2]]
correct_results = [ correct_results = [
[{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}], [{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}],
@ -321,7 +311,7 @@ class TestSamplerGradient(QiskitTestCase):
for k in quasi_dist: for k in quasi_dist:
self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3) self.assertAlmostEqual(quasi_dist[k], correct_results[i][j][k], 3)
@combine(grad=[FiniteDiffSamplerGradient, ParamShiftSamplerGradient, LinCombSamplerGradient]) @combine(grad=gradient_factories)
def test_gradient_multi_arguments(self, grad): def test_gradient_multi_arguments(self, grad):
"""Test the sampler gradient for multiple arguments""" """Test the sampler gradient for multiple arguments"""
sampler = Sampler() sampler = Sampler()
@ -333,10 +323,7 @@ class TestSamplerGradient(QiskitTestCase):
qc2 = QuantumCircuit(1) qc2 = QuantumCircuit(1)
qc2.rx(b, 0) qc2.rx(b, 0)
qc2.measure_all() qc2.measure_all()
if grad is FiniteDiffSamplerGradient: gradient = grad(sampler)
gradient = grad(sampler, epsilon=1e-6)
else:
gradient = grad(sampler)
param_list = [[np.pi / 4], [np.pi / 2]] param_list = [[np.pi / 4], [np.pi / 2]]
correct_results = [ correct_results = [
[{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}], [{0: -0.5 / np.sqrt(2), 1: 0.5 / np.sqrt(2)}],