Commit

Merge branch 'main' into coupling-map

paniash committed Sep 13, 2021
2 parents a8a69e0 + f0492b1 commit ba9f7f1
Showing 46 changed files with 1,273 additions and 557 deletions.
15 changes: 10 additions & 5 deletions qiskit/algorithms/linear_solvers/hhl.py
@@ -310,12 +310,15 @@ def construct_circuit(
         self,
         matrix: Union[List, np.ndarray, QuantumCircuit],
         vector: Union[List, np.ndarray, QuantumCircuit],
+        neg_vals: Optional[bool] = True,
     ) -> QuantumCircuit:
         """Construct the HHL circuit.

         Args:
             matrix: The matrix specifying the system, i.e. A in Ax=b.
             vector: The vector specifying the right hand side of the equation in Ax=b.
+            neg_vals: States whether the matrix has negative eigenvalues. If False the
+                computation becomes cheaper.

         Returns:
             The HHL circuit.
@@ -376,24 +379,26 @@ def construct_circuit(
         else:
             kappa = 1
         # Update the number of qubits required to represent the eigenvalues
-        nl = max(nb + 1, int(np.log2(kappa)) + 1)
+        # The +neg_vals is to register negative eigenvalues because
+        # e^{-2 \pi i \lambda} = e^{2 \pi i (1 - \lambda)}
+        nl = max(nb + 1, int(np.log2(kappa)) + 1) + neg_vals

         # check if the matrix can calculate bounds for the eigenvalues
         if hasattr(matrix_circuit, "eigs_bounds") and matrix_circuit.eigs_bounds() is not None:
             lambda_min, lambda_max = matrix_circuit.eigs_bounds()
             # Constant so that the minimum eigenvalue is represented exactly, since it contributes
-            # the most to the solution of the system
-            delta = self._get_delta(nl, lambda_min, lambda_max)
+            # the most to the solution of the system. -1 to take into account the sign qubit
+            delta = self._get_delta(nl - neg_vals, lambda_min, lambda_max)
             # Update evolution time
-            matrix_circuit.evolution_time = 2 * np.pi * delta / lambda_min
+            matrix_circuit.evolution_time = 2 * np.pi * delta / lambda_min / (2 ** neg_vals)
             # Update the scaling of the solution
             self.scaling = lambda_min
         else:
             delta = 1 / (2 ** nl)
             print("The solution will be calculated up to a scaling factor.")

         if self._exact_reciprocal:
-            reciprocal_circuit = ExactReciprocal(nl, delta)
+            reciprocal_circuit = ExactReciprocal(nl, delta, neg_vals=neg_vals)
             # Update number of ancilla qubits
             na = matrix_circuit.num_ancillas
         else:
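
For context: quantum phase estimation registers eigenvalues modulo 1, so a negative eigenvalue shows up through the identity quoted in the committed comment, e^{-2 \pi i \lambda} = e^{2 \pi i (1 - \lambda)}, at the cost of one extra sign qubit in the eigenvalue register (the `+ neg_vals` above). A minimal usage sketch against this revision follows; the 2x2 matrix and right-hand side are illustrative, not taken from the commit:

    import numpy as np
    from qiskit.algorithms.linear_solvers.hhl import HHL

    # Illustrative Hermitian system A|x> = |b> with one negative eigenvalue.
    matrix = np.array([[0.5, -0.75], [-0.75, 0.5]])  # eigenvalues 1.25 and -0.25
    vector = np.array([1.0, 0.0])

    hhl = HHL()
    # neg_vals=True (the default) adds the sign qubit; for a matrix known to be
    # positive definite, neg_vals=False yields a cheaper circuit.
    circuit = hhl.construct_circuit(matrix, vector, neg_vals=True)
    print(circuit.num_qubits)
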
41 changes: 28 additions & 13 deletions qiskit/algorithms/minimum_eigen_solvers/vqe.py
@@ -487,22 +487,37 @@ def compute_minimum_eigenvalue(
         )

         start_time = time()
-        opt_params, opt_value, nfev = self.optimizer.optimize(
-            num_vars=len(initial_point),
-            objective_function=energy_evaluation,
-            gradient_function=gradient,
-            variable_bounds=bounds,
-            initial_point=initial_point,
-        )
+
+        # keep this until Optimizer.optimize is removed
+        try:
+            opt_result = self.optimizer.minimize(
+                fun=energy_evaluation, x0=initial_point, jac=gradient, bounds=bounds
+            )
+        except AttributeError:
+            # self.optimizer is an optimizer with the deprecated interface that uses
+            # ``optimize`` instead of ``minimize``
+            warnings.warn(
+                "Using an optimizer that is run with the ``optimize`` method is "
+                "deprecated as of Qiskit Terra 0.19.0 and will be unsupported no "
+                "sooner than 3 months after the release date. Instead use an optimizer "
+                "providing ``minimize`` (see qiskit.algorithms.optimizers.Optimizer).",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+            opt_result = self.optimizer.optimize(
+                len(initial_point), energy_evaluation, gradient, bounds, initial_point
+            )
+
         eval_time = time() - start_time

         result = VQEResult()
-        result.optimal_point = opt_params
-        result.optimal_parameters = dict(zip(self._ansatz_params, opt_params))
-        result.optimal_value = opt_value
-        result.cost_function_evals = nfev
+        result.optimal_point = opt_result.x
+        result.optimal_parameters = dict(zip(self._ansatz_params, opt_result.x))
+        result.optimal_value = opt_result.fun
+        result.cost_function_evals = opt_result.nfev
         result.optimizer_time = eval_time
-        result.eigenvalue = opt_value + 0j
+        result.eigenvalue = opt_result.fun + 0j
         result.eigenstate = self._get_eigenstate(result.optimal_parameters)

         logger.info(
@@ -516,7 +531,7 @@ def compute_minimum_eigenvalue(
         self._ret = result

         if aux_operators is not None:
-            aux_values = self._eval_aux_ops(opt_params, aux_operators, expectation=expectation)
+            aux_values = self._eval_aux_ops(opt_result.x, aux_operators, expectation=expectation)
             result.aux_operator_eigenvalues = aux_values[0]

         return result
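
The try/except above is the compatibility shim: optimizers exposing the new ``minimize`` are used directly, while old-style ones fall through to the deprecated tuple-returning ``optimize``. A condensed sketch of the same dispatch outside VQE, assuming an optimizer that already provides ``minimize`` in this release (e.g. COBYLA); the quadratic standing in for ``energy_evaluation`` is illustrative:

    import numpy as np
    from qiskit.algorithms.optimizers import COBYLA

    def objective(x):
        # toy stand-in for VQE's energy_evaluation
        return float(np.sum((np.asarray(x) - 1.0) ** 2))

    optimizer = COBYLA(maxiter=200)
    x0 = np.zeros(3)

    try:
        # new-style interface: returns an OptimizerResult
        opt_result = optimizer.minimize(fun=objective, x0=x0)
        print(opt_result.x, opt_result.fun, opt_result.nfev)
    except AttributeError:
        # old-style interface: returns a (point, value, nfev) tuple
        point, value, nfev = optimizer.optimize(len(x0), objective, initial_point=x0)
        print(point, value, nfev)
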
3 changes: 2 additions & 1 deletion qiskit/algorithms/optimizers/__init__.py
@@ -34,6 +34,7 @@
    :toctree: ../stubs/
    :nosignatures:
 
+   OptimizerResult
    OptimizerSupportLevel
    Optimizer
@@ -117,7 +118,7 @@
 from .nlopts.direct_l_rand import DIRECT_L_RAND
 from .nlopts.esch import ESCH
 from .nlopts.isres import ISRES
-from .optimizer import Optimizer, OptimizerSupportLevel
+from .optimizer import Optimizer, OptimizerResult, OptimizerSupportLevel
 from .p_bfgs import P_BFGS
 from .powell import POWELL
 from .qnspsa import QNSPSA
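
With the re-export in place, ``OptimizerResult`` can be imported from the package root. A short sketch of the attributes this commit relies on (``x``, ``fun``, ``nfev``, and, where an optimizer reports it, ``nit``); the values are made up:

    from qiskit.algorithms.optimizers import OptimizerResult

    result = OptimizerResult()
    result.x = [0.1, 0.2]  # optimal parameters
    result.fun = -1.37     # optimal value
    result.nfev = 52       # number of objective evaluations
    print(result.x, result.fun, result.nfev)
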
72 changes: 49 additions & 23 deletions qiskit/algorithms/optimizers/adam_amsgrad.py
@@ -18,7 +18,8 @@
 import csv
 import numpy as np
 from qiskit.utils import algorithm_globals
-from .optimizer import Optimizer, OptimizerSupportLevel
+from qiskit.utils.deprecation import deprecate_arguments
+from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT

 # pylint: disable=invalid-name

@@ -193,33 +194,56 @@ def load_params(self, load_dir: str) -> None:
             t = t[1:-1]
             self._t = np.fromstring(t, dtype=int, sep=" ")

+    @deprecate_arguments(
+        {
+            "objective_function": "fun",
+            "initial_point": "x0",
+            "gradient_function": "jac",
+        }
+    )
+    # pylint: disable=arguments-differ
     def minimize(
         self,
-        objective_function: Callable[[np.ndarray], float],
-        initial_point: np.ndarray,
-        gradient_function: Callable[[np.ndarray], float],
-    ) -> Tuple[np.ndarray, float, int]:
-        """Run the minimization.
+        fun: Callable[[POINT], float],
+        x0: POINT,
+        jac: Optional[Callable[[POINT], POINT]] = None,
+        bounds: Optional[List[Tuple[float, float]]] = None,
+        # pylint:disable=unused-argument
+        objective_function: Optional[Callable[[np.ndarray], float]] = None,
+        initial_point: Optional[np.ndarray] = None,
+        gradient_function: Optional[Callable[[np.ndarray], float]] = None,
+        # ) -> Tuple[np.ndarray, float, int]:
+    ) -> OptimizerResult:  # TODO find proper way to deprecate return type
+        """Minimize the scalar function.

         Args:
-            objective_function: A function handle to the objective function.
-            initial_point: The initial iteration point.
-            gradient_function: A function handle to the gradient of the objective function.
+            fun: The scalar function to minimize.
+            x0: The initial point for the minimization.
+            jac: The gradient of the scalar function ``fun``.
+            bounds: Bounds for the variables of ``fun``. This argument might be ignored if the
+                optimizer does not support bounds.
+            objective_function: DEPRECATED. A function handle to the objective function.
+            initial_point: DEPRECATED. The initial iteration point.
+            gradient_function: DEPRECATED. A function handle to the gradient of the objective
+                function.

         Returns:
-            A tuple of (optimal parameters, optimal value, number of iterations).
+            The result of the optimization, containing e.g. the result as attribute ``x``.
         """
-        derivative = gradient_function(initial_point)
+        if jac is None:
+            jac = Optimizer.wrap_function(Optimizer.gradient_num_diff, (fun, self._eps))
+
+        derivative = jac(x0)
         self._t = 0
         self._m = np.zeros(np.shape(derivative))
         self._v = np.zeros(np.shape(derivative))
         if self._amsgrad:
             self._v_eff = np.zeros(np.shape(derivative))

-        params = params_new = initial_point
+        params = params_new = x0
         while self._t < self._maxiter:
             if self._t > 0:
-                derivative = gradient_function(params)
+                derivative = jac(params)
             self._t += 1
             self._m = self._beta_1 * self._m + (1 - self._beta_1) * derivative
             self._v = self._beta_2 * self._v + (1 - self._beta_2) * derivative * derivative
@@ -236,12 +260,18 @@ def minimize(

             if self._snapshot_dir:
                 self.save_params(self._snapshot_dir)

             # check termination
             if np.linalg.norm(params - params_new) < self._tol:
-                return params_new, objective_function(params_new), self._t
-            else:
-                params = params_new
+                break
+
+            params = params_new

-        return params_new, objective_function(params_new), self._t
+        result = OptimizerResult()
+        result.x = params_new
+        result.fun = fun(params_new)
+        result.nfev = self._t
+        return result

     def optimize(
         self,
@@ -273,10 +303,6 @@ def optimize(
         )
         if initial_point is None:
             initial_point = algorithm_globals.random.random(num_vars)
-        if gradient_function is None:
-            gradient_function = Optimizer.wrap_function(
-                Optimizer.gradient_num_diff, (objective_function, self._eps)
-            )

-        point, value, nfev = self.minimize(objective_function, initial_point, gradient_function)
-        return point, value, nfev
+        result = self.minimize(objective_function, initial_point, gradient_function)
+        return result.x, result.fun, result.nfev
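
A usage sketch of the reworked ADAM entry point. When ``jac`` is omitted, ``minimize`` now builds the finite-difference gradient itself via ``Optimizer.gradient_num_diff`` (that default previously lived in ``optimize``, as the deletion above shows). The objective is illustrative:

    import numpy as np
    from qiskit.algorithms.optimizers import ADAM

    def objective(x):
        return float(np.sum(np.asarray(x) ** 2))

    adam = ADAM(maxiter=100, lr=0.1)
    # jac is omitted, so a numerical gradient is used internally
    result = adam.minimize(fun=objective, x0=np.array([1.0, -0.5]))
    print(result.x, result.fun, result.nfev)
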
38 changes: 26 additions & 12 deletions qiskit/algorithms/optimizers/aqgd.py
@@ -13,11 +13,11 @@
"""Analytical Quantum Gradient Descent (AQGD) optimizer."""

import logging
from typing import Callable, Tuple, List, Dict, Union, Any
from typing import Callable, Tuple, List, Dict, Union, Any, Optional

import numpy as np
from qiskit.utils.validation import validate_range_exclusive_max
from .optimizer import Optimizer, OptimizerSupportLevel
from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT
from ..exceptions import AlgorithmError

logger = logging.getLogger(__name__)
@@ -302,9 +302,20 @@ def optimize(
         super().optimize(
             num_vars, objective_function, gradient_function, variable_bounds, initial_point
         )
+        result = self.minimize(
+            objective_function, initial_point, gradient_function, variable_bounds
+        )
+        return result.x, result.fun, result.nfev

-        params = np.array(initial_point)
-        momentum = np.zeros(shape=(num_vars,))
+    def minimize(
+        self,
+        fun: Callable[[POINT], float],
+        x0: POINT,
+        jac: Optional[Callable[[POINT], POINT]] = None,
+        bounds: Optional[List[Tuple[float, float]]] = None,
+    ) -> OptimizerResult:
+        params = np.asarray(x0)
+        momentum = np.zeros(shape=(params.size,))
         # empty out history of previous objectives/gradients/parameters
         # (in case this object is re-used)
         self._prev_loss = []
@@ -331,13 +342,11 @@
                 break

             # Calculate objective function and estimate of analytical gradient
-            if gradient_function is None:
-                objval, gradient = self._compute_objective_fn_and_gradient(
-                    params, objective_function
-                )
+            if jac is None:
+                objval, gradient = self._compute_objective_fn_and_gradient(params, fun)
             else:
-                objval = objective_function(params)
-                gradient = gradient_function(params)
+                objval = fun(params)
+                gradient = jac(params)

             logger.info(
                 " Iter: %4d | Obj: %11.6f | Grad Norm: %f",
@@ -360,5 +369,10 @@
             epoch += 1
         # end epoch iteration

-        # return last parameter values, objval estimate, and objective evaluation count
-        return params, objval, self._eval_count
+        result = OptimizerResult()
+        result.x = params
+        result.fun = objval
+        result.nfev = self._eval_count
+        result.nit = iter_count
+
+        return result
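
The same shape for AQGD; here passing ``jac`` explicitly skips ``_compute_objective_fn_and_gradient``, whose analytic-gradient estimate assumes the objective comes from a parameterized circuit (pi/2 parameter shifts). Toy functions, for illustration:

    import numpy as np
    from qiskit.algorithms.optimizers import AQGD

    def objective(x):
        return float(np.sum(np.asarray(x) ** 2))

    def gradient(x):
        return 2 * np.asarray(x)

    aqgd = AQGD(maxiter=50)
    result = aqgd.minimize(fun=objective, x0=np.array([0.7, -0.3]), jac=gradient)
    print(result.x, result.fun, result.nit)
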
35 changes: 26 additions & 9 deletions qiskit/algorithms/optimizers/bobyqa.py
@@ -12,11 +12,11 @@

"""Bound Optimization BY Quadratic Approximation (BOBYQA) optimizer."""

from typing import Any, Dict
from typing import Any, Dict, Tuple, List, Callable, Optional

import numpy as np
from qiskit.exceptions import MissingOptionalLibraryError
from .optimizer import Optimizer, OptimizerSupportLevel
from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT

try:
import skquant.opt as skq
@@ -67,6 +67,27 @@ def get_support_level(self):
     def settings(self) -> Dict[str, Any]:
         return {"maxiter": self._maxiter}

+    def minimize(
+        self,
+        fun: Callable[[POINT], float],
+        x0: POINT,
+        jac: Optional[Callable[[POINT], POINT]] = None,
+        bounds: Optional[List[Tuple[float, float]]] = None,
+    ) -> OptimizerResult:
+        res, history = skq.minimize(
+            func=fun,
+            x0=np.asarray(x0),
+            bounds=np.array(bounds),
+            budget=self._maxiter,
+            method="bobyqa",
+        )
+
+        optimizer_result = OptimizerResult()
+        optimizer_result.x = res.optpar
+        optimizer_result.fun = res.optval
+        optimizer_result.nfev = len(history)
+        return optimizer_result
+
     def optimize(
         self,
         num_vars,
@@ -79,11 +100,7 @@ def optimize(
         super().optimize(
             num_vars, objective_function, gradient_function, variable_bounds, initial_point
         )
-        res, history = skq.minimize(
-            objective_function,
-            np.array(initial_point),
-            bounds=np.array(variable_bounds),
-            budget=self._maxiter,
-            method="bobyqa",
+        result = self.minimize(
+            objective_function, initial_point, gradient_function, variable_bounds
         )
-        return res.optpar, res.optval, len(history)
+        return result.x, result.fun, result.nfev
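
Finally, a sketch for BOBYQA, which requires the optional scikit-quant package (the ``skq`` import above; ``pip install scikit-quant``) and, being a bound-constrained method, a ``bounds`` argument. The toy problem is illustrative:

    import numpy as np
    from qiskit.algorithms.optimizers import BOBYQA

    def objective(x):
        return float(np.sum((np.asarray(x) - 0.5) ** 2))

    bobyqa = BOBYQA(maxiter=100)
    result = bobyqa.minimize(
        fun=objective,
        x0=np.array([0.0, 0.0]),
        bounds=[(-1.0, 1.0), (-1.0, 1.0)],
    )
    print(result.x, result.fun, result.nfev)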