Skip to content

Commit

Permalink
fix error in dbi_utils and variational
Browse files Browse the repository at this point in the history
  • Loading branch information
Simone-Bordoni committed Jun 19, 2024
1 parent 60caf19 commit 0e0c955
Show file tree
Hide file tree
Showing 6 changed files with 34 additions and 13 deletions.
4 changes: 3 additions & 1 deletion src/qibo/models/dbi/double_bracket.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,9 @@ def cost_expansion(self, d, n):
if self.cost is DoubleBracketCostFunction.off_diagonal_norm:
coef = off_diagonal_norm_polynomial_expansion_coef(self, d, n)
elif self.cost is DoubleBracketCostFunction.least_squares:
coef = least_squares_polynomial_expansion_coef(self, d, n)
coef = least_squares_polynomial_expansion_coef(
self, d, n, backend=self.backend
)
elif self.cost is DoubleBracketCostFunction.energy_fluctuation:
coef = energy_fluctuation_polynomial_expansion_coef(
self, d, n, self.ref_state
Expand Down
12 changes: 8 additions & 4 deletions src/qibo/models/dbi/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,13 +71,15 @@ def cs_angle_sgn(dbi_object, d, backend=None):
backend = _check_backend(backend)
d = backend.cast(d)
norm = backend.np.trace(
backend.np.dot(
backend.np.matmul(
backend.np.conj(
dbi_object.commutator(dbi_object.diagonal_h_matrix, dbi_object.h.matrix)
).T,
dbi_object.commutator(d, dbi_object.h.matrix),
)
)
if backend.name == "pytorch":
return backend.np.real(backend.np.sgn(norm))
return backend.np.real(backend.np.sign(norm))


Expand Down Expand Up @@ -212,16 +214,18 @@ def off_diagonal_norm_polynomial_expansion_coef(dbi_object, d, n):
return coef


def least_squares_polynomial_expansion_coef(dbi_object, d, n: int = 3, backend=None):
    """Return the Taylor expansion coefficients of the least-squares cost of
    ``dbi_object.h`` and diagonal operator ``d`` with respect to the double
    bracket rotation duration ``s``.

    Args:
        dbi_object: ``DoubleBracketIteration`` object providing
            ``generate_gamma_list``.
        d: diagonal operator (matrix) generating the rotation.
        n (int): expansion order; ``n`` coefficients are returned.
        backend: optional qibo backend; resolved via ``_check_backend`` when
            ``None``.

    Returns:
        list: expansion coefficients, highest-order term first.
    """
    backend = _check_backend(backend)
    # generate Gamma's where Gamma_{k+1} = [W, Gamma_k], Gamma_0 = H
    gamma_list = dbi_object.generate_gamma_list(n + 1, d)
    # Cast once, with the *resolved* backend. The original used
    # `dbi_object.backend.cast(d)` despite accepting a `backend` argument
    # (inconsistent when the two differ, e.g. pytorch) and re-cast `d` on
    # every loop iteration.
    d = backend.cast(d)
    coef = np.empty(n)
    for k in range(n):
        # k-th Taylor coefficient: Re[Tr(d @ Gamma_{k+1})] / k!
        coef[k] = backend.np.real(
            backend.np.trace(d @ gamma_list[k + 1]) / math.factorial(k)
        )
    # Highest power of `s` first (the order polynomial-root solvers expect).
    return list(reversed(coef))
Expand Down
2 changes: 2 additions & 0 deletions src/qibo/models/dbi/utils_dbr_strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,7 @@ def func_loss_to_lr(lr):
parameterization=parameterization,
pauli_operator_dict=pauli_operator_dict,
normalize=normalize,
backend=backend,
)
return dbi_object.loss(step=s, d=d_eval)

Expand All @@ -296,6 +297,7 @@ def func_loss_to_lr(lr):
parameterization=parameterization,
pauli_operator_dict=pauli_operator_dict,
normalize=normalize,
backend=backend,
)
s = dbi_object.choose_step(d=d)
dbi_object(step=s, d=d)
Expand Down
26 changes: 18 additions & 8 deletions src/qibo/optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,9 +262,6 @@ def sgd(
a message of the loss function.
"""

if not backend.name == "tensorflow":
raise_error(RuntimeError, "SGD optimizer requires Tensorflow backend.")

sgd_options = {
"nepochs": 1000000,
"nmessage": 1000,
Expand All @@ -275,20 +272,30 @@ def sgd(
sgd_options.update(options)

if backend.name == "tensorflow":
return _sgd_tf(loss, initial_parameters, args, sgd_options, compile, backend)
return _sgd_tf(
loss,
initial_parameters,
args,
sgd_options,
compile,
backend,
callback=callback,
)
elif backend.name == "pytorch":
if compile:
log.warning(
"PyTorch does not support compilation of the optimization graph."
)
return _sgd_torch(loss, initial_parameters, args, sgd_options, backend)
return _sgd_torch(
loss, initial_parameters, args, sgd_options, backend, callback=callback
)
else:
raise_error(
RuntimeError, "SGD optimizer requires Tensorflow or PyTorch backend."
)


def _sgd_torch(loss, initial_parameters, args, sgd_options, backend):
def _sgd_torch(loss, initial_parameters, args, sgd_options, backend, callback=None):

vparams = initial_parameters
optimizer = getattr(backend.np.optim, sgd_options["optimizer"])(
Expand All @@ -300,14 +307,17 @@ def _sgd_torch(loss, initial_parameters, args, sgd_options, backend):
l = loss(vparams, *args)
l.backward()
optimizer.step()

if callback is not None:
callback(backend.to_numpy(vparams))
if e % sgd_options["nmessage"] == 1:
log.info("ite %d : loss %f", e, l.item())

return loss(vparams, *args).item(), vparams.detach().numpy(), sgd_options


def _sgd_tf(loss, initial_parameters, args, sgd_options, compile, backend):
def _sgd_tf(
loss, initial_parameters, args, sgd_options, compile, backend, callback=None
):

vparams = backend.tf.Variable(initial_parameters)
optimizer = getattr(backend.tf.optimizers, sgd_options["optimizer"])(
Expand Down
2 changes: 2 additions & 0 deletions tests/test_models_dbi.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,12 +229,14 @@ def test_params_to_diagonal_operator(backend, step):
nqubits=nqubits,
parameterization=ParameterizationTypes.pauli,
pauli_operator_dict=pauli_operator_dict,
backend=backend,
),
)
operator_element = params_to_diagonal_operator(
params,
nqubits=nqubits,
parameterization=ParameterizationTypes.computational,
backend=backend,
)
for i in range(len(params)):
backend.assert_allclose(
Expand Down
1 change: 1 addition & 0 deletions tests/test_models_dbi_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
DoubleBracketIteration,
)
from qibo.models.dbi.utils import *
from qibo.models.dbi.utils_dbr_strategies import select_best_dbr_generator
from qibo.quantum_info import random_hermitian

NSTEPS = 5
Expand Down

0 comments on commit 0e0c955

Please sign in to comment.