Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add callback to SGD and CMA-ES optimizer #1335

Merged
merged 10 commits into from
Jun 5, 2024
16 changes: 14 additions & 2 deletions src/qibo/optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def myloss(parameters, circuit):

backend = _check_backend(backend)

return sgd(loss, initial_parameters, args, options, compile, backend)
return sgd(loss, initial_parameters, args, callback, options, compile, backend)
else:
from qibo.backends import _check_backend

Expand Down Expand Up @@ -213,7 +213,15 @@ def newtonian(
return m.fun, m.x, m


def sgd(loss, initial_parameters, args=(), options=None, compile=False, backend=None):
def sgd(
loss,
initial_parameters,
args=(),
callback=None,
options=None,
compile=False,
backend=None,
):
"""Stochastic Gradient Descent (SGD) optimizer using Tensorflow backpropagation.

See `tf.keras.Optimizers <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers>`_
Expand All @@ -225,6 +233,7 @@ def sgd(loss, initial_parameters, args=(), options=None, compile=False, backend=
initial_parameters (np.ndarray): Initial guess for the variational
parameters.
args (tuple): optional arguments for the loss function.
        callback (callable): function called after each epoch, receiving the
            current vector of variational parameters as its only argument.
options (dict): Dictionary with options for the SGD optimizer. Supports
the following keys:

Expand All @@ -234,6 +243,7 @@ def sgd(loss, initial_parameters, args=(), options=None, compile=False, backend=
- ``'nmessage'`` (int, default: ``1e3``): Every how many epochs to print
a message of the loss function.
"""

if not backend.name == "tensorflow":
raise_error(RuntimeError, "SGD optimizer requires Tensorflow backend.")

Expand Down Expand Up @@ -265,6 +275,8 @@ def opt_step():

for e in range(sgd_options["nepochs"]):
l = opt_step()
if callback is not None:
callback(vparams)
if e % sgd_options["nmessage"] == 1:
log.info("ite %d : loss %f", e, l.numpy())

Expand Down
10 changes: 9 additions & 1 deletion tests/test_models_variational.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,16 @@ def test_vqe(backend, method, options, compile, filename):
np.random.seed(0)
initial_parameters = np.random.uniform(0, 2 * np.pi, 2 * nqubits * layers + nqubits)
v = models.VQE(circuit, hamiltonian)

def callback(parameters):
pass
MatteoRobbiati marked this conversation as resolved.
Show resolved Hide resolved

best, params, _ = v.minimize(
initial_parameters, method=method, options=options, compile=compile
initial_parameters,
method=method,
options=options,
compile=compile,
callback=callback,
)
if method == "cma":
# remove `outcmaes` folder
Expand Down