diff --git a/src/qibo/models/variational.py b/src/qibo/models/variational.py
index bd8db47e98..cc6b8b65f3 100644
--- a/src/qibo/models/variational.py
+++ b/src/qibo/models/variational.py
@@ -39,10 +39,7 @@ def __init__(self, circuit, hamiltonian):
         self.hamiltonian = hamiltonian

     def minimize(
-        self,
-        opt,
-        compile=False,
-        epochs=100,
+        self, opt, initial_parameters, loss=None, fit_options=dict(), compile=False
     ):
         """Search for parameters which minimizes the hamiltonian expectation.
@@ -78,13 +75,12 @@ def _loss(params, circuit, hamiltonian):
         # elif isinstance(opt, qibo.optimizers.gradient_based.TensorflowSGD):
         #     loss = lambda p, c, h: self.hamiltonian.backend.to_numpy(_loss(p, c, h))

-        fit_options = {}
-        if isinstance(opt, qibo.optimizers.gradient_based.TensorflowSGD):
-            fit_options.update({"epochs": epochs})
-
-        opt.loss = loss
-        opt.args = (self.circuit, self.hamiltonian)
-        result, parameters, extra = opt.fit(**fit_options)
+        result, parameters, extra = opt.fit(
+            initial_parameters,
+            loss,
+            args=(self.circuit, self.hamiltonian),
+            fit_options=fit_options,
+        )
         self.circuit.set_parameters(parameters)
         return result, parameters, extra
@@ -219,7 +215,9 @@ def hamiltonian(self, t):
         st = self.schedule(t)
         return self._h0 * (1 - st) + self._h1 * st

-    def minimize(self, opt, compile=False, epochs=100):
+    def minimize(
+        self, opt, initial_parameters, fit_options=dict(), compile=False, epochs=100
+    ):
         """
         Performs minimization to find the ground state of the problem Hamiltonian.
@@ -232,11 +230,13 @@ def minimize(self, opt, compile=False, epochs=100):
         from qibo import models

         t = 0.0
+        params = initial_parameters
         while (t - self._t_max) <= self.ATOL_TIME:
             H = self.hamiltonian(t)
             vqe = models.VQE(self._circuit, H)
-            best, params, _ = vqe.minimize(opt, compile, epochs)
-            opt.params = params
+            best, params, _ = vqe.minimize(
+                opt, params, fit_options=fit_options, compile=compile
+            )
             t += self._dt
         return best, params
@@ -395,12 +395,10 @@ def minimize(
         self,
         opt,
         initial_parameters,
-        hamiltonian,
+        loss=None,
         initial_state=None,
         fit_options=dict(),
-        loss=None,
         loss_func_param=dict(),
-        epochs=100,
     ):
         """Optimizes the variational parameters of the QAOA. A few loss functions are provided for QAOA optimizations such as expected value (default), CVar which is introduced in
@@ -412,7 +410,6 @@
             opt (:class:`qibo.models.circuit.Circuit`): optimization object used to minimise the loss function
             initial_state (np.ndarray): initial state vector of the QAOA.
             loss_func_param (dict): a dictionary to pass in the loss function parameters.
-            epochs (int): number of training epochs.

         Return:
             The final energy (expectation value of the ``hamiltonian``).
@@ -472,17 +469,14 @@ def _loss(params, qaoa, hamiltonian, state):
                 _loss(p, c, h, s)
             )

-        if isinstance(opt, qibo.optimizers.gradient_based.TensorflowSGD):
-            fit_options.update({"epochs": epochs})
-        opt.args = (self, self.hamiltonian, initial_state)
         result, parameters, extra = opt.fit(
             initial_parameters,
             loss,
-            args=(self, hamiltonian, initial_state),
+            args=(self, self.hamiltonian, initial_state),
             fit_options=fit_options,
         )
-        print(extra)
+        self.set_parameters(parameters)
         return result, parameters, extra
diff --git a/src/qibo/optimizers/gradient_based.py b/src/qibo/optimizers/gradient_based.py
index 042679ebcc..b2dd131bf8 100644
--- a/src/qibo/optimizers/gradient_based.py
+++ b/src/qibo/optimizers/gradient_based.py
@@ -74,10 +74,10 @@ def __init__(

     def fit(
         self,
-        loss,
         initial_parameters,
+        loss,
         args=(),
-        epochs=100000,
+        fit_options={"epochs": 10000},
         nmessage=100,
         loss_threshold=None,
     ):
@@ -122,7 +122,7 @@ def sgd_step():
             self.backend.compile(sgd_step)

         # SGD procedure: loop over epochs
-        for epoch in range(epochs):
+        for epoch in range(fit_options["epochs"]):
             # early stopping if loss_threshold has been set
             if (
                 loss_threshold is not None
diff --git a/src/qibo/optimizers/heuristics.py b/src/qibo/optimizers/heuristics.py
index a3ee413d7a..330d98f0dc 100644
--- a/src/qibo/optimizers/heuristics.py
+++ b/src/qibo/optimizers/heuristics.py
@@ -28,7 +28,7 @@ def __init__(self, options={"sigma0": 0.5}):
         check_options(function=cma.fmin2, options=options)
         self.set_options(options)

-    def fit(self, loss, initial_parameters, args=(), fit_options={}):
+    def fit(self, initial_parameters, loss, args=(), fit_options={}):
         """Perform the optimizations via CMA-ES.

         Args:
diff --git a/src/qibo/optimizers/minimizers.py b/src/qibo/optimizers/minimizers.py
index 72fde047a4..6f4eff0ffa 100644
--- a/src/qibo/optimizers/minimizers.py
+++ b/src/qibo/optimizers/minimizers.py
@@ -45,16 +45,12 @@ def fit(self, initial_parameters, loss=None, args=(), fit_options={}):
         # update options with minimizer extra options
         self.set_options({"options": fit_options})

-        print("####", self.options)
-        print(
-            initial_parameters, loss(np.array(initial_parameters), *args), self.options
-        )
         r = minimize(
             loss,
             initial_parameters,
             args=args,
-            options=self.options,
+            **self.options,
         )

         return r.fun, r.x, r
diff --git a/tests/test_models_tsp.py b/tests/test_models_tsp.py
index 42e9379230..7fe8b50120 100644
--- a/tests/test_models_tsp.py
+++ b/tests/test_models_tsp.py
@@ -15,7 +15,6 @@ def qaoa_function_of_layer(backend, layer):
    in the number of layers and compute the distance
    of the mode of the histogram obtained from QAOA
    """
-    np.random.seed(42)
    num_cities = 3
    distance_matrix = np.array([[0, 0.9, 0.8], [0.4, 0, 0.1], [0, 0.7, 0]])
    # there are two possible cycles, one with distance 1, one with distance 1.9
@@ -33,7 +32,6 @@
        initial_state=initial_state,
        initial_parameters=np.array([0.1 for _ in range(layer)]),
        fit_options={"maxiter": 1},
-        hamiltonian=obj_hamil,
    )
    return qaoa.execute(initial_state)
diff --git a/tests/test_models_variational.py b/tests/test_models_variational.py
index 45c697cca3..1ea8f118f5 100644
--- a/tests/test_models_variational.py
+++ b/tests/test_models_variational.py
@@ -81,10 +81,8 @@ def myloss(parameters, circuit, target):

    # perform optimization
    from qibo.optimizers.minimizers import ScipyMinimizer
-    opt = ScipyMinimizer(
-        x0, myloss, args=(c, data), options={"method": method}, minimizer_kwargs=options
-    )
-    best, params, _ = opt.fit()
+    opt = ScipyMinimizer(options={"method": method})
+    best, params, _ = opt.fit(x0, myloss, args=(c, data), fit_options=options)

    if filename is not None:
        assert_regression_fixture(backend, params, filename)
@@ -98,8 +96,8 @@ def myloss(parameters, circuit, target):
    ("parallel_L-BFGS-B", {"maxiter": 1}, True, None),
    ("parallel_L-BFGS-B", {"maxiter": 1}, False, None),
    ("cma", {"maxfevals": 2}, False, None),
-    ("sgd", {"nepochs": 5}, False, None),
-    ("sgd", {"nepochs": 5}, True, None),
+    ("sgd", {"epochs": 5}, False, None),
+    ("sgd", {"epochs": 5}, True, None),
 ]
@@ -129,20 +127,20 @@ def test_vqe(backend, method, options, compile, filename):
    v = models.VQE(circuit, hamiltonian)

    if method == "cma":
-        opt = CMAES(initial_parameters, optimizer_kwargs=options)
+        opt = CMAES()
    elif method == "parallel_L-BFGS-B":
-        opt = ParallelBFGS(initial_parameters, minimizer_kwargs=options)
+        opt = ParallelBFGS()
    elif method == "sgd":
-        opt = TensorflowSGD(initial_parameters)
+        opt = TensorflowSGD()
    else:
-        opt = ScipyMinimizer(
-            initial_parameters, options={"method": method}, minimizer_kwargs=options
-        )
+        opt = ScipyMinimizer(options={"method": method})

-    best, params, _ = v.minimize(opt, compile=compile, epochs=5)
+    best, params, _ = v.minimize(
+        opt, initial_parameters, fit_options=options, compile=compile
+    )

    if method == "cma":
        # remove `outcmaes` folder
@@ -248,9 +246,9 @@ def test_qaoa_errors(backend):
        qaoa = models.QAOA(h, solver="rk4", accelerators={"/GPU:0": 2})
    # minimize with odd number of parameters
    qaoa = models.QAOA(h)
-    opt = CMAES(np.random.random(5))
+    opt = CMAES()
    with pytest.raises(ValueError):
-        qaoa.minimize(opt)
+        qaoa.minimize(opt, initial_parameters=np.random.random(5))

 test_names = "method,options,dense,filename"
@@ -258,7 +256,7 @@
    ("BFGS", {"maxiter": 1}, True, "qaoa_bfgs.out"),
    ("BFGS", {"maxiter": 1}, False, "trotter_qaoa_bfgs.out"),
    ("Powell", {"maxiter": 1}, False, "trotter_qaoa_powell.out"),
-    ("sgd", {"nepochs": 5}, True, None),
+    ("sgd", {"epochs": 5}, True, None),
 ]
@@ -271,14 +269,14 @@ def test_qaoa_optimization(backend, method, options, dense, filename):
    initial_p = [0.05, 0.06, 0.07, 0.08]

    if method == "sgd":
-        opt = TensorflowSGD(initial_p)
+        opt = TensorflowSGD()
    else:
-        opt = ScipyMinimizer(
-            initial_p, options={"method": method}, minimizer_kwargs=options
-        )
+        opt = ScipyMinimizer(options={"method": method})

-    best, params, extra = qaoa.minimize(opt, epochs=5)
+    best, params, extra = qaoa.minimize(
+        opt, initial_parameters=initial_p, fit_options=options
+    )

    if filename is not None:
        assert_regression_fixture(backend, params, filename)
@@ -351,21 +349,20 @@ def test_aavqe(backend, method, options, compile, filename):
    initial_parameters = np.random.uniform(0, 2 * np.pi, 2 * nqubits * layers + nqubits)

    if method == "cma":
-        opt = CMAES(initial_parameters, optimizer_kwargs=options)
+        opt = CMAES()
    elif method == "parallel_L-BFGS-B":
-        opt = ParallelBFGS(initial_parameters, minimizer_kwargs=options)
+        opt = ParallelBFGS()
    elif method == "sgd":
-        opt = TensorflowSGD(initial_parameters)
+        opt = TensorflowSGD()
    else:
-        opt = ScipyMinimizer(
-            initial_parameters, options={"method": method}, minimizer_kwargs=options
-        )
+        opt = ScipyMinimizer(options={"method": method})

-    best, params = aavqe.minimize(opt, compile=compile)
-    print(best)
+    best, params = aavqe.minimize(
+        opt, initial_parameters, fit_options=options, compile=compile
+    )

    if method == "cma":
        # remove `outcmaes` folder
@@ -387,9 +384,11 @@ def test_custom_loss(test_input, test_param, expected):
    qaoa = models.QAOA(h)
    initial_p = [0.314, 0.22, 0.05, 0.59]
-    opt = ScipyMinimizer(initial_p, loss=test_input)
+    opt = ScipyMinimizer()

-    best, params, extra = qaoa.minimize(opt, loss_func_param=test_param)
+    best, params, extra = qaoa.minimize(
+        opt, initial_p, loss=test_input, loss_func_param=test_param
+    )
    # best, params, _ = qaoa.minimize(
    #     initial_p, loss_func=test_input, loss_func_param=test_param
    # )
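
Taken together, the patch standardizes the optimizer interface: optimizers are
constructed with static configuration only, and everything run-specific
(initial parameters, loss, loss arguments, fitting options) goes through
`fit(initial_parameters, loss, args=(), fit_options={})`, or through the model
`minimize` wrappers that forward to it. A minimal sketch of the new calling
convention, based on the signatures and tests in this diff; the two-qubit
ansatz and TFIM Hamiltonian below are illustrative placeholders, not part of
the patch:

    import numpy as np
    from qibo import gates, hamiltonians, models
    from qibo.optimizers.minimizers import ScipyMinimizer

    # Placeholder VQE problem: a tiny parametrized ansatz and Hamiltonian.
    circuit = models.Circuit(2)
    circuit.add(gates.RY(q, theta=0.0) for q in range(2))
    circuit.add(gates.CZ(0, 1))
    hamiltonian = hamiltonians.TFIM(2)
    vqe = models.VQE(circuit, hamiltonian)

    # Construction now carries static configuration only; the initial
    # parameters and per-run options are passed at fit time, here via
    # VQE.minimize, which forwards them to opt.fit(...).
    opt = ScipyMinimizer(options={"method": "Powell"})
    initial_parameters = np.random.uniform(0, 2 * np.pi, 2)
    best, params, extra = vqe.minimize(
        opt, initial_parameters, fit_options={"maxiter": 10}
    )

Epoch counts for `TensorflowSGD` travel the same route, e.g.
`fit_options={"epochs": 5}`, which is why the test parametrizations change
from `{"nepochs": 5}` to `{"epochs": 5}` above.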