From c57ad618186f1a1c647effba62e489c08ea32bb7 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Thu, 29 Feb 2024 11:08:55 +0100 Subject: [PATCH 01/12] Update to 1.9.4 (#330) * fixing syntax issue in post_init of dataclass (#327) * fixing syntax issue in post_init of dataclass * phoenics dropped due to maintenance resources * more convenient randomization initialization for OO, avoiding numpy warnings * Update version.py (#329) --- requirements.txt | 1 - src/tequila/apps/adapt/adapt.py | 2 +- src/tequila/optimizers/__init__.py | 15 +- src/tequila/optimizers/optimizer_gpyopt.py | 2 - src/tequila/optimizers/optimizer_phoenics.py | 348 ------------------ .../quantumchemistry/chemistry_tools.py | 4 +- .../quantumchemistry/madness_interface.py | 2 +- .../quantumchemistry/orbital_optimizer.py | 10 +- src/tequila/quantumchemistry/qc_base.py | 2 +- src/tequila/version.py | 2 +- tests/test_noise_opt.py | 14 - tests/test_phoenics.py | 68 ---- 12 files changed, 13 insertions(+), 457 deletions(-) delete mode 100644 src/tequila/optimizers/optimizer_phoenics.py delete mode 100644 tests/test_phoenics.py diff --git a/requirements.txt b/requirements.txt index 6f9ae975..0dd2e329 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,6 @@ qulacs # default simulator (best integration), remove if the installation gives #qibo <= 0.1.1 # can not be installed in the same environment as gpyopt #optional optimizers -#phoenics # version on PyPi isc urrently broken, we recommend to install from source (AAG github) #gpyopt # not in combination with qibo as quantum backend #optional third party libraries diff --git a/src/tequila/apps/adapt/adapt.py b/src/tequila/apps/adapt/adapt.py index 6b8dd9e9..4b9a5549 100644 --- a/src/tequila/apps/adapt/adapt.py +++ b/src/tequila/apps/adapt/adapt.py @@ -21,7 +21,7 @@ class AdaptParameters: degeneracy_threshold: float = 5.e-4 silent: bool = False - def __post__init__(self): + def __post_init__(self): # avoid stacking of same operator-types in a row if "method_options" in self.optimizer_args: if "gtol" in self.optimizer_args["method_options"]: diff --git a/src/tequila/optimizers/__init__.py b/src/tequila/optimizers/__init__.py index 0022f7de..032606c1 100644 --- a/src/tequila/optimizers/__init__.py +++ b/src/tequila/optimizers/__init__.py @@ -16,7 +16,7 @@ class _Optimizers: methods: list = None -SUPPORTED_OPTIMIZERS = ['scipy', 'phoenics', 'gpyopt', 'gd'] +SUPPORTED_OPTIMIZERS = ['scipy', 'gpyopt', 'gd'] INSTALLED_OPTIMIZERS = {} INSTALLED_OPTIMIZERS['scipy'] = _Optimizers(cls=OptimizerSciPy, minimize=minimize_scipy, @@ -37,19 +37,6 @@ class _Optimizers: except ImportError: has_gpyopt = False -has_phoenics = False -try: - from tequila.optimizers.optimizer_phoenics import OptimizerPhoenics - from tequila.optimizers.optimizer_phoenics import minimize as minimize_phoenics - - INSTALLED_OPTIMIZERS['phoenics'] = _Optimizers(cls=OptimizerPhoenics, - minimize=minimize_phoenics, - methods=OptimizerPhoenics.available_methods()) - has_phoenics = True -except ImportError: - has_phoenics = False - - def show_available_optimizers(module=None): """ Returns diff --git a/src/tequila/optimizers/optimizer_gpyopt.py b/src/tequila/optimizers/optimizer_gpyopt.py index 50834e07..cfde41b9 100644 --- a/src/tequila/optimizers/optimizer_gpyopt.py +++ b/src/tequila/optimizers/optimizer_gpyopt.py @@ -4,8 +4,6 @@ import numbers from tequila.objective.objective import Variable import warnings - -warnings.simplefilter("ignore") import GPyOpt from GPyOpt.methods import BayesianOptimization import 
numpy as np diff --git a/src/tequila/optimizers/optimizer_phoenics.py b/src/tequila/optimizers/optimizer_phoenics.py deleted file mode 100644 index e2aa77f5..00000000 --- a/src/tequila/optimizers/optimizer_phoenics.py +++ /dev/null @@ -1,348 +0,0 @@ -from tequila.objective.objective import Objective -from tequila.optimizers.optimizer_base import Optimizer, OptimizerResults, dataclass -import typing -import numbers -from tequila.objective.objective import Variable -import copy -import warnings -import pickle -import time -from tequila import TequilaException -warnings.simplefilter("ignore") -with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - warnings.filterwarnings("ignore") -import phoenics - -import numpy as np -from numpy import pi as pi -from tequila.simulators.simulator_api import compile_objective -import os - -#numpy, tf, etc can get real, real, real, noisy here. We suppress it. -warnings.filterwarnings('ignore', category=DeprecationWarning) -os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' -warnings.filterwarnings('ignore', category=FutureWarning) - -@dataclass -class PhoenicsResults(OptimizerResults): - - observations: list = None - phoenics_instance: phoenics.Phoenics = None - -class OptimizerPhoenics(Optimizer): - """ - wrapper to allow optimization of objectives with Phoenics, a bayesian optimizer. - See: https://github.com/aspuru-guzik-group/phoenics - """ - @classmethod - def available_methods(cls): - return ["phoenics"] - - def __init__(self, maxiter, backend=None, save_history=True, minimize=True, - samples=None, silent=None, noise=None, device=None): - self._minimize = minimize - - super().__init__(backend=backend, maxiter=maxiter, samples=samples, - noise=noise,device=device, - save_history=save_history, silent=silent) - - def _process_for_sim(self, recommendation, passive_angles): - """ - convert from the phoenics suggestion format to a version recognizable by objectives. - Parameters - ---------- - recommendation: dict: - the a phoenics suggestion. - passive_angles: dict: - passive angles not optimized over. - - Returns - ------- - dict: - dict of Bariable, float pairs. - """ - rec = copy.deepcopy(recommendation) - for part in rec: - for k, v in part.items(): - part[k] = v.item() - if passive_angles is not None: - for k, v in passive_angles.items(): - part[k] = v - return rec - - def _process_for_phoenics(self, pset, result, passive_angles=None): - """ - Convert results of a call to an objective into a form interpretable by phoenics. - Parameters - ---------- - pset: dict: - the parameters evaluated, as a dictionary - result: - the result of calling some objective, using pset as parameters. - passive_angles: dict, optional: - passive_angles, not optimized over. - - Returns - ------- - dict: - the a dictionary, formatted as phoenics prefers it, for use as an 'observation'. - """ - new = copy.deepcopy(pset) - for k, v in new.items(): - new[k] = np.array([v], dtype=np.float32) - if passive_angles is not None: - for k in passive_angles.keys(): - del new[k] - new['Energy'] = result - - return new - - def _make_phoenics_object(self, objective, passive_angles=None, conf=None, *args, **kwargs): - """ - instantiate phoenics, to perform optimization. - - Parameters - ---------- - objective: Objective: - the objective to optimize over. - passive_angles: dict, optional: - a dictionary of angles not to optimize over. - conf: optional: - a user built configuration object or file, from which to initialize a phoenics object. 
- For advanced users only. - args - kwargs - - Returns - ------- - phoenics.Phoenics - a phoenics object configured to optimize an objective. - """ - if conf is not None: - if hasattr(conf, 'readlines'): - bird = phoenics.Phoenics(config_file=conf) - else: - bird = phoenics.Phoenics(config_dict=conf) - - return bird - op = objective.extract_variables() - if passive_angles is not None: - for i, thing in enumerate(op): - if thing in passive_angles.keys(): - op.remove(thing) - - config = {"general": {"auto_desc_gen": "False", "batches": 5, "boosted": "False", "parallel": "False", "scratch_dir":os.getcwd()}} - config['parameters'] = [ - {'name': k, 'periodic': 'True', 'type': 'continuous', 'size': 1, 'low': 0, 'high': 2 * pi} for k in op] - if self._minimize is True: - config['objectives'] = [{"name": "Energy", "goal": "minimize"}] - else: - config['objectives'] = [{"name": "Energy", "goal": "maximize"}] - - for k,v in kwargs.items(): - if hasattr(k, "lower") and k.lower() in config["general"]: - config["general"][k.lower()] = v - - bird = phoenics.Phoenics(config_dict=config) - return bird - - def __call__(self, objective: Objective, - maxiter=None, - variables: typing.List[Variable] = None, - initial_values: typing.Dict[Variable, numbers.Real] = None, - previous=None, - phoenics_config=None, - file_name=None, - *args, - **kwargs): - """ - Perform optimization with phoenics. - - Parameters - ---------- - objective: Objective - the objective to optimize. - maxiter: int: - (Default value = None) - if not None, overwrite the init maxiter with new number. - variables: list: - (Default value = None) - which variables to optimize over. If None: all of the variables in objective are used. - initial_values: dict: - (Default value = None) - an initial point to begin optimization from. Random, if None. - previous: - previous observations, formatted for phoenics, to use in optimization. For use by advanced users. - phoenics_config: - a config for a phoenics object. - file_name: - a file - args - kwargs - - Returns - ------- - PhoenicsResults: - the results of optimization by phoenics. - - """ - - objective = objective.contract() - active_angles, passive_angles, variables = self.initialize_variables(objective, - initial_values=initial_values, - variables=variables) - - if maxiter is None: - maxiter = 10 - - obs = [] - bird = self._make_phoenics_object(objective, passive_angles, phoenics_config, *args, **kwargs) - if previous is not None: - if type(previous) is str: - try: - obs = pickle.load(open(previous, 'rb')) - except: - print( - 'failed to load previous observations, which are meant to be a pickle file. Starting fresh.') - elif type(previous) is list: - if all([type(k) == dict for k in previous]): - obs = previous - else: - print('previous observations were not in the correct format (list of dicts). Starting fresh.') - - - - if not (type(file_name) == str or file_name == None): - raise TequilaException('file_name must be a string, or None. 
Recieved {}'.format(type(file_name))) - - best = None - best_angles = None - - # avoid multiple compilations - compiled_objective = compile_objective(objective=objective, backend=self.backend, - device=self.device, - samples=self.samples, noise=self.noise) - - if not self.silent: - print('phoenics has recieved') - print("objective: \n") - print(objective) - print("noise model : {}".format(self.noise)) - print("samples : {}".format(self.samples)) - print("maxiter : {}".format(maxiter)) - print("variables : {}".format(objective.extract_variables())) - print("passive var : {}".format(passive_angles)) - print('now lets begin') - for i in range(0, maxiter): - with warnings.catch_warnings(): - np.testing.suppress_warnings() - warnings.simplefilter("ignore") - warnings.filterwarnings("ignore", category=FutureWarning) - - precs = bird.recommend(observations=obs) - - runs = [] - recs = self._process_for_sim(precs, passive_angles=passive_angles) - - start = time.time() - for j, rec in enumerate(recs): - En = compiled_objective(variables=rec, samples=self.samples, noise=self.noise) - runs.append((rec, En)) - if not self.silent: - if self.print_level > 2: - print("energy = {:+2.8f} , angles=".format(En), rec) - else: - print("energy = {:+2.8f}".format(En)) - stop = time.time() - if not self.silent: - print("Quantum Objective evaluations: {}s Wall-Time".format(stop-start)) - - for run in runs: - angles = run[0] - E = run[1] - if best is None: - best = E - best_angles = angles - else: - if self._minimize: - if E < best: - best = E - best_angles = angles - else: - if E > best: - best = E - best_angles = angles - - if self.save_history: - self.history.energies.append(E) - self.history.angles.append(angles) - obs.append(self._process_for_phoenics(angles, E, passive_angles=passive_angles)) - - if file_name is not None: - with open(file_name, 'wb') as file: - pickle.dump(obs, file) - - if not self.silent: - print("best energy after {} iterations : {:+2.8f}".format(self.maxiter, best)) - return PhoenicsResults(energy=best, variables=best_angles, history=self.history, observations=obs,phoenics_instance=bird) - - -def minimize(objective: Objective, - maxiter: int = None, - samples: int = None, - variables: typing.List = None, - initial_values: typing.Dict = None, - backend: str = None, - noise=None, - device: str = None, - previous: typing.Union[str, list] = None, - phoenics_config: typing.Union[str, typing.Dict] = None, - file_name: str = None, - silent: bool = False, - *args, - **kwargs) -> PhoenicsResults: - """ - - Parameters - ---------- - objective: Objective: - The tequila objective to optimize - initial_values: typing.Dict[typing.Hashable, numbers.Real], optional: - Initial values as dictionary of Hashable types (variable keys) and floating point numbers. - If given None they will be randomized. - variables: typing.List[typing.Hashable], optional: - List of Variables to optimize - samples: int, optional: - samples/shots to take in every run of the quantum circuits (None activates full wavefunction simulation) - maxiter: int: - how many iterations of phoenics to run. - Note that this is NOT identical to the number of times the circuit will run. - backend: str, optional: - Simulator backend, will be automatically chosen if set to None - noise: NoiseModel, optional: - a noise model to apply to the circuits of Objective. - device: optional: - the device from which to (potentially, simulatedly) sample all quantum circuits employed in optimization. - previous: optional: - Previous phoenics observations. 
If string, the name of a file from which to load them. Else, a list. - phoenics_config: optional: - a pre-made phoenics configuration. if str, the name of a file from which to load it; Else, a dictionary. - Individual keywords of the 'general' sections can also be passed down as kwargs - file_name: str, optional: - where to save output to, if save_to_file is True. - kwargs: dict: - Send down more keywords for single replacements in the phoenics config 'general' section, like e.g. batches=5, - boosted=True etc - Returns - ------- - PhoenicsResults: - the result of an optimization by phoenics. - """ - - optimizer = OptimizerPhoenics(samples=samples, backend=backend, - noise=noise,device=device, - maxiter=maxiter, silent=silent) - return optimizer(objective=objective, initial_values=initial_values, variables=variables, previous=previous, - maxiter=maxiter, - phoenics_config=phoenics_config, file_name=file_name, *args, **kwargs) diff --git a/src/tequila/quantumchemistry/chemistry_tools.py b/src/tequila/quantumchemistry/chemistry_tools.py index 4d9b2e79..ff1f3ff1 100644 --- a/src/tequila/quantumchemistry/chemistry_tools.py +++ b/src/tequila/quantumchemistry/chemistry_tools.py @@ -569,7 +569,7 @@ def _verify_ordering_of(self, trials=100): return True def __init__(self, elems: numpy.ndarray = None, active_indices: list = None, ordering: str = None, - size_full: int = None): + size_full: int = None, verify=False): """ Parameters ---------- @@ -611,7 +611,7 @@ def __init__(self, elems: numpy.ndarray = None, active_indices: list = None, ord if self.order == 4: if ordering is None: ordering = self.identify_ordering() - else: + elif verify: try: # some RDMs are really sloppy (depends on backend) auto_ordering=self.identify_ordering() if auto_ordering is not ordering: diff --git a/src/tequila/quantumchemistry/madness_interface.py b/src/tequila/quantumchemistry/madness_interface.py index e56d58e3..9273046c 100644 --- a/src/tequila/quantumchemistry/madness_interface.py +++ b/src/tequila/quantumchemistry/madness_interface.py @@ -122,7 +122,7 @@ def __init__(self, parameters: ParametersQC, h = "failed" g = "failed" - if "failed" in h or "failed" in g: + if (isinstance(h, str) and "failed" in h) or (isinstance(g, str) and "failed" in g): status = "found {}_htensor.npy={}\n".format(name, "failed" not in h) status += "found {}_gtensor.npy={}\n".format(name, "failed" not in g) try: diff --git a/src/tequila/quantumchemistry/orbital_optimizer.py b/src/tequila/quantumchemistry/orbital_optimizer.py index 06b36553..a7bca8d7 100644 --- a/src/tequila/quantumchemistry/orbital_optimizer.py +++ b/src/tequila/quantumchemistry/orbital_optimizer.py @@ -118,13 +118,15 @@ def optimize_orbitals(molecule, circuit=None, vqe_solver=None, pyscf_arguments=N print(wrapper) if initial_guess is not None: if hasattr(initial_guess, "lower"): - if "random" in initial_guess.lower(): - scale = 0.1 + if "random" or "near_zero" in initial_guess.lower(): + scale = 1.e-3 + if "random" in initial_guess.lower(): + scale = 1.0 loc = 0.0 if "scale" in kwargs: - scale = kwargs["scale"] + scale = float(initial_guess.split("scale")[1].split("_")[0].split("=")[1]) if "loc" in kwargs: - loc = kwargs["loc"] + loc = float(initial_guess.split("loc")[1].split("_")[0].split("=")[1]) initial_guess = numpy.eye(no) + numpy.random.normal(scale=scale, loc=loc, size=no * no).reshape(no, no) else: raise Exception("Unknown initial_guess={}".format(initial_guess.lower())) diff --git a/src/tequila/quantumchemistry/qc_base.py b/src/tequila/quantumchemistry/qc_base.py 
index f2da7b28..9689b042 100644 --- a/src/tequila/quantumchemistry/qc_base.py +++ b/src/tequila/quantumchemistry/qc_base.py @@ -1939,7 +1939,7 @@ def _reset_rdm(rdm): self._rdm2 = _assemble_rdm2_spinful(evals_2) if get_rdm2 else self._rdm2 if get_rdm2: - rdm2 = NBodyTensor(elems=self.rdm2, ordering="dirac") + rdm2 = NBodyTensor(elems=self.rdm2, ordering="dirac", verify=False) rdm2.reorder(to=ordering) rdm2 = rdm2.elems self._rdm2 = rdm2 diff --git a/src/tequila/version.py b/src/tequila/version.py index f4531a81..a54a7564 100644 --- a/src/tequila/version.py +++ b/src/tequila/version.py @@ -1,2 +1,2 @@ -__version__ = "1.9.3" +__version__ = "1.9.4" __author__ = "Tequila Developers " diff --git a/tests/test_noise_opt.py b/tests/test_noise_opt.py index 043159ed..a48b9c66 100644 --- a/tests/test_noise_opt.py +++ b/tests/test_noise_opt.py @@ -59,20 +59,6 @@ def test_bit_flip_scipy_hessian(p, method): result = tq.optimizer_scipy.minimize(objective=O, samples=1, backend=simulator, method=method, noise=NM, tol=1.e-4, silent=False) - -@pytest.mark.skipif(len(samplers) == 0, reason="Missing necessary backends") -@pytest.mark.skipif(not tq.optimizers.has_phoenics, reason="Missing phoenics installation") -@pytest.mark.parametrize("p", numpy.random.uniform(0.1, .4, 1)) -def test_bit_flip_phoenics(p): - simulator = numpy.random.choice(samplers) - qubit = 0 - H = paulis.Qm(qubit) - U = gates.Rx(target=qubit, angle=tq.Variable('a')) - O = ExpectationValue(U=U, H=H) - NM = BitFlip(p, 1) - result = tq.optimizers.optimizer_phoenics.minimize(objective=O, maxiter=3, samples=1, backend=simulator, noise=NM) - - @pytest.mark.skipif(len(samplers) == 0, reason="Missing necessary backends") @pytest.mark.skipif(not tq.optimizers.has_gpyopt, reason="Missing gpyopt installation") @pytest.mark.parametrize("p", numpy.random.uniform(0.1, .4, 1)) diff --git a/tests/test_phoenics.py b/tests/test_phoenics.py deleted file mode 100644 index 0cea65eb..00000000 --- a/tests/test_phoenics.py +++ /dev/null @@ -1,68 +0,0 @@ -import pytest, numpy -import tequila as tq -import multiprocessing as mp - -# Get QC backends for parametrized testing -import select_backends -simulators = select_backends.get() -samplers = select_backends.get(sampler=True) - -has_phoenics = 'phoenics' in tq.INSTALLED_OPTIMIZERS - -@pytest.mark.dependencies -def test_dependencies(): - assert 'phoenics' in tq.INSTALLED_OPTIMIZERS - - -@pytest.mark.skipif(condition=not has_phoenics, reason="you don't have phoenics") -@pytest.mark.parametrize("simulator", simulators) -def test_execution(simulator): - U = tq.gates.Rz(angle="a", target=0) \ - + tq.gates.X(target=2) \ - + tq.gates.Ry(angle="b", target=1, control=2) \ - + tq.gates.Trotterized(angles=["c", "d"], - generators=[-0.25 * tq.paulis.Z(1), tq.paulis.X(0) + tq.paulis.Y(1)], steps=2) \ - + tq.gates.Trotterized(angles=[1.0, 2.0], - generators=[-0.25 * tq.paulis.Z(1), tq.paulis.X(0) + tq.paulis.Y(1)], steps=2) \ - + tq.gates.ExpPauli(angle="a", paulistring="X(0)Y(1)Z(2)") - - H = 1.0 * tq.paulis.X(0) + 2.0 * tq.paulis.Y(1) + 3.0 * tq.paulis.Z(2) - O = tq.ExpectationValue(U=U, H=H) - result = tq.minimize(method="phoenics", objective=O, maxiter=1, backend=simulator) - - -@pytest.mark.skipif(condition=not has_phoenics, reason="you don't have phoenics") -@pytest.mark.parametrize("simulator", samplers) -def test_execution_shot(simulator): - U = tq.gates.Rz(angle="a", target=0) \ - + tq.gates.X(target=2) \ - + tq.gates.Ry(angle="b", target=1, control=2) \ - + tq.gates.Trotterized(angles=["c", "d"], - generators=[-0.25 * 
tq.paulis.Z(1), tq.paulis.X(0) + tq.paulis.Y(1)], steps=2) \ - + tq.gates.Trotterized(angles=[1.0, 2.0], - generators=[-0.25 * tq.paulis.Z(1), tq.paulis.X(0) + tq.paulis.Y(1)], steps=2) \ - + tq.gates.ExpPauli(angle="a", paulistring="X(0)Y(1)Z(2)") - H = 1.0 * tq.paulis.X(0) + 2.0 * tq.paulis.Y(1) + 3.0 * tq.paulis.Z(2) - O = tq.ExpectationValue(U=U, H=H) - mi = 2 - result = tq.minimize(method="phoenics", objective=O, maxiter=mi, backend=simulator) - - -@pytest.mark.skipif(condition=not has_phoenics, reason="you don't have phoenics") -@pytest.mark.parametrize("simulator", simulators) -def test_one_qubit_wfn(simulator): - U = tq.gates.Trotterized(angles=["a"], steps=1, generators=[tq.paulis.Y(0)]) - H = tq.paulis.X(0) - O = tq.ExpectationValue(U=U, H=H) - result = tq.minimize(method="phoenics", objective=O, maxiter=8, backend=simulator) - assert (numpy.isclose(result.energy, -1.0, atol=1.e-2)) - - -@pytest.mark.skipif(condition=not has_phoenics, reason="you don't have phoenics") -@pytest.mark.parametrize("simulator", samplers) -def test_one_qubit_shot(simulator): - U = tq.gates.Trotterized(angles=["a"], steps=1, generators=[tq.paulis.Y(0)]) - H = tq.paulis.X(0) - O = tq.ExpectationValue(U=U, H=H) - result = tq.minimize(method="phoenics", objective=O, maxiter=3, backend=simulator, samples=10000) - assert (numpy.isclose(result.energy, -1.0, atol=1.e-1)) From a3d532b7f041d4df932f6104f5e1622cbea6f05e Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 25 Mar 2024 10:56:11 +0100 Subject: [PATCH 02/12] Update README.md --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 3cec3281..f51e8b75 100644 --- a/README.md +++ b/README.md @@ -240,6 +240,10 @@ A.G. Cadavid, I. Montalban, A. Dalal, E. Solano, N.N. Hegade Efficient DCQO Algorithm within the Impulse Regime for Portfolio Optimization [arxiv:2308.15475](https://arxiv.org/abs/2308.15475) +P.W.K. Jensen, E.R. Kjellgren, P. Reinholdt, K.M. Ziems, S. Coriani, J. Kongsted, S. Sauer +Quantum Equation of Motion with Orbital Optimization for Computing Molecular Properties in Near-Term Quantum Computing +[arxiv:2312.12386](https://arxiv.org/abs/2312.12386) + Let us know, if you want your research project and/or tutorial to be included in this list! # Dependencies From a547f3c046c417f05687f30a7b04ec28362cd356 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 1 Apr 2024 23:45:59 +0200 Subject: [PATCH 03/12] Update README.md --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index f51e8b75..9c350865 100644 --- a/README.md +++ b/README.md @@ -240,6 +240,10 @@ A.G. Cadavid, I. Montalban, A. Dalal, E. Solano, N.N. Hegade Efficient DCQO Algorithm within the Impulse Regime for Portfolio Optimization [arxiv:2308.15475](https://arxiv.org/abs/2308.15475) +A. Anand, K. Brown +Hamiltonians, groups, graphs and ansätze +[arxiv:2312.17146](https://arxiv.org/abs/2312.17146) + P.W.K. Jensen, E.R. Kjellgren, P. Reinholdt, K.M. Ziems, S. Coriani, J. Kongsted, S. Sauer Quantum Equation of Motion with Orbital Optimization for Computing Molecular Properties in Near-Term Quantum Computing [arxiv:2312.12386](https://arxiv.org/abs/2312.12386) From 086ff99b9afd9f809f6d04788bb00f37b56a3d3b Mon Sep 17 00:00:00 2001 From: "J. S. 
Kottmann" Date: Fri, 5 Apr 2024 10:57:20 +0200 Subject: [PATCH 04/12] Update version.py --- src/tequila/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tequila/version.py b/src/tequila/version.py index cb9458d9..09ed92e6 100644 --- a/src/tequila/version.py +++ b/src/tequila/version.py @@ -1,2 +1,2 @@ -__version__ = "1.9.6-dev" +__version__ = "1.9.5" __author__ = "Tequila Developers " From a98c38ed2b8638510f7e0f0c3737cb230cf02267 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:37:48 +0200 Subject: [PATCH 05/12] updated github runners to 3.9 3.10 --- .github/workflows/ci_backends.yml | 2 +- .github/workflows/ci_basic.yml | 2 +- .github/workflows/ci_basic_autograd.yml | 8 ++++---- .github/workflows/ci_chemistry_madness.yml | 2 +- .github/workflows/ci_chemistry_pyscf.yml | 2 +- .github/workflows/ci_conda_madness.yml | 2 +- .github/workflows/ci_ml.yml | 2 +- .github/workflows/ci_optimizers.yml | 2 +- .github/workflows/ci_pyquil.yml | 2 +- .github/workflows/pypi.yml | 2 +- .github/workflows/release.yml | 22 ---------------------- 11 files changed, 13 insertions(+), 35 deletions(-) delete mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/ci_backends.yml b/.github/workflows/ci_backends.yml index c9c5852c..9ae5d9ab 100644 --- a/.github/workflows/ci_backends.yml +++ b/.github/workflows/ci_backends.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8] + python-version: ['3.9'] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_basic.yml b/.github/workflows/ci_basic.yml index d57d144b..80551785 100644 --- a/.github/workflows/ci_basic.yml +++ b/.github/workflows/ci_basic.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8] + python-version: ['3.9', '3.10'] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_basic_autograd.yml b/.github/workflows/ci_basic_autograd.yml index bfcb7dfc..82c64f29 100644 --- a/.github/workflows/ci_basic_autograd.yml +++ b/.github/workflows/ci_basic_autograd.yml @@ -1,7 +1,7 @@ # This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions -name: Tequila-Test-Basic-Autograd +name: Tequila-Test-Basic-JAX on: push: @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.9', '3.10'] steps: - uses: actions/checkout@v2 @@ -28,8 +28,8 @@ jobs: run: | python -m pip install --upgrade pip pip install -r requirements.txt - pip uninstall -y jax jaxlib - pip install autograd + pip uninstall autograd + pip install -y jax jaxlib - name: Lint with flake8 run: | pip install flake8 diff --git a/.github/workflows/ci_chemistry_madness.yml b/.github/workflows/ci_chemistry_madness.yml index 2dd7e736..7c903227 100644 --- a/.github/workflows/ci_chemistry_madness.yml +++ b/.github/workflows/ci_chemistry_madness.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7] + python-version: ['3.10'] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_chemistry_pyscf.yml b/.github/workflows/ci_chemistry_pyscf.yml index 64d75b44..f68e6c64 100644 --- a/.github/workflows/ci_chemistry_pyscf.yml +++ b/.github/workflows/ci_chemistry_pyscf.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7] + 
python-version: [3.9] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_conda_madness.yml b/.github/workflows/ci_conda_madness.yml index fc70be6d..ed042d4a 100644 --- a/.github/workflows/ci_conda_madness.yml +++ b/.github/workflows/ci_conda_madness.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7] + python-version: [3.9] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_ml.yml b/.github/workflows/ci_ml.yml index aeb00d54..e649b377 100644 --- a/.github/workflows/ci_ml.yml +++ b/.github/workflows/ci_ml.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8] + python-version: [3.9] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_optimizers.yml b/.github/workflows/ci_optimizers.yml index 0d544f07..58f11e07 100644 --- a/.github/workflows/ci_optimizers.yml +++ b/.github/workflows/ci_optimizers.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7, 3.8] + python-version: [3.9] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/ci_pyquil.yml b/.github/workflows/ci_pyquil.yml index 92e39757..926ec8dc 100644 --- a/.github/workflows/ci_pyquil.yml +++ b/.github/workflows/ci_pyquil.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.7] + python-version: [3.9] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 4f3f6f3e..09e44946 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -30,7 +30,7 @@ jobs: # remove qulacs from dependencies (issues with windows and mac) # users need to install themselves if they want it - cat requirements.txt | sed "s|qulacs|#qulacs|g" > tmp.txt + #cat requirements.txt | sed "s|qulacs|#qulacs|g" > tmp.txt rm requirements.txt mv tmp.txt requirements.txt python setup.py sdist bdist_wheel diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 1c80674b..00000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: 'Tweet when released' -on: - release: - types: [released] - -jobs: - tweet: - runs-on: ubuntu-latest - steps: - - name: Tweet - id: tweet - uses: doomspec/auto-tweet-v2@v0.1.0 - env: - CONSUMER_API_KEY: ${{ secrets.TWITTER_CONSUMER_API_KEY }} - CONSUMER_API_SECRET_KEY: ${{ secrets.TWITTER_CONSUMER_API_SECRET_KEY }} - ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }} - ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }} - with: - text: | - New version released: ${{ github.event.release.name }} - ${{ github.event.release.html_url }} - - run: echo ${{ steps.tweet.outputs.response }} From 478fffb3db56e6fa7266b78cafa4aa6dfe3df776 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:39:16 +0200 Subject: [PATCH 06/12] slight update in readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 37a0275e..f7640ac7 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ Tequila can execute the underlying quantum expectation values on state of the ar - [talks and slides](https://kottmanj.github.io/talks_and_material/) # Installation -Recommended Python version is 3.9 - 3.10. +Recommended Python version is 3.9 (3.10). Tequila supports linux, osx and windows. However, not all optional dependencies are supported on windows. 
## Install from PyPi From b8fd4df6da6fa59aa9fb5550c146299be31331da Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:44:30 +0200 Subject: [PATCH 07/12] Update ci_optimizers.yml --- .github/workflows/ci_optimizers.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/ci_optimizers.yml b/.github/workflows/ci_optimizers.yml index 58f11e07..a36b9d9e 100644 --- a/.github/workflows/ci_optimizers.yml +++ b/.github/workflows/ci_optimizers.yml @@ -23,12 +23,7 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: - python-version: ${{ matrix.python-version }} - - name: Install quantum backends - run: | - pip install "cirq" "qiskit>=0.30" "qulacs" - pip uninstall pyquil -y - + python-version: ${{ matrix.python-version }} - name: Install and test GPyOpt interface (no qibo) run: | python -m pip install --upgrade pip From 6fab0bca0bfcba7063830162a5704b8b0007dce2 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:46:03 +0200 Subject: [PATCH 08/12] Update ci_basic_autograd.yml --- .github/workflows/ci_basic_autograd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_basic_autograd.yml b/.github/workflows/ci_basic_autograd.yml index 82c64f29..84a2d2e0 100644 --- a/.github/workflows/ci_basic_autograd.yml +++ b/.github/workflows/ci_basic_autograd.yml @@ -28,7 +28,7 @@ jobs: run: | python -m pip install --upgrade pip pip install -r requirements.txt - pip uninstall autograd + pip uninstall autograd -y pip install -y jax jaxlib - name: Lint with flake8 run: | From 89b3c89d03a0b474268bc875cc558c29ae990abb Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:49:14 +0200 Subject: [PATCH 09/12] Update ci_optimizers.yml --- .github/workflows/ci_optimizers.yml | 36 +---------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/.github/workflows/ci_optimizers.yml b/.github/workflows/ci_optimizers.yml index a36b9d9e..9cc938c2 100644 --- a/.github/workflows/ci_optimizers.yml +++ b/.github/workflows/ci_optimizers.yml @@ -1,35 +1 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Tequila-Test-Optimizers - -on: - push: - branches: [ master, devel ] - pull_request: - branches: [ master, devel ] - -jobs: - - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.9] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install and test GPyOpt interface (no qibo) - run: | - python -m pip install --upgrade pip - pip install --upgrade -r requirements.txt - pip install --upgrade -r requirements_gpyopt.txt - pip install -e . - - pytest tests/test_gpyopt.py --slow - +# safe to delete From 7e54e4ccfa47baf620984e43acb2dbd23b004d78 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:50:01 +0200 Subject: [PATCH 10/12] Update requirements_gpyopt.txt --- requirements_gpyopt.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements_gpyopt.txt b/requirements_gpyopt.txt index ac7fc7b0..e201bab6 100644 --- a/requirements_gpyopt.txt +++ b/requirements_gpyopt.txt @@ -1,3 +1,5 @@ +### GPyOpt no longer maintained ... 
still works with python 3.7 +### take care ### requirements for gpyopt optimizer cycler>=0.10.0 decorator>=4.0.10 From 3350c8b7636b0aeea23dfcad40232e47888579ea Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 11:51:04 +0200 Subject: [PATCH 11/12] Update ci_basic_autograd.yml --- .github/workflows/ci_basic_autograd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_basic_autograd.yml b/.github/workflows/ci_basic_autograd.yml index 84a2d2e0..25799934 100644 --- a/.github/workflows/ci_basic_autograd.yml +++ b/.github/workflows/ci_basic_autograd.yml @@ -29,7 +29,7 @@ jobs: python -m pip install --upgrade pip pip install -r requirements.txt pip uninstall autograd -y - pip install -y jax jaxlib + pip install jax jaxlib - name: Lint with flake8 run: | pip install flake8 From 887b828a66ccb3bac767d12e3326b9f25f797ce2 Mon Sep 17 00:00:00 2001 From: "J. S. Kottmann" Date: Mon, 3 Jun 2024 12:11:19 +0200 Subject: [PATCH 12/12] dropping GPyOpt support -- no maintenance for 4 years --- .github/workflows/ci_optimizers.yml | 1 - README.md | 8 +------- requirements_gpyopt.txt | 19 ------------------- 3 files changed, 1 insertion(+), 27 deletions(-) delete mode 100644 .github/workflows/ci_optimizers.yml delete mode 100644 requirements_gpyopt.txt diff --git a/.github/workflows/ci_optimizers.yml b/.github/workflows/ci_optimizers.yml deleted file mode 100644 index 9cc938c2..00000000 --- a/.github/workflows/ci_optimizers.yml +++ /dev/null @@ -1 +0,0 @@ -# safe to delete diff --git a/README.md b/README.md index f7640ac7..63c528f7 100644 --- a/README.md +++ b/README.md @@ -251,11 +251,7 @@ Quantum Equation of Motion with Orbital Optimization for Computing Molecular Pro Let us know, if you want your research project and/or tutorial to be included in this list! # Dependencies -Support for additional optimizers or quantum backends can be activated by intalling them in your environment. -Tequila will then detect them automatically. -Currently those are: [Phoenics](https://github.com/aspuru-guzik-group/phoenics) - and [GPyOpt](https://sheffieldml.github.io/GPyOpt/). -Quantum backends are treated in the same way. +Support for specific backends (quantum simulators, optimizers, quantum chemistry) can be activated by intalling them in your environment. ## Quantum Backends Currently supported @@ -389,5 +385,3 @@ Tequila runs on Mac OSX. You might get in trouble with installing qulacs since it currently does not work with Apple's clang compiler. You need to install latest GNU compile (at least gcc-7 and g++7) and set them as default before installing qulacs over pip. -## Qibo and GPyOpt -Currently you can't use Qibo and GPyOpt within the same environment. diff --git a/requirements_gpyopt.txt b/requirements_gpyopt.txt deleted file mode 100644 index e201bab6..00000000 --- a/requirements_gpyopt.txt +++ /dev/null @@ -1,19 +0,0 @@ -### GPyOpt no longer maintained ... still works with python 3.7 -### take care -### requirements for gpyopt optimizer -cycler>=0.10.0 -decorator>=4.0.10 -numpy>=1.11.2 -six>=1.10.0 -python-dateutil>=2.6.0 -paramz>=0.7.0 -GPy>=1.8 -matplotlib>=1.5.3 -pyparsing>=2.1.10 -pytz>=2016.7 -scipy>=0.18.1 -mock>=2.0.0 -PyDOE >= 0.3.0 -sobol_seq >=0.1 -emcee>=2.2.1 -gpyopt
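
Illustrative aside on PATCH 01/12 (a sketch, not part of the patch series): the key fix there renames `__post__init__` to `__post_init__` in the AdaptParameters dataclass. Python's dataclasses machinery only invokes a hook named exactly `__post_init__` after the generated `__init__`; under the misspelled name the method was silently ignored, so the option handling in that hook (which inspects `optimizer_args["method_options"]` and its `gtol` entry) never ran. The snippet below demonstrates the mechanism with a hypothetical stand-in class; names, fields and the setdefault logic are illustrative assumptions, not tequila's actual AdaptParameters.

    # Sketch only: hypothetical stand-in showing why the hook must be named __post_init__.
    from dataclasses import dataclass, field

    @dataclass
    class AdaptParametersSketch:
        optimizer_args: dict = field(default_factory=lambda: {"method_options": {}})
        gradient_convergence: float = 1.e-2

        def __post_init__(self):
            # dataclasses calls this automatically after the generated __init__;
            # a method spelled __post__init__ would never be invoked
            if "method_options" in self.optimizer_args:
                self.optimizer_args["method_options"].setdefault("gtol", self.gradient_convergence)

    p = AdaptParametersSketch()
    print(p.optimizer_args)  # {'method_options': {'gtol': 0.01}}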