improve some rho setter options and help text (#450)
* improve some rho setter options and help text

* add a space

* really add space
DLWoodruff authored Nov 15, 2024
1 parent 5d97ec5 commit a52263c
Showing 6 changed files with 14 additions and 15 deletions.
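
The core change below is renaming the boolean option grad_rho_setter to grad_rho, together with sharper help text for the rho-setter options. A minimal sketch of how calling code sees the rename; Config, gradient_args, grad_rho, and Gradient_extension are the names used in the diff, while building a Config standalone like this is illustrative only, not taken from the commit:

    # Minimal sketch, not part of the commit: code that gated the gradient
    # extension on cfg.grad_rho_setter now gates it on cfg.grad_rho.
    from mpisppy.utils.config import Config
    from mpisppy.extensions.gradient_extension import Gradient_extension

    cfg = Config()
    cfg.gradient_args()       # registers "grad_rho" (formerly "grad_rho_setter")
    cfg.grad_rho = True       # was: cfg.grad_rho_setter = True

    ext_classes = []
    if cfg.grad_rho:          # was: if cfg.grad_rho_setter:
        ext_classes.append(Gradient_extension)

On the command line the option presumably surfaces as --grad-rho rather than --grad-rho-setter, following mpisppy's usual underscore-to-dash flag naming.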
examples/farmer/farmer_rho_demo.py (6 changes: 3 additions & 3 deletions)
@@ -84,7 +84,7 @@ def main():
if cfg.use_norm_rho_converger:
if not cfg.use_norm_rho_updater:
raise RuntimeError("--use-norm-rho-converger requires --use-norm-rho-updater")
- elif cfg.grad_rho_setter:
+ elif cfg.grad_rho:
raise RuntimeError("You cannot have--use-norm-rho-converger and --grad-rho-setter")
else:
ph_converger = NormRhoConverger
@@ -103,7 +103,7 @@ def main():
beans = (cfg, scenario_creator, scenario_denouement, all_scenario_names)

ext_classes = []
- if cfg.grad_rho_setter:
+ if cfg.grad_rho:
ext_classes.append(Gradient_extension)

if cfg.run_async:
@@ -119,7 +119,7 @@ def main():
hub_dict['opt_kwargs']['extensions'] = MultiExtension # DLW: ???? (seems to not matter)

#gradient extension kwargs
- if cfg.grad_rho_setter:
+ if cfg.grad_rho:
ext_classes.append(Gradient_extension)
hub_dict['opt_kwargs']['options']['gradient_extension_options'] = {'cfg': cfg}

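The farmer demo attaches the gradient extension through MultiExtension. A condensed sketch of that wiring, assuming MultiExtension's usual home in mpisppy.extensions.extension and a hub_dict with the shape produced by mpisppy's vanilla helpers (the bare dict below is only a stand-in so the snippet runs on its own):

    # Sketch of the extension wiring shown in the diff above; the hub_dict
    # literal is a stand-in for the one the demo builds with vanilla helpers.
    from mpisppy.utils.config import Config
    from mpisppy.extensions.extension import MultiExtension
    from mpisppy.extensions.gradient_extension import Gradient_extension

    cfg = Config()
    cfg.gradient_args()
    cfg.grad_rho = True

    hub_dict = {"opt_kwargs": {"options": {}}}      # stand-in structure
    ext_classes = [Gradient_extension] if cfg.grad_rho else []

    hub_dict["opt_kwargs"]["extensions"] = MultiExtension
    hub_dict["opt_kwargs"]["extension_kwargs"] = {"ext_classes": ext_classes}
    hub_dict["opt_kwargs"]["options"]["gradient_extension_options"] = {"cfg": cfg}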
examples/uc/gradient_uc_cylinders.py (4 changes: 2 additions & 2 deletions)
@@ -123,7 +123,7 @@ def main():
if cfg.xhat_closest_tree:
ext_classes.append(XhatClosest)

- if cfg.grad_rho_setter:
+ if cfg.grad_rho:
ext_classes.append(Gradient_extension)

hub_dict["opt_kwargs"]["extension_kwargs"] = {"ext_classes" : ext_classes}
@@ -143,7 +143,7 @@ def main():
"keep_solution" : True
}

- if cfg.grad_rho_setter:
+ if cfg.grad_rho:
hub_dict['opt_kwargs']['options']['gradient_extension_options'] = {'cfg': cfg}

if cfg.ph_mipgaps_json is not None:
mpisppy/extensions/gradient_extension.py (3 changes: 1 addition & 2 deletions)
@@ -36,8 +36,7 @@ def __init__(self, opt, comm=None):
# TBD: stop using files
# TBD: restore the rho_setter?
self.cfg_args_cache = {'rho_file_in': self.cfg.rho_file_in,
- 'grad_rho_file_out': self.cfg.grad_rho_file_out,
- 'rho_setter': self.cfg.grad_rho_setter}
+ 'grad_rho_file_out': self.cfg.grad_rho_file_out}
if self.cfg.get('grad_cost_file_out', ifmissing="") == "":
self.cfg.grad_cost_file_out = './_temp_grad_cost_file.csv'
# else:
mpisppy/generic_cylinders.py (2 changes: 1 addition & 1 deletion)
@@ -178,7 +178,7 @@ def _do_decomp(module, cfg, scenario_creator, scenario_creator_kwargs, scenario_
if cfg.rc_fixer:
vanilla.add_reduced_costs_fixer(hub_dict, cfg)

- if cfg.grad_rho_setter:
+ if cfg.grad_rho:
ext_classes.append(Gradient_extension)
hub_dict['opt_kwargs']['options']['gradient_extension_options'] = {'cfg': cfg}

mpisppy/tests/test_gradient_rho.py (2 changes: 1 addition & 1 deletion)
@@ -202,7 +202,7 @@ def test_compute_and_write_grad_rho(self):
"""

def test_rho_setter(self):
- self.cfg.grad_rho_setter = True
+ self.cfg.grad_rho = True
self.cfg.rho_file_in = './examples/rho_test_data/rho.csv'
self.rho_object = find_rho.Find_Rho(self.ph_object, self.cfg)
self.set_rho = find_rho.Set_Rho(self.cfg)
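The test pairs the renamed flag with the Find_Rho and Set_Rho helpers. A sketch of that pattern as a small helper; the find_rho module path (mpisppy.utils.find_rho) and the ready-made ph_object and cfg arguments are assumptions, with cfg standing in for a Config built as in the test's setUp:

    # Sketch only: mirrors what test_rho_setter exercises.
    from mpisppy.utils import find_rho

    def build_rho_setters(ph_object, cfg):
        cfg.grad_rho = True                                   # formerly cfg.grad_rho_setter
        cfg.rho_file_in = './examples/rho_test_data/rho.csv'  # path the test uses
        rho_object = find_rho.Find_Rho(ph_object, cfg)
        set_rho = find_rho.Set_Rho(cfg)
        return rho_object, set_rho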
mpisppy/utils/config.py (12 changes: 6 additions & 6 deletions)
@@ -147,9 +147,9 @@ def _bad_rho_setters(msg):
raise ValueError("Rho setter options do not make sense together:\n"
f"{msg}")

- if self.grad_rho_setter and self.sensi_rho:
+ if self.grad_rho and self.sensi_rho:
_bad_rho_setters("Only one rho setter can be active.")
- if not (self.grad_rho_setter or self.sensi_rho or self.sep_rho or self.reduced_costs_rho):
+ if not (self.grad_rho or self.sensi_rho or self.sep_rho or self.reduced_costs_rho):
if self.dynamic_rho_primal_crit or self.dynamic_rho_dual_crit:
_bad_rho_setters("dynamic rho only works with grad-, sensi-, and sep-rho")

@@ -434,7 +434,7 @@ def reduced_costs_rho_args(self):

def sep_rho_args(self):
self.add_to_config("sep_rho",
description="have a SepRho extension",
description="have an extension that computes rho using the seprho method from the Watson/Woodruff CMS paper",
domain=bool,
default=False)
self.add_to_config("sep_rho_multiplier",
@@ -445,7 +445,7 @@ def sensi_rho_args(self):

def sensi_rho_args(self):
self.add_to_config("sensi_rho",
description="have a SensiRho extension",
description="have an extension that sets rho values based on objective function sensitivity",
domain=bool,
default=False)
self.add_to_config("sensi_rho_multiplier",
@@ -833,8 +833,8 @@ def gradient_args(self):
# domain=float,
# default=0.1)

- self.add_to_config('grad_rho_setter',
- description="use rho setter from a rho file",
+ self.add_to_config('grad_rho',
+ description="use a gradient-based rho setter (if your problem is linear, use coeff-rho instead)",
domain=bool,
default=False)
"""
