From afef8bfcf32239b8689734872c816da27fa00aa8 Mon Sep 17 00:00:00 2001
From: John Siirola
Date: Wed, 18 Dec 2024 04:15:12 -0700
Subject: [PATCH 1/8] Add wrapper to propagate flush through StreamHandle pipes

---
 pyomo/common/tee.py            | 53 ++++++++++++++++++++++++++++++++++
 pyomo/common/tests/test_tee.py |  2 ++
 2 files changed, 55 insertions(+)

diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py
index 500f7b6f58d..461ff7c15ed 100644
--- a/pyomo/common/tee.py
+++ b/pyomo/common/tee.py
@@ -50,6 +50,32 @@
 logger = logging.getLogger(__name__)
 
 
+class _SignalFlush(object):
+    def __init__(self, ostream, handle):
+        super().__setattr__('_ostream', ostream)
+        super().__setattr__('_handle', handle)
+
+    def flush(self):
+        self._ostream.flush()
+        self._handle.flush = True
+
+    def __getattr__(self, attr):
+        return getattr(self._ostream, attr)
+
+    def __setattr__(self, attr,val):
+        return setattr(self._ostream, attr, val)
+
+
+class _AutoFlush(_SignalFlush):
+    def write(self, data):
+        self._ostream.write(data)
+        self.flush()
+
+    def writelines(self, data):
+        self._ostream.writelines(data)
+        self.flush()
+
+
 class redirect_fd(object):
     """Redirect a file descriptor to a new file or file descriptor.
 
@@ -220,6 +246,7 @@ class _StreamHandle(object):
     def __init__(self, mode, buffering, encoding, newline):
         self.buffering = buffering
         self.newlines = newline
+        self.flush = False
         self.read_pipe, self.write_pipe = os.pipe()
         if not buffering and 'b' not in mode:
             # While we support "unbuffered" behavior in text mode,
@@ -233,6 +260,13 @@ def __init__(self, mode, buffering, encoding, newline):
             newline=newline,
             closefd=False,
         )
+        if not self.buffering and buffering:
+            # We want this stream to be unbuffered, but Python doesn't
+            # allow it for text streams.  Mock up an unbuffered stream
+            # using _AutoFlush
+            self.write_file = _AutoFlush(self.write_file, self)
+        else:
+            self.write_file = _SignalFlush(self.write_file, self)
         self.decoder_buffer = b''
         try:
             self.encoding = encoding or self.write_file.encoding
@@ -452,7 +486,11 @@ def _start(self, handle):
             pass
 
     def _streamReader(self, handle):
+        flush = False
         while True:
+            if handle.flush:
+                flush = True
+                handle.flush = False
             new_data = os.read(handle.read_pipe, io.DEFAULT_BUFFER_SIZE)
             if not new_data:
                 break
@@ -463,6 +501,11 @@ def _streamReader(self, handle):
             handle.decodeIncomingBuffer()
             # Now, output whatever we have decoded to the output streams
             handle.writeOutputBuffer(self.ostreams)
+            if flush:
+                flush = False
+                if self.buffering:
+                    for s in self.ostreams:
+                        s.flush()
         #
         # print("STREAM READER: DONE")
 
@@ -489,9 +532,13 @@ def _mergedReader(self):
                     _fast_poll_ct = _poll_rampup
             else:
                 new_data = None
+            flush = False
             if _mswindows:
                 for handle in list(handles):
                     try:
+                        if handle.flush:
+                            flush = True
+                            handle.flush = False
                         pipe = get_osfhandle(handle.read_pipe)
                         numAvail = PeekNamedPipe(pipe, 0)[1]
                         if numAvail:
@@ -520,6 +567,9 @@ def _mergedReader(self):
                     continue
 
                 handle = ready_handles[0]
+                if handle.flush:
+                    flush = True
+                    handle.flush = False
                 new_data = os.read(handle.read_pipe, io.DEFAULT_BUFFER_SIZE)
                 if not new_data:
                     handles.remove(handle)
@@ -532,5 +582,8 @@ def _mergedReader(self):
 
             # Now, output whatever we have decoded to the output streams
             handle.writeOutputBuffer(self.ostreams)
+            if flush and self.buffering:
+                for s in self.ostreams:
+                    s.flush()
         #
         # print("MERGED READER: DONE")
 
diff --git a/pyomo/common/tests/test_tee.py b/pyomo/common/tests/test_tee.py
index 2b2a418d63d..0d5e7549c20 100644
--- a/pyomo/common/tests/test_tee.py
+++ b/pyomo/common/tests/test_tee.py
@@ 
-197,6 +197,8 @@ def test_deadlock(self): class MockStream(object): def write(self, data): time.sleep(0.2) + def flush(self): + pass _save = tee._poll_timeout, tee._poll_timeout_deadlock tee._poll_timeout = tee._poll_interval * 2**5 # 0.0032 From 0bd150ea3d67627b498c89c52d9b4762d31b6a35 Mon Sep 17 00:00:00 2001 From: John Siirola Date: Wed, 18 Dec 2024 04:16:10 -0700 Subject: [PATCH 2/8] Support passing TeeStream to capture_output --- pyomo/common/tee.py | 38 ++++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py index 461ff7c15ed..6e87deae863 100644 --- a/pyomo/common/tee.py +++ b/pyomo/common/tee.py @@ -178,10 +178,33 @@ def __exit__(self, t, v, traceback): class capture_output(object): - """ - Drop-in substitute for PyUtilib's capture_output. - Takes in a StringIO, file-like object, or filename and temporarily - redirects output to a string buffer. + """Context manager to capture output sent to sys.stdout and sys.stderr + + This is a drop-in substitute for PyUtilib's capture_output to + temporarily redirect output to the provided stream or file. + + Parameters + ---------- + output : io.TextIOBase, TeeStream, str, or None + + Output stream where all captured stdout/stderr data is sent. If + a ``str`` is provided, it is used as a file name and opened + (potentially overwriting any existing file). If ``None``, a + :class:`io.StringIO` object is created and used. + + capture_fd : bool + + If True, we will also redirect the low-level file descriptors + associated with stdout (1) and stderr (2) to the ``output``. + This is useful for capturing output emitted directly to the + process stdout / stderr by external compiled modules. + + Returns + ------- + io.TextIOBase + + This is the output stream object where all data is sent. 
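+
+    Examples
+    --------
+    A minimal usage sketch: by default, anything written to stdout or
+    stderr inside the block is collected in a ``StringIO`` object that
+    is returned by the context manager::
+
+        from pyomo.common.tee import capture_output
+
+        with capture_output() as OUT:
+            print("Hello, World")
+        # After the block exits, the captured text is available:
+        assert OUT.getvalue() == "Hello, World\n"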
+    """

     def __init__(self, output=None, capture_fd=False):
@@ -195,12 +218,15 @@ def __init__(self, output=None, capture_fd=False):
         self.fd_redirect = None
 
     def __enter__(self):
+        self.old = (sys.stdout, sys.stderr)
         if isinstance(self.output, str):
             self.output_stream = open(self.output, 'w')
         else:
             self.output_stream = self.output
-        self.old = (sys.stdout, sys.stderr)
-        self.tee = TeeStream(self.output_stream)
+        if isinstance(self.output, TeeStream):
+            self.tee = self.output
+        else:
+            self.tee = TeeStream(self.output_stream)
         self.tee.__enter__()
         sys.stdout = self.tee.STDOUT
         sys.stderr = self.tee.STDERR

From d922342ff4273b3ff2b2a982d4787a85c3672ef3 Mon Sep 17 00:00:00 2001
From: John Siirola
Date: Wed, 18 Dec 2024 04:34:41 -0700
Subject: [PATCH 3/8] TeeStream: duplicate target file descriptors to avoid conflicts with capture_output

---
 pyomo/common/tee.py | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py
index 6e87deae863..898d4c00ad8 100644
--- a/pyomo/common/tee.py
+++ b/pyomo/common/tee.py
@@ -401,13 +401,22 @@ def writeOutputBuffer(self, ostreams):
 
 class TeeStream(object):
     def __init__(self, *ostreams, encoding=None):
-        self.ostreams = ostreams
+        self.user_ostreams = ostreams
+        self.ostreams = []
         self.encoding = encoding
         self._stdout = None
         self._stderr = None
         self._handles = []
         self._active_handles = []
         self._threads = []
+        for s in ostreams:
+            try:
+                fileno = s.fileno()
+            except:
+                self.ostreams.append(s)
+                continue
+            s = os.fdopen(os.dup(fileno), mode=getattr(s, 'mode', None), closefd=True)
+            self.ostreams.append(s)
 
     @property
     def STDOUT(self):
@@ -482,6 +491,9 @@ def close(self, in_exception=False):
         self._active_handles.clear()
         self._stdout = None
         self._stderr = None
+        for orig, local in zip(self.user_ostreams, self.ostreams):
+            if orig is not local:
+                local.close()
 
     def __enter__(self):
         return self

From 1a4512f4c244c720ac2f65e9452914e35c272e54 Mon Sep 17 00:00:00 2001
From: John Siirola
Date: Wed, 18 Dec 2024 04:40:30 -0700
Subject: [PATCH 4/8] TeeStream: support setting buffering in internal out/err streams

---
 pyomo/common/tee.py | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py
index 898d4c00ad8..07857ab8002 100644
--- a/pyomo/common/tee.py
+++ b/pyomo/common/tee.py
@@ -370,7 +370,7 @@ def decodeIncomingBuffer(self):
     def writeOutputBuffer(self, ostreams):
         if not self.encoding:
             ostring, self.output_buffer = self.output_buffer, b''
-        elif self.buffering == 1:
+        elif self.buffering > 0:
             EOL = self.output_buffer.rfind(self.newlines or '\n') + 1
             ostring = self.output_buffer[:EOL]
             self.output_buffer = self.output_buffer[EOL:]
@@ -400,10 +400,11 @@ def writeOutputBuffer(self, ostreams):
 
 class TeeStream(object):
-    def __init__(self, *ostreams, encoding=None):
+    def __init__(self, *ostreams, encoding=None, buffering=-1):
         self.user_ostreams = ostreams
         self.ostreams = []
         self.encoding = encoding
+        self.buffering = buffering
         self._stdout = None
         self._stderr = None
         self._handles = []
@@ -421,13 +422,19 @@ def __init__(self, *ostreams, encoding=None, buffering=-1):
     @property
     def STDOUT(self):
         if self._stdout is None:
-            self._stdout = self.open(buffering=1)
+            b = self.buffering
+            if b == -1:
+                b = 1
+            self._stdout = self.open(buffering=b)
         return self._stdout
 
     @property
     def STDERR(self):
         if self._stderr is None:
-            self._stderr = self.open(buffering=0)
+            b = self.buffering
+            if b == -1:
+                b = 0
+            self._stderr = self.open(buffering=b)
         return self._stderr
 
     def open(self,
mode='w', buffering=-1, encoding=None, newline=None): From 733cd75aaaec32060b322a5bf442cbd85e990784 Mon Sep 17 00:00:00 2001 From: John Siirola Date: Wed, 18 Dec 2024 04:42:48 -0700 Subject: [PATCH 5/8] capture_output: prevent duplicate output redirection --- pyomo/common/tee.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py index 07857ab8002..de3f1230e88 100644 --- a/pyomo/common/tee.py +++ b/pyomo/common/tee.py @@ -232,8 +232,8 @@ def __enter__(self): sys.stderr = self.tee.STDERR if self.capture_fd: self.fd_redirect = ( - redirect_fd(1, sys.stdout.fileno()), - redirect_fd(2, sys.stderr.fileno()), + redirect_fd(1, self.tee.STDOUT.fileno(), synchronize=False), + redirect_fd(2, self.tee.STDERR.fileno(), synchronize=False), ) self.fd_redirect[0].__enter__() self.fd_redirect[1].__enter__() From e68ffe34dd6c9580aa4ca17ec4653b234c1c31e1 Mon Sep 17 00:00:00 2001 From: John Siirola Date: Wed, 18 Dec 2024 04:43:56 -0700 Subject: [PATCH 6/8] NFC: apply black --- pyomo/common/tee.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyomo/common/tee.py b/pyomo/common/tee.py index de3f1230e88..27faa7f2194 100644 --- a/pyomo/common/tee.py +++ b/pyomo/common/tee.py @@ -62,7 +62,7 @@ def flush(self): def __getattr__(self, attr): return getattr(self._ostream, attr) - def __setattr__(self, attr,val): + def __setattr__(self, attr, val): return setattr(self._ostream, attr, val) From dbe3c254d4f01dd541b680dc281a5ff54dd46b0a Mon Sep 17 00:00:00 2001 From: John Siirola Date: Wed, 18 Dec 2024 06:36:26 -0700 Subject: [PATCH 7/8] Add tests for buffering in TeeStream/capture_output --- pyomo/common/tests/test_tee.py | 182 +++++++++++++++++++++++++++++++++ 1 file changed, 182 insertions(+) diff --git a/pyomo/common/tests/test_tee.py b/pyomo/common/tests/test_tee.py index 0d5e7549c20..7077a71dc5f 100644 --- a/pyomo/common/tests/test_tee.py +++ b/pyomo/common/tests/test_tee.py @@ -11,6 +11,7 @@ # ___________________________________________________________________________ import gc +import itertools import os import time import sys @@ -23,6 +24,40 @@ import pyomo.common.tee as tee +class timestamper: + """A 'TextIO'-like object that records the time when data was written to + the stream.""" + + def __init__(self): + self.buf = [] + + def write(self, data): + for line in data.splitlines(): + self.buf.append((time.time(), float(line.strip()))) + + def writelines(self, data): + for line in data: + self.write(line.strip()) + + def flush(self): + pass + + def check(self, test, base): + """Map the recorded times to {0, 1} based on the range of times + recorded: anything in the first half of the range is mapped to + 0, and anything in the second half is mapped to 1. This + "discretizes" the times so that we can reliably compare to + baselines. 
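+
+        For example, if the recorded times span 10.0 to 10.4 (so the
+        midpoint is 10.2), an entry recorded as (10.05, 10.35) maps
+        to (0, 1).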
+ + """ + + n = list(itertools.chain(*self.buf)) + mid = (min(n) + max(n)) / 2.0 + result = [tuple(0 if i < mid else 1 for i in _) for _ in self.buf] + if result != base: + test.fail(f"result {result} != baseline {base}\nRaw timing: {self.buf}") + + class TestTeeStream(unittest.TestCase): def setUp(self): self.reenable_gc = gc.isenabled() @@ -197,6 +232,7 @@ def test_deadlock(self): class MockStream(object): def write(self, data): time.sleep(0.2) + def flush(self): pass @@ -219,6 +255,152 @@ def flush(self): finally: tee._poll_timeout, tee._poll_timeout_deadlock = _save + def _buffering_stdout(self, fd): + # Test 1: short messages to STDOUT are buffered + # + # TODO: [JDS] If we are capturing the file descriptor, the + # stdout channel is no longer buffered. I am not exactly sure + # why (my guess is because the underlying pipe is not buffered), + # but as it is generally not a problem to not buffer, we will + # put off "fixing" it. + ts = timestamper() + ts.write(f"{time.time()}") + with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd): + sys.stdout.write(f"{time.time()}\n") + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (1 - int(fd), 0), (1 - int(fd), 0), (1, 1)]) + + # Test 2: short messages to STDOUT that are flushed are flushed + ts = timestamper() + ts.write(f"{time.time()}") + with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd): + sys.stdout.write(f"{time.time()}\n") + sys.stdout.flush() + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)]) + + # Test 3: long messages to STDOUT fill the buffer and are flushed + ts = timestamper() + ts.write(f"{time.time()}") + with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd): + sys.stdout.write(f"{time.time()}" + ' ' * 4096 + "\n") + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)]) + + # Test 4: short messages captured directly to TeeStream are not + # buffered. + # + # TODO: [JDS] I am not exactly sure why this is not buffered (my + # guess is because the underlying pipe is not buffered), but as + # it is generally not a problem to not buffer, we will put off + # "fixing" it. 
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd):
+            sys.stdout.write(f"{time.time()}\n")
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+        # Test 5: short messages captured directly to TeeStream that are
+        # flushed are flushed
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd):
+            sys.stdout.write(f"{time.time()}\n")
+            sys.stdout.flush()
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+        # Test 6: long messages captured directly to TeeStream fill the
+        # buffer and are flushed
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd):
+            sys.stdout.write(f"{time.time()}" + ' ' * 4096 + "\n")
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+    def test_buffering_stdout(self):
+        self._buffering_stdout(False)
+
+    def test_buffering_stdout_capture_fd(self):
+        self._buffering_stdout(True)
+
+    def _buffering_stderr(self, fd):
+        # Test 1: short messages to STDERR are not buffered (STDERR
+        # defaults to unbuffered), even when we are capturing the
+        # underlying file descriptor.
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd):
+            sys.stderr.write(f"{time.time()}\n")
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+        # Test 2: short messages to STDERR that are flushed are flushed
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd):
+            sys.stderr.write(f"{time.time()}\n")
+            sys.stderr.flush()
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+        # Test 3: long messages to STDERR fill the buffer and are flushed
+        ts = timestamper()
+        ts.write(f"{time.time()}")
+        with tee.TeeStream(ts, ts) as t, tee.capture_output(t.STDOUT, capture_fd=fd):
+            sys.stderr.write(f"{time.time()}" + ' ' * 4096 + "\n")
+            time.sleep(0.1)
+        ts.write(f"{time.time()}")
+        ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)])
+
+        # Test 4: short messages captured directly to TeeStream are not
+        # buffered, even when we are capturing the underlying file
+        # descriptor.
+ ts = timestamper() + ts.write(f"{time.time()}") + with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd): + sys.stderr.write(f"{time.time()}\n") + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)]) + + # Test 5: short messages captured directly to TeeStream that are + # flushed are flushed + ts = timestamper() + ts.write(f"{time.time()}") + with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd): + sys.stderr.write(f"{time.time()}\n") + sys.stderr.flush() + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)]) + + # Test 6: long messages captured directly to TeeStream fill the + # buffer and are flushed + ts = timestamper() + ts.write(f"{time.time()}") + with tee.capture_output(tee.TeeStream(ts, ts), capture_fd=fd): + sys.stderr.write(f"{time.time()}" + ' ' * 4096 + "\n") + time.sleep(0.1) + ts.write(f"{time.time()}") + ts.check(self, [(0, 0), (0, 0), (0, 0), (1, 1)]) + + def test_buffering_stderr(self): + self._buffering_stderr(False) + + def test_buffering_stderr_capture_fd(self): + self._buffering_stderr(True) + class TestFileDescriptor(unittest.TestCase): def setUp(self): From be90f1a8bc58bfd6bfc55d4b997829a4e99bdc2f Mon Sep 17 00:00:00 2001 From: John Siirola Date: Wed, 18 Dec 2024 06:59:31 -0700 Subject: [PATCH 8/8] Standardize use of TeeStream / capture_output --- pyomo/contrib/appsi/solvers/gurobi.py | 29 ++-- pyomo/contrib/appsi/solvers/highs.py | 54 +++---- pyomo/contrib/appsi/solvers/maingo.py | 47 +++--- .../algorithms/solvers/cyipopt_solver.py | 40 +---- pyomo/contrib/solver/gurobi.py | 2 +- pyomo/contrib/solver/gurobi_direct.py | 2 +- pyomo/solvers/plugins/solvers/GUROBI.py | 13 +- pyomo/solvers/plugins/solvers/SAS.py | 147 +++++++++--------- 8 files changed, 147 insertions(+), 187 deletions(-) diff --git a/pyomo/contrib/appsi/solvers/gurobi.py b/pyomo/contrib/appsi/solvers/gurobi.py index 2719ecc2a00..10f3c5bf62c 100644 --- a/pyomo/contrib/appsi/solvers/gurobi.py +++ b/pyomo/contrib/appsi/solvers/gurobi.py @@ -367,25 +367,24 @@ def _solve(self, timer: HierarchicalTimer): if self.config.stream_solver: ostreams.append(sys.stdout) - with TeeStream(*ostreams) as t: - with capture_output(output=t.STDOUT, capture_fd=False): - config = self.config - options = self.gurobi_options + with capture_output(output=TeeStream(*ostreams), capture_fd=False): + config = self.config + options = self.gurobi_options - self._solver_model.setParam('LogToConsole', 1) - self._solver_model.setParam('LogFile', config.logfile) + self._solver_model.setParam('LogToConsole', 1) + self._solver_model.setParam('LogFile', config.logfile) - if config.time_limit is not None: - self._solver_model.setParam('TimeLimit', config.time_limit) - if config.mip_gap is not None: - self._solver_model.setParam('MIPGap', config.mip_gap) + if config.time_limit is not None: + self._solver_model.setParam('TimeLimit', config.time_limit) + if config.mip_gap is not None: + self._solver_model.setParam('MIPGap', config.mip_gap) - for key, option in options.items(): - self._solver_model.setParam(key, option) + for key, option in options.items(): + self._solver_model.setParam(key, option) - timer.start('optimize') - self._solver_model.optimize(self._callback) - timer.stop('optimize') + timer.start('optimize') + self._solver_model.optimize(self._callback) + timer.stop('optimize') self._needs_updated = False return self._postsolve(timer) diff --git a/pyomo/contrib/appsi/solvers/highs.py b/pyomo/contrib/appsi/solvers/highs.py index 
57a7b1eac72..f9f2c759459 100644
--- a/pyomo/contrib/appsi/solvers/highs.py
+++ b/pyomo/contrib/appsi/solvers/highs.py
@@ -233,22 +233,21 @@ def _solve(self, timer: HierarchicalTimer):
         if self.config.stream_solver:
             ostreams.append(sys.stdout)
 
-        with TeeStream(*ostreams) as t:
-            with capture_output(output=t.STDOUT, capture_fd=True):
-                self._solver_model.setOptionValue('log_to_console', True)
-                if config.logfile != '':
-                    self._solver_model.setOptionValue('log_file', config.logfile)
-
-                if config.time_limit is not None:
-                    self._solver_model.setOptionValue('time_limit', config.time_limit)
-                if config.mip_gap is not None:
-                    self._solver_model.setOptionValue('mip_rel_gap', config.mip_gap)
-
-                for key, option in options.items():
-                    self._solver_model.setOptionValue(key, option)
-                timer.start('optimize')
-                self._solver_model.run()
-                timer.stop('optimize')
+        with capture_output(output=TeeStream(*ostreams), capture_fd=True):
+            self._solver_model.setOptionValue('log_to_console', True)
+            if config.logfile != '':
+                self._solver_model.setOptionValue('log_file', config.logfile)
+
+            if config.time_limit is not None:
+                self._solver_model.setOptionValue('time_limit', config.time_limit)
+            if config.mip_gap is not None:
+                self._solver_model.setOptionValue('mip_rel_gap', config.mip_gap)
+
+            for key, option in options.items():
+                self._solver_model.setOptionValue(key, option)
+            timer.start('optimize')
+            self._solver_model.run()
+            timer.stop('optimize')
 
         return self._postsolve(timer)
 
@@ -372,17 +373,16 @@ def set_instance(self, model):
         ]
         if self.config.stream_solver:
             ostreams.append(sys.stdout)
-        with TeeStream(*ostreams) as t:
-            with capture_output(output=t.STDOUT, capture_fd=True):
-                self._reinit()
-                self._model = model
-                if self.use_extensions and cmodel_available:
-                    self._expr_types = cmodel.PyomoExprTypes()
-
-                self._solver_model = highspy.Highs()
-                self.add_block(model)
-                if self._objective is None:
-                    self.set_objective(None)
+        with capture_output(output=TeeStream(*ostreams), capture_fd=True):
+            self._reinit()
+            self._model = model
+            if self.use_extensions and cmodel_available:
+                self._expr_types = cmodel.PyomoExprTypes()
+
+            self._solver_model = highspy.Highs()
+            self.add_block(model)
+            if self._objective is None:
+                self.set_objective(None)
 
     def _add_constraints(self, cons: List[ConstraintData]):
         self._sol = None
diff --git a/pyomo/contrib/appsi/solvers/maingo.py b/pyomo/contrib/appsi/solvers/maingo.py
index c5860b42ce7..062ea09004e 100644
--- a/pyomo/contrib/appsi/solvers/maingo.py
+++ b/pyomo/contrib/appsi/solvers/maingo.py
@@ -217,30 +217,29 @@ def _solve(self, timer: HierarchicalTimer):
         if self.config.stream_solver:
             ostreams.append(sys.stdout)
 
-        with TeeStream(*ostreams) as t:
-            with capture_output(output=t.STDOUT, capture_fd=False):
-                config = self.config
-                options = self.maingo_options
-
-                self._mymaingo = maingopy.MAiNGO(self._solver_model)
-
-                self._mymaingo.set_option("loggingDestination", 2)
-                self._mymaingo.set_log_file_name(config.logfile)
-                self._mymaingo.set_option("epsilonA", config.tolerances.epsilonA)
-                self._mymaingo.set_option("epsilonR", config.tolerances.epsilonR)
-                self._mymaingo.set_option("deltaEq", config.tolerances.deltaEq)
-                self._mymaingo.set_option("deltaIneq", config.tolerances.deltaIneq)
-
-                if config.time_limit is not None:
-                    self._mymaingo.set_option("maxTime", config.time_limit)
-                if config.mip_gap is not None:
-                    self._mymaingo.set_option("epsilonR", config.mip_gap)
-                for key, option in 
options.items(): - self._mymaingo.set_option(key, option) - - timer.start("MAiNGO solve") - self._mymaingo.solve() - timer.stop("MAiNGO solve") + with capture_output(output=TeeStream(*ostreams), capture_fd=False): + config = self.config + options = self.maingo_options + + self._mymaingo = maingopy.MAiNGO(self._solver_model) + + self._mymaingo.set_option("loggingDestination", 2) + self._mymaingo.set_log_file_name(config.logfile) + self._mymaingo.set_option("epsilonA", config.tolerances.epsilonA) + self._mymaingo.set_option("epsilonR", config.tolerances.epsilonR) + self._mymaingo.set_option("deltaEq", config.tolerances.deltaEq) + self._mymaingo.set_option("deltaIneq", config.tolerances.deltaIneq) + + if config.time_limit is not None: + self._mymaingo.set_option("maxTime", config.time_limit) + if config.mip_gap is not None: + self._mymaingo.set_option("epsilonR", config.mip_gap) + for key, option in options.items(): + self._mymaingo.set_option(key, option) + + timer.start("MAiNGO solve") + self._mymaingo.solve() + timer.stop("MAiNGO solve") return self._postsolve(timer) diff --git a/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py b/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py index 0999550711c..9d627bf9f6d 100644 --- a/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py +++ b/pyomo/contrib/pynumero/algorithms/solvers/cyipopt_solver.py @@ -23,7 +23,7 @@ from pyomo.common.deprecation import relocated_module_attribute from pyomo.common.dependencies import attempt_import, numpy as np, numpy_available -from pyomo.common.tee import redirect_fd, TeeStream +from pyomo.common.tee import capture_output from pyomo.common.modeling import unique_component_name from pyomo.core.base.objective import Objective @@ -228,23 +228,8 @@ def solve(self, x0=None, tee=False): for k, v in self._options.items(): add_option(k, v) - # We preemptively set up the TeeStream, even if we aren't - # going to use it: the implementation is such that the - # context manager does nothing (i.e., doesn't start up any - # processing threads) until after a client accesses - # STDOUT/STDERR - with TeeStream(sys.stdout) as _teeStream: - if tee: - try: - fd = sys.stdout.fileno() - except (io.UnsupportedOperation, AttributeError): - # If sys,stdout doesn't have a valid fileno, - # then create one using the TeeStream - fd = _teeStream.STDOUT.fileno() - else: - fd = None - with redirect_fd(fd=1, output=fd, synchronize=False): - x, info = cyipopt_solver.solve(xstart) + with capture_output(sys.stdout if tee else None, capture_fd=True): + x, info = cyipopt_solver.solve(xstart) return x, info @@ -394,23 +379,8 @@ def solve(self, model, **kwds): timer = TicTocTimer() try: - # We preemptively set up the TeeStream, even if we aren't - # going to use it: the implementation is such that the - # context manager does nothing (i.e., doesn't start up any - # processing threads) until after a client accesses - # STDOUT/STDERR - with TeeStream(sys.stdout) as _teeStream: - if config.tee: - try: - fd = sys.stdout.fileno() - except (io.UnsupportedOperation, AttributeError): - # If sys,stdout doesn't have a valid fileno, - # then create one using the TeeStream - fd = _teeStream.STDOUT.fileno() - else: - fd = None - with redirect_fd(fd=1, output=fd, synchronize=False): - x, info = cyipopt_solver.solve(problem.x_init()) + with capture_output(sys.stdout if config.tee else None, capture_fd=True): + x, info = cyipopt_solver.solve(problem.x_init()) solverStatus = SolverStatus.ok except: msg = "Exception encountered during cyipopt 
solve:" diff --git a/pyomo/contrib/solver/gurobi.py b/pyomo/contrib/solver/gurobi.py index 10d8120c8b3..1fdaed98a15 100644 --- a/pyomo/contrib/solver/gurobi.py +++ b/pyomo/contrib/solver/gurobi.py @@ -327,7 +327,7 @@ def _solve(self): timer = config.timer ostreams = [io.StringIO()] + config.tee - with TeeStream(*ostreams) as t, capture_output(t.STDOUT, capture_fd=False): + with capture_output(TeeStream(*ostreams), capture_fd=False): options = config.solver_options self._solver_model.setParam('LogToConsole', 1) diff --git a/pyomo/contrib/solver/gurobi_direct.py b/pyomo/contrib/solver/gurobi_direct.py index fa67e243a4f..04561ea5b1a 100644 --- a/pyomo/contrib/solver/gurobi_direct.py +++ b/pyomo/contrib/solver/gurobi_direct.py @@ -278,7 +278,7 @@ def solve(self, model, **kwds) -> Results: orig_cwd = os.getcwd() if config.working_dir: os.chdir(config.working_dir) - with TeeStream(*ostreams) as t, capture_output(t.STDOUT, capture_fd=False): + with capture_output(TeeStream(*ostreams), capture_fd=False): gurobi_model = gurobipy.Model() timer.start('transfer_model') diff --git a/pyomo/solvers/plugins/solvers/GUROBI.py b/pyomo/solvers/plugins/solvers/GUROBI.py index 2a6ee8b676e..f31fe7924a6 100644 --- a/pyomo/solvers/plugins/solvers/GUROBI.py +++ b/pyomo/solvers/plugins/solvers/GUROBI.py @@ -684,15 +684,10 @@ def _apply_solver(self): ostreams = [io.StringIO()] if self._tee: ostreams.append(sys.stdout) - with TeeStream(*ostreams) as t: - with capture_output(output=t.STDOUT, capture_fd=False): - self._soln = GUROBI_RUN.gurobi_run( - problem_filename, - warmstart_filename, - None, - options_dict, - self._suffixes, - ) + with capture_output(output=TeeStream(*ostreams), capture_fd=False): + self._soln = GUROBI_RUN.gurobi_run( + problem_filename, warmstart_filename, None, options_dict, self._suffixes + ) self._log = ostreams[0].getvalue() self._rc = 0 sys.stdout.flush() diff --git a/pyomo/solvers/plugins/solvers/SAS.py b/pyomo/solvers/plugins/solvers/SAS.py index d7b09e29fde..cfcb2c2a0ad 100644 --- a/pyomo/solvers/plugins/solvers/SAS.py +++ b/pyomo/solvers/plugins/solvers/SAS.py @@ -728,83 +728,80 @@ def _apply_solver(self): ostreams.append(sys.stdout) # Connect to CAS server - with TeeStream(*ostreams) as t: - with capture_output(output=t.STDOUT, capture_fd=False): - s = self._sas_session - if s == None: - s = self._sas_session = self._sas.CAS(**self._session_options) - try: - # Load the optimization action set - s.loadactionset("optimization") - - mpsdata_table_name = self._uploadMpsFile(s, unique) - - primalin_table_name = None - if self.warmstart_flag: - primalin_table_name = self._uploadPrimalin(s, unique) - - # Define output table names - primalout_table_name = "pout" + unique - dualout_table_name = None - - # Solve the problem in CAS - if action == "solveMilp": - r = s.optimization.solveMilp( - data={"name": mpsdata_table_name}, - primalOut={"name": primalout_table_name, "replace": True}, - **self.options - ) - else: - dualout_table_name = "dout" + unique - r = s.optimization.solveLp( - data={"name": mpsdata_table_name}, - primalOut={"name": primalout_table_name, "replace": True}, - dualOut={"name": dualout_table_name, "replace": True}, - **self.options - ) - - # Prepare the solver results - if r: - # Get back the primal and dual solution data sets - results = self.results = self._create_results_from_status( - r.get("status", "ERROR"), r.get("solutionStatus", "ERROR") - ) - - if results.solver.status != SolverStatus.error: - if r.ProblemSummary["cValue1"][1] == "Maximization": - 
results.problem.sense = ProblemSense.maximize - else: - results.problem.sense = ProblemSense.minimize - - # Prepare the solution information - if results.solver.hasSolution: - self._retrieveSolution( - s, - r, - results, - action, - primalout_table_name, - dualout_table_name, - ) + with capture_output(output=TeeStream(*ostreams), capture_fd=False): + s = self.start_sas_session() + try: + # Load the optimization action set + s.loadactionset("optimization") + + mpsdata_table_name = self._uploadMpsFile(s, unique) + + primalin_table_name = None + if self.warmstart_flag: + primalin_table_name = self._uploadPrimalin(s, unique) + + # Define output table names + primalout_table_name = "pout" + unique + dualout_table_name = None + + # Solve the problem in CAS + if action == "solveMilp": + r = s.optimization.solveMilp( + data={"name": mpsdata_table_name}, + primalOut={"name": primalout_table_name, "replace": True}, + **self.options + ) + else: + dualout_table_name = "dout" + unique + r = s.optimization.solveLp( + data={"name": mpsdata_table_name}, + primalOut={"name": primalout_table_name, "replace": True}, + dualOut={"name": dualout_table_name, "replace": True}, + **self.options + ) + + # Prepare the solver results + if r: + # Get back the primal and dual solution data sets + results = self.results = self._create_results_from_status( + r.get("status", "ERROR"), r.get("solutionStatus", "ERROR") + ) + + if results.solver.status != SolverStatus.error: + if r.ProblemSummary["cValue1"][1] == "Maximization": + results.problem.sense = ProblemSense.maximize else: - raise ValueError("The SAS solver returned an error status.") + results.problem.sense = ProblemSense.minimize + + # Prepare the solution information + if results.solver.hasSolution: + self._retrieveSolution( + s, + r, + results, + action, + primalout_table_name, + dualout_table_name, + ) else: - results = self.results = SolverResults() - results.solver.name = "SAS" - results.solver.status = SolverStatus.error - raise ValueError( - "An option passed to the SAS solver caused a syntax error." - ) - - finally: - if mpsdata_table_name: - s.dropTable(name=mpsdata_table_name, quiet=True) - if primalin_table_name: - s.dropTable(name=primalin_table_name, quiet=True) - if primalout_table_name: - s.dropTable(name=primalout_table_name, quiet=True) - if dualout_table_name: - s.dropTable(name=dualout_table_name, quiet=True) + raise ValueError("The SAS solver returned an error status.") + else: + results = self.results = SolverResults() + results.solver.name = "SAS" + results.solver.status = SolverStatus.error + raise ValueError( + "An option passed to the SAS solver caused a syntax error." + ) + + finally: + if mpsdata_table_name: + s.dropTable(name=mpsdata_table_name, quiet=True) + if primalin_table_name: + s.dropTable(name=primalin_table_name, quiet=True) + if primalout_table_name: + s.dropTable(name=primalout_table_name, quiet=True) + if dualout_table_name: + s.dropTable(name=dualout_table_name, quiet=True) self._log = self._log.getvalue() self._rc = 0