issue #687 rename log_error to show_error_logs
ElienVandermaesenVITO committed Dec 19, 2024
1 parent 6f3decb commit 140dbd2
Showing 3 changed files with 10 additions and 10 deletions.
6 changes: 3 additions & 3 deletions openeo/rest/datacube.py
@@ -2477,7 +2477,7 @@ def execute_batch(
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
auto_add_save_result: bool = True,
- log_error=True,
+ show_error_logs: bool = True,
# TODO: deprecate `format_options` as keyword arguments
**format_options,
) -> BatchJob:
@@ -2495,7 +2495,7 @@ def execute_batch(
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
(overruling the connection's ``auto_validate`` setting).
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
- :param log_error: whether to print error logs
+ :param show_error_logs: whether to automatically print error logs when the batch job failed.
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
@@ -2532,7 +2532,7 @@ def execute_batch(
return job.run_synchronous(
outputfile=outputfile,
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
- log_error=log_error
+ show_error_logs=show_error_logs
)

def create_job(
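For context, a minimal usage sketch of the renamed keyword on DataCube.execute_batch; the backend URL and output filename are placeholders, not part of this commit:

    import openeo

    # Placeholder backend URL; any openEO backend with batch job support works.
    connection = openeo.connect("https://openeo.example.com").authenticate_oidc()
    cube = connection.load_collection("SENTINEL2")

    # show_error_logs=False suppresses the automatic dump of error logs
    # to stdout when the batch job fails.
    job = cube.execute_batch(
        outputfile="result.tiff",
        out_format="GTiff",
        show_error_logs=False,
    )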
12 changes: 6 additions & 6 deletions openeo/rest/job.py
@@ -236,7 +236,7 @@ def logs(

def run_synchronous(
self, outputfile: Union[str, Path, None] = None,
- print=print, max_poll_interval=60, connection_retry_interval=30, log_error=True
+ print=print, max_poll_interval=60, connection_retry_interval=30, show_error_logs: bool = True
) -> BatchJob:
"""
Start the job, wait for it to finish and download result
@@ -245,12 +245,12 @@ def run_synchronous(
:param print: print/logging function to show progress/status
:param max_poll_interval: maximum number of seconds to sleep between status polls
:param connection_retry_interval: how long to wait when status poll failed due to connection issue
- :param log_error: whether to print error logs
+ :param show_error_logs: whether to automatically print error logs when the batch job failed.
:return:
"""
self.start_and_wait(
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval,
- log_error=log_error
+ show_error_logs=show_error_logs
)
# TODO #135 support multi file result sets too?
if outputfile is not None:
@@ -259,7 +259,7 @@ def run_synchronous(

def start_and_wait(
self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10,
- log_error=True
+ show_error_logs: bool = True
) -> BatchJob:
"""
Start the batch job, poll its status and wait till it finishes (or fails)
@@ -268,7 +268,7 @@ def start_and_wait(
:param max_poll_interval: maximum number of seconds to sleep between status polls
:param connection_retry_interval: how long to wait when status poll failed due to connection issue
:param soft_error_max: maximum number of soft errors (e.g. temporary connection glitches) to allow
- :param log_error: whether to print error logs
+ :param show_error_logs: whether to automatically print error logs when the batch job failed.
:return:
"""
# TODO rename `connection_retry_interval` to something more generic?
@@ -327,7 +327,7 @@ def soft_error(message: str):

if status != "finished":
# TODO: render logs jupyter-aware in a notebook context?
- if log_error:
+ if show_error_logs:
print(f"Your batch job {self.job_id!r} failed. Error logs:")
print(self.logs(level=logging.ERROR))
print(
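The same keyword is threaded through BatchJob.run_synchronous and BatchJob.start_and_wait. A hedged sketch of driving a job directly and inspecting the error logs yourself (the job creation step is assumed from the existing API, not part of this commit):

    import logging

    # Assumes `cube` is a DataCube as in the previous sketch.
    job = cube.create_job(out_format="GTiff")

    try:
        # With show_error_logs=False, a failed job no longer prints its
        # error logs automatically; only the exception is raised.
        job.start_and_wait(show_error_logs=False)
    except Exception:  # the test below expects a JobFailedException here
        for entry in job.logs(level=logging.ERROR):
            print(entry)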
2 changes: 1 addition & 1 deletion tests/rest/test_job.py
@@ -182,7 +182,7 @@ def test_execute_batch_with_error_with_error_logs_disabled(con100, requests_mock
with fake_time():
con100.load_collection("SENTINEL2").execute_batch(
outputfile=path, out_format="GTIFF",
- max_poll_interval=.1, print=log.append, log_error=False
+ max_poll_interval=.1, print=log.append, show_error_logs=False
)
pytest.fail("execute_batch should fail")
except JobFailedException as e:
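Callers that passed the old keyword by name would be updated along these lines (a before/after sketch, not part of the diff):

    # Before this commit:
    job.run_synchronous(outputfile="result.tiff", log_error=False)

    # After this commit:
    job.run_synchronous(outputfile="result.tiff", show_error_logs=False)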
