From f38e82c6394ad40c0bd2d0d6839bfce3ae9a4563 Mon Sep 17 00:00:00 2001
From: saranyaloganathan23
Date: Mon, 22 Jul 2024 11:11:20 +0530
Subject: [PATCH] review comments changes

---
 dataproc_jupyter_plugin/commons/commands.py    |  2 +-
 dataproc_jupyter_plugin/services/airflow.py    |  4 ++--
 dataproc_jupyter_plugin/services/executor.py   | 12 ++++++------
 dataproc_jupyter_plugin/tests/test_airflow.py  |  2 +-
 dataproc_jupyter_plugin/tests/test_executor.py |  2 +-
 src/batches/batchDetails.tsx                   |  1 -
 6 files changed, 11 insertions(+), 12 deletions(-)

diff --git a/dataproc_jupyter_plugin/commons/commands.py b/dataproc_jupyter_plugin/commons/commands.py
index 7f990136..d38f552b 100644
--- a/dataproc_jupyter_plugin/commons/commands.py
+++ b/dataproc_jupyter_plugin/commons/commands.py
@@ -19,7 +19,7 @@
 import tempfile
 
 
-async def async_command_executor(cmd):
+async def async_run_gsutil_subcommand(cmd):
     """Run a specified command and return its output."""
     with tempfile.TemporaryFile() as t:
         p = await asyncio.create_subprocess_shell(
diff --git a/dataproc_jupyter_plugin/services/airflow.py b/dataproc_jupyter_plugin/services/airflow.py
index 55d87887..b77a5803 100644
--- a/dataproc_jupyter_plugin/services/airflow.py
+++ b/dataproc_jupyter_plugin/services/airflow.py
@@ -17,7 +17,7 @@
 import urllib
 
 from dataproc_jupyter_plugin import urls
-from dataproc_jupyter_plugin.commons.commands import async_command_executor
+from dataproc_jupyter_plugin.commons.commands import async_run_gsutil_subcommand
 from dataproc_jupyter_plugin.commons.constants import (
     COMPOSER_SERVICE_NAME,
     CONTENT_TYPE,
@@ -97,7 +97,7 @@ async def delete_job(self, composer_name, dag_id, from_page):
                     self.log.info(response)
             try:
                 cmd = f"gsutil rm gs://{bucket}/dags/dag_{dag_id}.py"
-                await async_command_executor(cmd)
+                await async_run_gsutil_subcommand(cmd)
                 return 0
             except subprocess.CalledProcessError as error:
                 self.log.exception("Error deleting dag")
diff --git a/dataproc_jupyter_plugin/services/executor.py b/dataproc_jupyter_plugin/services/executor.py
index 76c2ea4c..bcc0654f 100644
--- a/dataproc_jupyter_plugin/services/executor.py
+++ b/dataproc_jupyter_plugin/services/executor.py
@@ -24,7 +24,7 @@
 from jinja2 import Environment, PackageLoader, select_autoescape
 
 from dataproc_jupyter_plugin import urls
-from dataproc_jupyter_plugin.commons.commands import async_command_executor
+from dataproc_jupyter_plugin.commons.commands import async_run_gsutil_subcommand
 from dataproc_jupyter_plugin.commons.constants import (
     COMPOSER_SERVICE_NAME,
     CONTENT_TYPE,
@@ -87,7 +87,7 @@ async def get_bucket(self, runtime_env):
     async def check_file_exists(self, bucket, file_path):
         try:
             cmd = f"gsutil ls gs://{bucket}/dataproc-notebooks/{file_path}"
-            await async_command_executor(cmd)
+            await async_run_gsutil_subcommand(cmd)
             return True
         except subprocess.CalledProcessError as error:
             self.log.exception(f"Error checking papermill file: {error.decode()}")
@@ -101,7 +101,7 @@ async def upload_papermill_to_gcs(self, gcs_dag_bucket):
         wrapper_papermill_path = env.get_template("wrapper_papermill.py").filename
         try:
             cmd = f"gsutil cp '{wrapper_papermill_path}' gs://{gcs_dag_bucket}/dataproc-notebooks/"
-            await async_command_executor(cmd)
+            await async_run_gsutil_subcommand(cmd)
             self.log.info("Papermill file uploaded to gcs successfully")
         except subprocess.CalledProcessError as error:
             self.log.exception(
@@ -112,7 +112,7 @@
     async def upload_input_file_to_gcs(self, input, gcs_dag_bucket, job_name):
         try:
             cmd = f"gsutil cp './{input}' gs://{gcs_dag_bucket}/dataproc-notebooks/{job_name}/input_notebooks/"
-            await async_command_executor(cmd)
+            await async_run_gsutil_subcommand(cmd)
             self.log.info("Input file uploaded to gcs successfully")
         except subprocess.CalledProcessError as error:
             self.log.exception(f"Error uploading input file to gcs: {error.decode()}")
@@ -239,7 +239,7 @@ async def upload_dag_to_gcs(self, job, dag_file, gcs_dag_bucket):
         file_path = os.path.join(LOCAL_DAG_FILE_LOCATION, dag_file)
         try:
             cmd = f"gsutil cp '{file_path}' gs://{gcs_dag_bucket}/dags/"
-            await async_command_executor(cmd)
+            await async_run_gsutil_subcommand(cmd)
             self.log.info("Dag file uploaded to gcs successfully")
         except subprocess.CalledProcessError as error:
             self.log.exception(f"Error uploading dag file to gcs: {error.decode()}")
@@ -280,7 +280,7 @@ async def execute(self, input_data):
     async def download_dag_output(self, bucket_name, dag_id, dag_run_id):
         try:
             cmd = f"gsutil cp 'gs://{bucket_name}/dataproc-output/{dag_id}/output-notebooks/{dag_id}_{dag_run_id}.ipynb' ./"
-            await async_command_executor(cmd)
+            await async_run_gsutil_subcommand(cmd)
             self.log.info("Output notebook file downloaded successfully")
             return 0
         except subprocess.CalledProcessError as error:
diff --git a/dataproc_jupyter_plugin/tests/test_airflow.py b/dataproc_jupyter_plugin/tests/test_airflow.py
index 7429b716..384036e0 100644
--- a/dataproc_jupyter_plugin/tests/test_airflow.py
+++ b/dataproc_jupyter_plugin/tests/test_airflow.py
@@ -95,7 +95,7 @@ async def mock_async_command_executor(cmd):
         )
 
     monkeypatch.setattr(airflow.Client, "get_airflow_uri", mock_get_airflow_uri)
-    monkeypatch.setattr(airflow, "async_command_executor", mock_async_command_executor)
+    monkeypatch.setattr(airflow, "async_run_gsutil_subcommand", mock_async_command_executor)
     monkeypatch.setattr(aiohttp, "ClientSession", MockClientSession)
     mock_composer = "mock_composer"
     mock_dag_id = "mock_dag_id"
diff --git a/dataproc_jupyter_plugin/tests/test_executor.py b/dataproc_jupyter_plugin/tests/test_executor.py
index 7bc6f3b5..7c541495 100644
--- a/dataproc_jupyter_plugin/tests/test_executor.py
+++ b/dataproc_jupyter_plugin/tests/test_executor.py
@@ -105,7 +105,7 @@ async def mock_async_command_executor(cmd):
             returncode, cmd, output=b"output", stderr=b"error in executing command"
         )
 
-    monkeypatch.setattr(executor, "async_command_executor", mock_async_command_executor)
+    monkeypatch.setattr(executor, "async_run_gsutil_subcommand", mock_async_command_executor)
     monkeypatch.setattr(aiohttp, "ClientSession", MockClientSession)
 
     mock_bucket_name = "mock_bucekt"
diff --git a/src/batches/batchDetails.tsx b/src/batches/batchDetails.tsx
index 5e05c6f8..db359162 100644
--- a/src/batches/batchDetails.tsx
+++ b/src/batches/batchDetails.tsx
@@ -13,7 +13,6 @@
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
- *
 */

import React, { useState, useEffect, useRef } from 'react';
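
For context, the following is a minimal, self-contained sketch of the renamed helper together with one call site, assembled from the hunks above. The patch only shows async_run_gsutil_subcommand down to the create_subprocess_shell call, so everything after that line (waiting for the process, raising CalledProcessError on a non-zero exit, returning the captured output) is an assumption about the unchanged remainder of the function; example_upload_dag and the file/bucket names it uses are hypothetical and merely mirror the upload_dag_to_gcs call site. Running it requires the gsutil CLI on PATH.

import asyncio
import subprocess
import tempfile


async def async_run_gsutil_subcommand(cmd):
    """Run a specified command and return its output."""
    with tempfile.TemporaryFile() as t:
        p = await asyncio.create_subprocess_shell(
            cmd, stderr=subprocess.PIPE, stdout=t
        )
        # Assumed continuation (not shown in the patch): wait for the subprocess,
        # surface failures as CalledProcessError (which the call sites above catch),
        # and return the captured stdout.
        _, stderr = await p.communicate()
        if p.returncode != 0:
            raise subprocess.CalledProcessError(p.returncode, cmd, stderr=stderr)
        t.seek(0)
        return t.read().decode()


async def example_upload_dag(file_path, gcs_dag_bucket):
    # Mirrors the executor.py call sites: build a gsutil command string and
    # await the helper; CalledProcessError propagates to the caller on failure.
    cmd = f"gsutil cp '{file_path}' gs://{gcs_dag_bucket}/dags/"
    await async_run_gsutil_subcommand(cmd)


if __name__ == "__main__":
    # Hypothetical names, for illustration only.
    asyncio.run(example_upload_dag("dag_example.py", "example-dag-bucket"))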