From cec2bd2378ee9490e7b567f630fc2b97836f48d1 Mon Sep 17 00:00:00 2001
From: ooooo <3164076421@qq.com>
Date: Sun, 3 Mar 2024 20:18:43 +0800
Subject: [PATCH] clean C416 rule for codestyle

---
 doc/gui/examples/charts/treemap-simple.py        |  2 +-
 pyproject.toml                                   |  1 -
 taipy/core/_entity/_dag.py                       |  2 +-
 taipy/core/data/_abstract_sql.py                 |  2 +-
 taipy/gui_core/_context.py                       |  4 ++--
 tests/core/data/test_filter_data_node.py         |  2 +-
 tests/core/data/test_generic_data_node.py        |  4 ++--
 tests/core/job/test_job_manager.py               | 12 +++++++-----
 tests/core/job/test_job_manager_with_sql_repo.py |  2 +-
 9 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/doc/gui/examples/charts/treemap-simple.py b/doc/gui/examples/charts/treemap-simple.py
index 9845e8e0c8..a1df4a22af 100644
--- a/doc/gui/examples/charts/treemap-simple.py
+++ b/doc/gui/examples/charts/treemap-simple.py
@@ -21,7 +21,7 @@
 for i in range(2, n_numbers):
     fibonacci.append(fibonacci[i - 1] + fibonacci[i - 2])
 
-data = {"index": [i for i in range(1, n_numbers + 1)], "fibonacci": fibonacci}
+data = {"index": list(range(1, n_numbers + 1)), "fibonacci": fibonacci}
 
 page = """
 # TreeMap - Simple
diff --git a/pyproject.toml b/pyproject.toml
index 69ac9430e8..41f47926ca 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,7 +31,6 @@ ignore = [  # TODO: to be removed
     "C405", # Unnecessary list literal - rewrite as a literal
     "C408", # Unnecessary dict call - rewrite as a literal
     "C409", # Unnecessary list passed to tuple() - rewrite as a tuple literal
-    "C416", # Unnecessary `set` comprehension (rewrite using `set()`)
 ]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.
diff --git a/taipy/core/_entity/_dag.py b/taipy/core/_entity/_dag.py
index 3729979888..6918dd4754 100644
--- a/taipy/core/_entity/_dag.py
+++ b/taipy/core/_entity/_dag.py
@@ -31,7 +31,7 @@ def __init__(self, src: _Node, dest: _Node):
 
 class _DAG:
     def __init__(self, dag: nx.DiGraph):
-        self._sorted_nodes = [nodes for nodes in nx.topological_generations(dag)]
+        self._sorted_nodes = list(nx.topological_generations(dag))
         self._length, self._width = self.__compute_size()
         self._grid_length, self._grid_width = self.__compute_grid_size()
         self._nodes = self.__compute_nodes()
diff --git a/taipy/core/data/_abstract_sql.py b/taipy/core/data/_abstract_sql.py
index 6155f760fa..437ab3b61a 100644
--- a/taipy/core/data/_abstract_sql.py
+++ b/taipy/core/data/_abstract_sql.py
@@ -230,7 +230,7 @@ def _read_as_pandas_dataframe(
 
             # On pandas 1.3.5 there's a bug that makes that the dataframe from sqlalchemy query is
             # created without headers
-            keys = [col for col in result.keys()]
+            keys = list(result.keys())
             if columns:
                 return pd.DataFrame(result, columns=keys)[columns]
             return pd.DataFrame(result, columns=keys)
diff --git a/taipy/gui_core/_context.py b/taipy/gui_core/_context.py
index ff1958dabc..8af2e8a6fc 100644
--- a/taipy/gui_core/_context.py
+++ b/taipy/gui_core/_context.py
@@ -124,7 +124,7 @@ def process_event(self, event: Event):
                 if sequence and hasattr(sequence, "parent_ids") and sequence.parent_ids:  # type: ignore
                     self.gui._broadcast(
                         _GuiCoreContext._CORE_CHANGED_NAME,
-                        {"scenario": [x for x in sequence.parent_ids]},  # type: ignore
+                        {"scenario": list(sequence.parent_ids)},  # type: ignore
                     )
             except Exception as e:
                 _warn(f"Access to sequence {event.entity_id} failed", e)
@@ -650,7 +650,7 @@ def __edit_properties(self, entity: t.Union[Scenario, Sequence, DataNode], data:
             if isinstance(ent, Scenario):
                 tags = data.get(_GuiCoreContext.__PROP_SCENARIO_TAGS)
                 if isinstance(tags, (list, tuple)):
-                    ent.tags = {t for t in tags}
+                    ent.tags = set(tags)
                 name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME)
                 if isinstance(name, str):
                     if hasattr(ent, _GuiCoreContext.__PROP_ENTITY_NAME):
diff --git a/tests/core/data/test_filter_data_node.py b/tests/core/data/test_filter_data_node.py
index d1fcf0af8a..a10dcc3eb0 100644
--- a/tests/core/data/test_filter_data_node.py
+++ b/tests/core/data/test_filter_data_node.py
@@ -244,7 +244,7 @@ def test_filter_by_get_item(default_data_frame):
     filtered_custom_dn = custom_dn["a"]
     assert isinstance(filtered_custom_dn, List)
     assert len(filtered_custom_dn) == 10
-    assert filtered_custom_dn == [i for i in range(10)]
+    assert filtered_custom_dn == list(range(10))
 
     filtered_custom_dn = custom_dn[0:5]
     assert isinstance(filtered_custom_dn, List)
diff --git a/tests/core/data/test_generic_data_node.py b/tests/core/data/test_generic_data_node.py
index ee907e5fa1..b2a2f58bdc 100644
--- a/tests/core/data/test_generic_data_node.py
+++ b/tests/core/data/test_generic_data_node.py
@@ -46,11 +46,11 @@ def read_fct_modify_data_node_name(data_node_id: DataNodeId, name: str):
 
 
 def reset_data():
-    TestGenericDataNode.data = [i for i in range(10)]
+    TestGenericDataNode.data = list(range(10))
 
 
 class TestGenericDataNode:
-    data = [i for i in range(10)]
+    data = list(range(10))
 
     def test_create(self):
         dn = GenericDataNode(
diff --git a/tests/core/job/test_job_manager.py b/tests/core/job/test_job_manager.py
index 9e0f15d214..91c85927a3 100644
--- a/tests/core/job/test_job_manager.py
+++ b/tests/core/job/test_job_manager.py
@@ -411,10 +411,12 @@ def test_cancel_subsequent_jobs():
     assert_true_after_time(job_4.is_canceled)
     assert_true_after_time(job_5.is_abandoned)
     assert_true_after_time(job_6.is_abandoned)
-    assert_true_after_time(lambda: all(
-        not _OrchestratorFactory._orchestrator._is_blocked(job)
-        for job in [job_1, job_2, job_3, job_4, job_5, job_6]
-    ))
+    assert_true_after_time(
+        lambda: all(
+            not _OrchestratorFactory._orchestrator._is_blocked(job)
+            for job in [job_1, job_2, job_3, job_4, job_5, job_6]
+        )
+    )
     assert_true_after_time(lambda: _OrchestratorFactory._orchestrator.jobs_to_run.qsize() == 0)
 
 
@@ -474,7 +476,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", "pickle", Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(set(output_dn_configs))
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,
diff --git a/tests/core/job/test_job_manager_with_sql_repo.py b/tests/core/job/test_job_manager_with_sql_repo.py
index b35ff001b5..7ae75eff6d 100644
--- a/tests/core/job/test_job_manager_with_sql_repo.py
+++ b/tests/core/job/test_job_manager_with_sql_repo.py
@@ -245,7 +245,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", scope=Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(set(output_dn_configs))
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,
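
Note: the snippet below is not part of the patch; it is a minimal sketch of the rewrites ruff's C416
rule ("unnecessary comprehension") asks for, using hypothetical values in place of the objects touched
above. The point to keep in mind is that a comprehension that only copies its iterable is replaced by
the matching constructor call: a set comprehension such as {t for t in tags} becomes set(tags) (never
dict(tags), since the removed lines build collections of items, not mappings), and a list comprehension
such as [i for i in range(10)] becomes list(range(10)).

    # Illustrative only: the C416 rewrite pattern applied in this patch, on hypothetical values.
    tags = ["nightly", "release"]                       # stand-in for the scenario tags above
    assert {t for t in tags} == set(tags)               # set comprehension  -> set()
    assert [i for i in range(10)] == list(range(10))    # list comprehension -> list()
    parent_ids = {"SCENARIO_1", "SCENARIO_2"}           # stand-in for sequence.parent_ids
    assert sorted(x for x in parent_ids) == sorted(list(parent_ids))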