Skip to content

Commit

Permalink
clean C416 rule for codestyle
Browse files Browse the repository at this point in the history
  • Loading branch information
ooooo-create committed Mar 3, 2024
1 parent 750afd0 commit cec2bd2
Show file tree
Hide file tree
Showing 9 changed files with 16 additions and 15 deletions.
2 changes: 1 addition & 1 deletion doc/gui/examples/charts/treemap-simple.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
for i in range(2, n_numbers):
fibonacci.append(fibonacci[i - 1] + fibonacci[i - 2])

data = {"index": [i for i in range(1, n_numbers + 1)], "fibonacci": fibonacci}
data = {"index": list(range(1, n_numbers + 1)), "fibonacci": fibonacci}

page = """
# TreeMap - Simple
Expand Down
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@ ignore = [ # TODO: to be removed
"C405", # Unnecessary list literal - rewrite as a literal
"C408", # Unnecessary dict call - rewrite as a literal
"C409", # Unnecessary list passed to tuple() - rewrite as a tuple literal
"C416", # Unnecessary `set` comprehension (rewrite using `set()`)
]

# Allow fix for all enabled rules (when `--fix`) is provided.
Expand Down
2 changes: 1 addition & 1 deletion taipy/core/_entity/_dag.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def __init__(self, src: _Node, dest: _Node):

class _DAG:
def __init__(self, dag: nx.DiGraph):
self._sorted_nodes = [nodes for nodes in nx.topological_generations(dag)]
self._sorted_nodes = list(nx.topological_generations(dag))
self._length, self._width = self.__compute_size()
self._grid_length, self._grid_width = self.__compute_grid_size()
self._nodes = self.__compute_nodes()
Expand Down
2 changes: 1 addition & 1 deletion taipy/core/data/_abstract_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ def _read_as_pandas_dataframe(

# On pandas 1.3.5 there's a bug that makes that the dataframe from sqlalchemy query is
# created without headers
keys = [col for col in result.keys()]
keys = list(result.keys())
if columns:
return pd.DataFrame(result, columns=keys)[columns]
return pd.DataFrame(result, columns=keys)
Expand Down
4 changes: 2 additions & 2 deletions taipy/gui_core/_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def process_event(self, event: Event):
if sequence and hasattr(sequence, "parent_ids") and sequence.parent_ids: # type: ignore
self.gui._broadcast(
_GuiCoreContext._CORE_CHANGED_NAME,
{"scenario": [x for x in sequence.parent_ids]}, # type: ignore
{"scenario": list(sequence.parent_ids)}, # type: ignore
)
except Exception as e:
_warn(f"Access to sequence {event.entity_id} failed", e)
Expand Down Expand Up @@ -650,7 +650,7 @@ def __edit_properties(self, entity: t.Union[Scenario, Sequence, DataNode], data:
if isinstance(ent, Scenario):
tags = data.get(_GuiCoreContext.__PROP_SCENARIO_TAGS)
if isinstance(tags, (list, tuple)):
ent.tags = {t for t in tags}
ent.tags = set(tags)
name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME)
if isinstance(name, str):
if hasattr(ent, _GuiCoreContext.__PROP_ENTITY_NAME):
Expand Down
2 changes: 1 addition & 1 deletion tests/core/data/test_filter_data_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ def test_filter_by_get_item(default_data_frame):
filtered_custom_dn = custom_dn["a"]
assert isinstance(filtered_custom_dn, List)
assert len(filtered_custom_dn) == 10
assert filtered_custom_dn == [i for i in range(10)]
assert filtered_custom_dn == list(range(10))

filtered_custom_dn = custom_dn[0:5]
assert isinstance(filtered_custom_dn, List)
Expand Down
4 changes: 2 additions & 2 deletions tests/core/data/test_generic_data_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,11 @@ def read_fct_modify_data_node_name(data_node_id: DataNodeId, name: str):


def reset_data():
TestGenericDataNode.data = [i for i in range(10)]
TestGenericDataNode.data = list(range(10))


class TestGenericDataNode:
data = [i for i in range(10)]
data = list(range(10))

def test_create(self):
dn = GenericDataNode(
Expand Down
12 changes: 7 additions & 5 deletions tests/core/job/test_job_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -411,10 +411,12 @@ def test_cancel_subsequent_jobs():
assert_true_after_time(job_4.is_canceled)
assert_true_after_time(job_5.is_abandoned)
assert_true_after_time(job_6.is_abandoned)
assert_true_after_time(lambda: all(
not _OrchestratorFactory._orchestrator._is_blocked(job)
for job in [job_1, job_2, job_3, job_4, job_5, job_6]
))
assert_true_after_time(
lambda: all(
not _OrchestratorFactory._orchestrator._is_blocked(job)
for job in [job_1, job_2, job_3, job_4, job_5, job_6]
)
)
assert_true_after_time(lambda: _OrchestratorFactory._orchestrator.jobs_to_run.qsize() == 0)


Expand Down Expand Up @@ -474,7 +476,7 @@ def _create_task(function, nb_outputs=1, name=None):
output_dn_configs = [
Config.configure_data_node(f"output{i}", "pickle", Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
]
_DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
_DataManager._bulk_get_or_create(set(output_dn_configs))
name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
task_config = Config.configure_task(
id=name,
Expand Down
2 changes: 1 addition & 1 deletion tests/core/job/test_job_manager_with_sql_repo.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,7 @@ def _create_task(function, nb_outputs=1, name=None):
output_dn_configs = [
Config.configure_data_node(f"output{i}", scope=Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
]
_DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
_DataManager._bulk_get_or_create(set(output_dn_configs))
name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
task_config = Config.configure_task(
id=name,
Expand Down

0 comments on commit cec2bd2

Please sign in to comment.