feat(general): add rustworkx (#5511) (#5565)
* feat(general): add rustworkx (#5511)

* add _get_operation for rustworkx

* fix typing

* fix tests

* fix linting

---------

Co-authored-by: gruebel <[email protected]>
(cherry picked from commit ef1433e)

* Relocked Pipfile with Python 3.8

---------

Co-authored-by: Barak Fatal <[email protected]>
lirshindalman and bo156 authored Sep 26, 2023
1 parent 26109f3 commit 286d1f3
Showing 76 changed files with 715 additions and 256 deletions.
1 change: 1 addition & 0 deletions Pipfile
@@ -89,6 +89,7 @@ yarl = "*"
openai = "*"
spdx-tools = ">=0.8.0,<0.9.0"
license-expression = "*"
rustworkx = "*"

[requires]
python_version = "3.8"
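
For context, rustworkx is a Rust-backed graph library with a networkx-like API. Below is a minimal, hypothetical sketch (not part of this commit) of the pieces the rest of the change relies on: add_node returns an integer index, edges are added between indices, and nodes() returns the stored payload objects rather than (id, data) pairs.

# Hypothetical sketch of basic rustworkx usage; payloads are made up for illustration.
from rustworkx import PyDiGraph

graph: PyDiGraph = PyDiGraph()
a = graph.add_node({"block_type_": "resource"})   # add_node returns an int index
b = graph.add_node({"block_type_": "variable"})
graph.add_edge(a, b, "references")                # edge payload can be any object

print(graph.node_indices())   # integer node indices, e.g. [0, 1]
print(graph.nodes())          # the payload objects themselves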
309 changes: 227 additions & 82 deletions Pipfile.lock

Large diffs are not rendered by default.

23 changes: 19 additions & 4 deletions checkov/common/bridgecrew/wrapper.py
@@ -11,6 +11,7 @@

import dpath
from igraph import Graph
from rustworkx import PyDiGraph, digraph_node_link_json # type: ignore

try:
from networkx import DiGraph, node_link_data
@@ -19,6 +20,7 @@
DiGraph = str
node_link_data = lambda G : {}


from checkov.common.bridgecrew.check_type import CheckType
from checkov.common.models.consts import SUPPORTED_FILE_EXTENSIONS
from checkov.common.typing import _ReducedScanReport
@@ -38,6 +40,10 @@
secrets_check_reduced_keys = check_reduced_keys + ('validation_status',)
check_metadata_keys = ('evaluations', 'code_block', 'workflow_name', 'triggers', 'job')

FILE_NAME_NETWORKX = 'graph_networkx.json'
FILE_NAME_IGRAPH = 'graph_igraph.json'
FILE_NAME_RUSTWORKX = 'graph_rustworkx.json'


def _is_scanned_file(file: str) -> bool:
file_ending = os.path.splitext(file)[1]
@@ -146,15 +152,24 @@ def enrich_and_persist_checks_metadata(
return checks_metadata_paths


def persist_graphs(graphs: dict[str, DiGraph | Graph], s3_client: S3Client, bucket: str, full_repo_object_key: str,
timeout: int, absolute_root_folder: str = '') -> None:
def persist_graphs(
graphs: dict[str, DiGraph | Graph | PyDiGraph[Any, Any]],
s3_client: S3Client,
bucket: str,
full_repo_object_key: str,
timeout: int,
absolute_root_folder: str = '',
) -> None:
def _upload_graph(check_type: str, graph: DiGraph | Graph, _absolute_root_folder: str = '') -> None:
if isinstance(graph, DiGraph):
json_obj = node_link_data(graph)
graph_file_name = 'graph_networkx.json'
graph_file_name = FILE_NAME_NETWORKX
elif isinstance(graph, Graph):
json_obj = serialize_to_json(graph, _absolute_root_folder)
graph_file_name = 'graph_igraph.json'
graph_file_name = FILE_NAME_IGRAPH
elif isinstance(graph, PyDiGraph):
json_obj = digraph_node_link_json(graph)
graph_file_name = FILE_NAME_RUSTWORKX
else:
logging.error(f"unsupported graph type '{graph.__class__.__name__}'")
return
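As a reading aid, a hedged sketch of what the new rustworkx branch in _upload_graph boils down to: serialize the graph to node-link JSON with digraph_node_link_json (which, as used above, returns the JSON string when no output path is given) and hand it to the existing upload path. Payloads and names below are hypothetical, and the node/edge attribute callbacks are omitted, as in the diff.

# Sketch only; mirrors the dispatch added above with made-up payloads.
import json
from rustworkx import PyDiGraph, digraph_node_link_json

graph: PyDiGraph = PyDiGraph()
a = graph.add_node("resource.a")
b = graph.add_node("resource.b")
graph.add_edge(a, b, "references")

json_obj = digraph_node_link_json(graph)   # node-link JSON, analogous to networkx node_link_data()
print(json.loads(json_obj))                # parses as a regular JSON document
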
@@ -8,6 +8,7 @@

from igraph import Graph
from bc_jsonpath_ng.ext import parse
from networkx import DiGraph

from checkov.common.graph.checks_infra import debug
from checkov.common.graph.checks_infra.enums import SolverType
@@ -65,8 +66,17 @@ def run(self, graph_connector: LibraryGraph) -> Tuple[List[Dict[str, Any]], List
failed_vertices.append(data)

return passed_vertices, failed_vertices, unknown_vertices
elif isinstance(graph_connector, DiGraph):
for _, data in graph_connector.nodes(data=True):
if (not self.resource_types or data.get(CustomAttributes.RESOURCE_TYPE) in self.resource_types) \
and data.get(CustomAttributes.BLOCK_TYPE) in SUPPORTED_BLOCK_TYPES:
jobs.append(executer.submit(
self._process_node, data, passed_vertices, failed_vertices, unknown_vertices))

concurrent.futures.wait(jobs)
return passed_vertices, failed_vertices, unknown_vertices

for _, data in graph_connector.nodes(data=True):
for _, data in graph_connector.nodes():
if (not self.resource_types or data.get(CustomAttributes.RESOURCE_TYPE) in self.resource_types) \
and data.get(CustomAttributes.BLOCK_TYPE) in SUPPORTED_BLOCK_TYPES:
jobs.append(executer.submit(
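The pattern in the solver hunks above and below: networkx's nodes(data=True) yields (node_id, attributes) pairs, while the rustworkx graph used here appears to store its node payloads as (index, attributes) tuples, so the fallback branch unpacks nodes() directly. A hedged sketch with hypothetical attributes:

# Hypothetical sketch of the two iteration styles the solvers now have to support.
from networkx import DiGraph
from rustworkx import PyDiGraph

nx_graph = DiGraph()
nx_graph.add_node("aws_s3_bucket.example", block_type_="resource")

rx_graph: PyDiGraph = PyDiGraph()
rx_graph.add_node((0, {"block_type_": "resource"}))   # payload stored as an (index, attributes) tuple

for _, data in nx_graph.nodes(data=True):   # networkx: (node_id, attribute dict)
    print(data["block_type_"])

for _, data in rx_graph.nodes():            # rustworkx here: payloads already carry the index
    print(data["block_type_"])
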
@@ -4,6 +4,7 @@
from typing import List, Any, Tuple, Dict, TYPE_CHECKING, Optional

from igraph import Graph
from networkx import DiGraph

from checkov.common.graph.checks_infra import debug
from checkov.common.graph.checks_infra.enums import SolverType
@@ -59,8 +60,19 @@ def run(self, graph_connector: LibraryGraph) -> Tuple[List[Dict[str, Any]], List
)

return passed_vertices, failed_vertices, unknown_vertices
elif isinstance(graph_connector, DiGraph):
for _, data in graph_connector.nodes(data=True):
if self.resource_type_pred(data, self.resource_types):
result = self.get_operation(data)
if result is None:
unknown_vertices.append(data)
elif result:
passed_vertices.append(data)
else:
failed_vertices.append(data)
return passed_vertices, failed_vertices, unknown_vertices

for _, data in graph_connector.nodes(data=True):
for _, data in graph_connector.nodes():
if self.resource_type_pred(data, self.resource_types):
result = self.get_operation(data)
if result is None:
@@ -4,6 +4,7 @@
from typing import Any, List, Dict, Optional, Tuple, TYPE_CHECKING

from igraph import Graph
from networkx import DiGraph

from checkov.common.graph.checks_infra.enums import SolverType
from checkov.common.graph.checks_infra.solvers.base_solver import BaseSolver
@@ -61,14 +62,24 @@ def set_vertices(self, graph_connector: LibraryGraph, exclude_vertices: List[Dic
self.vertices_under_connected_resources_types = [
data for data in graph_connector.vs.select(resource_type_in=self.connected_resources_types)["attr"]
]
else:
elif isinstance(graph_connector, DiGraph):
self.vertices_under_resource_types = [
v for _, v in graph_connector.nodes(data=True) if self.resource_type_pred(v, self.resource_types)
]
self.vertices_under_connected_resources_types = [
v for _, v in graph_connector.nodes(data=True) if self.resource_type_pred(v, self.connected_resources_types)
]

# isinstance(graph_connector, PyDiGraph):
else:
self.vertices_under_resource_types = [
v for _, v in graph_connector.nodes() if self.resource_type_pred(v, self.resource_types)
]
self.vertices_under_connected_resources_types = [
v for _, v in graph_connector.nodes() if
self.resource_type_pred(v, self.connected_resources_types)
]

self.excluded_vertices = [
v
for v in itertools.chain(self.vertices_under_resource_types, self.vertices_under_connected_resources_types)
@@ -92,7 +103,7 @@ def reduce_graph_by_target_types(self, graph_connector: LibraryGraph) -> Library
connection_nodes = {
vertex for vertex in graph_connector.vs.select(block_type__in=BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES)
}
else:
elif isinstance(graph_connector, DiGraph):
resource_nodes = {
node
for node, resource_type in graph_connector.nodes(data=CustomAttributes.RESOURCE_TYPE)
Expand All @@ -106,9 +117,24 @@ def reduce_graph_by_target_types(self, graph_connector: LibraryGraph) -> Library
if block_type in BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES
}

# isinstance(graph_connector, PyDiGraph):
else:
resource_nodes = {
index
for index, node in graph_connector.nodes()
if self.resource_type_pred(node, list(self.targeted_resources_types))
}

# tuple needs to be adjusted, if more connection block types are supported
connection_nodes = {
index
for index, node in graph_connector.nodes()
if node['block_type_'] in BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES
}

resource_nodes.update(connection_nodes)

return graph_connector.subgraph(resource_nodes)
return graph_connector.subgraph(list(resource_nodes))

def populate_checks_results(self, origin_attributes: Dict[str, Any], destination_attributes: Dict[str, Any], passed: List[Dict[str, Any]], failed: List[Dict[str, Any]], unknown: List[Dict[str, Any]]) -> None:
if origin_attributes in self.excluded_vertices or destination_attributes in self.excluded_vertices:
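The list() wrapper on the last line exists because rustworkx's PyDiGraph.subgraph() takes a sequence of integer node indices, so the set built in reduce_graph_by_target_types is converted before the call (the networkx branch accepted the set directly). A hedged sketch with hypothetical payloads:

# Hypothetical sketch of the subgraph call at the end of reduce_graph_by_target_types.
from rustworkx import PyDiGraph

graph: PyDiGraph = PyDiGraph()
indices = [graph.add_node((i, {"block_type_": "resource"})) for i in range(4)]
graph.add_edge(indices[0], indices[1], "references")

keep = {indices[0], indices[1]}        # built as a set, as in the diff
sub = graph.subgraph(list(keep))       # rustworkx expects a list of node indices
print(sub.num_nodes())                 # -> 2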