From dff506f9f087d25f57f0423b3e24eab753d0a858 Mon Sep 17 00:00:00 2001 From: phlax Date: Sun, 21 Mar 2021 16:19:34 +0000 Subject: [PATCH] python: Fix docstring formatting (#15570) Signed-off-by: Ryan Northey --- tools/api_proto_plugin/annotations.py | 34 +-- tools/api_proto_plugin/plugin.py | 16 +- tools/api_proto_plugin/traverse.py | 56 ++-- tools/api_proto_plugin/type_context.py | 134 +++++----- tools/api_proto_plugin/utils.py | 26 +- .../generate_api_version_header.py | 38 +-- tools/code_format/format_python_tools.py | 10 +- tools/config_validation/validate_fragment.py | 18 +- tools/dependency/cve_scan.py | 128 ++++----- tools/deprecate_version/deprecate_version.py | 10 +- tools/envoy_collect/envoy_collect.py | 40 +-- tools/envoy_headersplit/headersplit.py | 246 +++++++++--------- tools/envoy_headersplit/replace_includes.py | 44 ++-- tools/proto_format/proto_sync.py | 126 ++++----- tools/protodoc/protodoc.py | 204 +++++++-------- tools/protoxform/migrate.py | 46 ++-- tools/protoxform/options.py | 42 +-- tools/protoxform/protoprint.py | 186 ++++++------- tools/protoxform/protoxform.py | 4 +- tools/protoxform/protoxform_test_helper.py | 78 +++--- tools/type_whisperer/type_whisperer.py | 4 +- tools/type_whisperer/typedb_gen.py | 32 +-- 22 files changed, 761 insertions(+), 761 deletions(-) diff --git a/tools/api_proto_plugin/annotations.py b/tools/api_proto_plugin/annotations.py index ca55b9e14b82..f23898dc8dd4 100644 --- a/tools/api_proto_plugin/annotations.py +++ b/tools/api_proto_plugin/annotations.py @@ -56,14 +56,14 @@ class AnnotationError(Exception): def extract_annotations(s, inherited_annotations=None): """Extract annotations map from a given comment string. - Args: - s: string that may contains annotations. - inherited_annotations: annotation map from file-level inherited annotations - (or None) if this is a file-level comment. - - Returns: - Annotation map. - """ + Args: + s: string that may contains annotations. 
+ inherited_annotations: annotation map from file-level inherited annotations + (or None) if this is a file-level comment. + + Returns: + Annotation map. + """ annotations = { k: v for k, v in (inherited_annotations or {}).items() if k in INHERITED_ANNOTATIONS } @@ -80,17 +80,17 @@ def extract_annotations(s, inherited_annotations=None): def xform_annotation(s, annotation_xforms): """Return transformed string with annotation transformers. - The annotation will be replaced with the new value returned by the transformer. - If the transformer returns None, then the annotation will be removed. - If the annotation presented in transformers doesn't exist in the original string, - a new annotation will be appended to the end of string. + The annotation will be replaced with the new value returned by the transformer. + If the transformer returns None, then the annotation will be removed. + If the annotation presented in transformers doesn't exist in the original string, + a new annotation will be appended to the end of string. - Args: - annotation_xforms: a dict of transformers for annotations. + Args: + annotation_xforms: a dict of transformers for annotations. - Returns: - transformed string. - """ + Returns: + transformed string. + """ present_annotations = set() def xform(match): diff --git a/tools/api_proto_plugin/plugin.py b/tools/api_proto_plugin/plugin.py index f9935ca62f23..1ed47976f29e 100644 --- a/tools/api_proto_plugin/plugin.py +++ b/tools/api_proto_plugin/plugin.py @@ -38,16 +38,16 @@ def direct_output_descriptor(output_suffix, visitor, want_params=False): def plugin(output_descriptors): """Protoc plugin entry point. - This defines protoc plugin and manages the stdin -> stdout flow. An - api_proto_plugin is defined by the provided visitor. + This defines protoc plugin and manages the stdin -> stdout flow. An + api_proto_plugin is defined by the provided visitor. 
- See - http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/ - for further details on protoc plugin basics. + See + http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/ + for further details on protoc plugin basics. - Args: - output_descriptors: a list of OutputDescriptors. - """ + Args: + output_descriptors: a list of OutputDescriptors. + """ request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.buffer.read()) response = plugin_pb2.CodeGeneratorResponse() diff --git a/tools/api_proto_plugin/traverse.py b/tools/api_proto_plugin/traverse.py index 0ddc2f131b14..83b2d67b7246 100644 --- a/tools/api_proto_plugin/traverse.py +++ b/tools/api_proto_plugin/traverse.py @@ -6,42 +6,42 @@ def traverse_service(type_context, service_proto, visitor): """Traverse a service definition. - Args: - type_context: type_context.TypeContext for service type. - service_proto: ServiceDescriptorProto for service. - visitor: visitor.Visitor defining the business logic of the plugin. - - Returns: - Plugin specific output. - """ + Args: + type_context: type_context.TypeContext for service type. + service_proto: ServiceDescriptorProto for service. + visitor: visitor.Visitor defining the business logic of the plugin. + + Returns: + Plugin specific output. + """ return visitor.visit_service(service_proto, type_context) def traverse_enum(type_context, enum_proto, visitor): """Traverse an enum definition. - Args: - type_context: type_context.TypeContext for enum type. - enum_proto: EnumDescriptorProto for enum. - visitor: visitor.Visitor defining the business logic of the plugin. + Args: + type_context: type_context.TypeContext for enum type. + enum_proto: EnumDescriptorProto for enum. + visitor: visitor.Visitor defining the business logic of the plugin. - Returns: - Plugin specific output. - """ + Returns: + Plugin specific output. 
+ """ return visitor.visit_enum(enum_proto, type_context) def traverse_message(type_context, msg_proto, visitor): """Traverse a message definition. - Args: - type_context: type_context.TypeContext for message type. - msg_proto: DescriptorProto for message. - visitor: visitor.Visitor defining the business logic of the plugin. + Args: + type_context: type_context.TypeContext for message type. + msg_proto: DescriptorProto for message. + visitor: visitor.Visitor defining the business logic of the plugin. - Returns: - Plugin specific output. - """ + Returns: + Plugin specific output. + """ # We need to do some extra work to recover the map type annotation from the # synthesized messages. type_context.map_typenames = { @@ -67,13 +67,13 @@ def traverse_message(type_context, msg_proto, visitor): def traverse_file(file_proto, visitor): """Traverse a proto file definition. - Args: - file_proto: FileDescriptorProto for file. - visitor: visitor.Visitor defining the business logic of the plugin. + Args: + file_proto: FileDescriptorProto for file. + visitor: visitor.Visitor defining the business logic of the plugin. - Returns: - Plugin specific output. - """ + Returns: + Plugin specific output. + """ source_code_info = type_context.SourceCodeInfo(file_proto.name, file_proto.source_code_info) package_type_context = type_context.TypeContext(source_code_info, file_proto.package) services = [ diff --git a/tools/api_proto_plugin/type_context.py b/tools/api_proto_plugin/type_context.py index 70fcc2ec31ea..7429b3474297 100644 --- a/tools/api_proto_plugin/type_context.py +++ b/tools/api_proto_plugin/type_context.py @@ -16,12 +16,12 @@ def __init__(self, comment, file_level_annotations=None): def get_comment_with_transforms(self, annotation_xforms): """Return transformed comment with annotation transformers. - Args: - annotation_xforms: a dict of transformers for annotations in leading comment. + Args: + annotation_xforms: a dict of transformers for annotations in leading comment. 
- Returns: - transformed Comment object. - """ + Returns: + transformed Comment object. + """ return Comment(annotations.xform_annotation(self.raw, annotation_xforms), self.file_level_annotations) @@ -68,13 +68,13 @@ def file_level_annotations(self): def location_path_lookup(self, path): """Lookup SourceCodeInfo.Location by path in SourceCodeInfo. - Args: - path: a list of path indexes as per - https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. + Args: + path: a list of path indexes as per + https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. - Returns: - SourceCodeInfo.Location object if found, otherwise None. - """ + Returns: + SourceCodeInfo.Location object if found, otherwise None. + """ return self._locations.get(str(path), None) # TODO(htuch): consider integrating comment lookup with overall @@ -82,13 +82,13 @@ def location_path_lookup(self, path): def leading_comment_path_lookup(self, path): """Lookup leading comment by path in SourceCodeInfo. - Args: - path: a list of path indexes as per - https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. + Args: + path: a list of path indexes as per + https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. - Returns: - Comment object. - """ + Returns: + Comment object. + """ location = self.location_path_lookup(path) if location is not None: return Comment(location.leading_comments, self.file_level_annotations) @@ -97,13 +97,13 @@ def leading_comment_path_lookup(self, path): def leading_detached_comments_path_lookup(self, path): """Lookup leading detached comments by path in SourceCodeInfo. 
- Args: - path: a list of path indexes as per - https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. + Args: + path: a list of path indexes as per + https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. - Returns: - List of detached comment strings. - """ + Returns: + List of detached comment strings. + """ location = self.location_path_lookup(path) if location is not None and location.leading_detached_comments != self.file_level_comments: return location.leading_detached_comments @@ -112,13 +112,13 @@ def leading_detached_comments_path_lookup(self, path): def trailing_comment_path_lookup(self, path): """Lookup trailing comment by path in SourceCodeInfo. - Args: - path: a list of path indexes as per - https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. + Args: + path: a list of path indexes as per + https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717. - Returns: - Raw detached comment string - """ + Returns: + Raw detached comment string + """ location = self.location_path_lookup(path) if location is not None: return location.trailing_comments @@ -128,9 +128,9 @@ def trailing_comment_path_lookup(self, path): class TypeContext(object): """Contextual information for a message/field. - Provides information around namespaces and enclosing types for fields and - nested messages/enums. - """ + Provides information around namespaces and enclosing types for fields and + nested messages/enums. + """ def __init__(self, source_code_info, name): # SourceCodeInfo as per @@ -173,49 +173,49 @@ def _extend(self, path, type_name, name, deprecated=False): def extend_message(self, index, name, deprecated): """Extend type context with a message. - Args: - index: message index in file. - name: message name. 
- deprecated: is the message depreacted? - """ + Args: + index: message index in file. + name: message name. + deprecated: is the message depreacted? + """ return self._extend([4, index], 'message', name, deprecated) def extend_nested_message(self, index, name, deprecated): """Extend type context with a nested message. - Args: - index: nested message index in message. - name: message name. - deprecated: is the message depreacted? - """ + Args: + index: nested message index in message. + name: message name. + deprecated: is the message depreacted? + """ return self._extend([3, index], 'message', name, deprecated) def extend_field(self, index, name): """Extend type context with a field. - Args: - index: field index in message. - name: field name. - """ + Args: + index: field index in message. + name: field name. + """ return self._extend([2, index], 'field', name) def extend_enum(self, index, name, deprecated): """Extend type context with an enum. - Args: - index: enum index in file. - name: enum name. - deprecated: is the message depreacted? - """ + Args: + index: enum index in file. + name: enum name. + deprecated: is the message depreacted? + """ return self._extend([5, index], 'enum', name, deprecated) def extend_service(self, index, name): """Extend type context with a service. - Args: - index: service index in file. - name: service name. - """ + Args: + index: service index in file. + name: service name. + """ return self._extend([6, index], 'service', name) def extend_nested_enum(self, index, name, deprecated): @@ -231,28 +231,28 @@ def extend_nested_enum(self, index, name, deprecated): def extend_enum_value(self, index, name): """Extend type context with an enum enum. - Args: - index: enum value index in enum. - name: value name. - """ + Args: + index: enum value index in enum. + name: value name. + """ return self._extend([2, index], 'enum_value', name) def extend_oneof(self, index, name): """Extend type context with an oneof declaration. 
- Args: - index: oneof index in oneof_decl. - name: oneof name. - """ + Args: + index: oneof index in oneof_decl. + name: oneof name. + """ return self._extend([8, index], 'oneof', name) def extend_method(self, index, name): """Extend type context with a service method declaration. - Args: - index: method index in service. - name: method name. - """ + Args: + index: method index in service. + name: method name. + """ return self._extend([2, index], 'method', name) @property diff --git a/tools/api_proto_plugin/utils.py b/tools/api_proto_plugin/utils.py index 256fb793353b..049dec343644 100644 --- a/tools/api_proto_plugin/utils.py +++ b/tools/api_proto_plugin/utils.py @@ -4,13 +4,13 @@ def proto_file_canonical_from_label(label): """Compute path from API root to a proto file from a Bazel proto label. - Args: - label: Bazel source proto label string. + Args: + label: Bazel source proto label string. - Returns: - A string with the path, e.g. for @envoy_api//envoy/type/matcher:metadata.proto - this would be envoy/type/matcher/matcher.proto. - """ + Returns: + A string with the path, e.g. for @envoy_api//envoy/type/matcher:metadata.proto + this would be envoy/type/matcher/matcher.proto. + """ assert (label.startswith('@envoy_api_canonical//')) return label[len('@envoy_api_canonical//'):].replace(':', '/') @@ -18,14 +18,14 @@ def proto_file_canonical_from_label(label): def bazel_bin_path_for_output_artifact(label, suffix, root=''): """Find the location in bazel-bin/ for an api_proto_plugin output file. - Args: - label: Bazel source proto label string. - suffix: output suffix for the artifact from label, e.g. ".types.pb_text". - root: location of bazel-bin/, if not specified, PWD. + Args: + label: Bazel source proto label string. + suffix: output suffix for the artifact from label, e.g. ".types.pb_text". + root: location of bazel-bin/, if not specified, PWD. - Returns: - Path in bazel-bin/external/envoy_api_canonical for label output with given suffix. 
- """ + Returns: + Path in bazel-bin/external/envoy_api_canonical for label output with given suffix. + """ proto_file_path = proto_file_canonical_from_label(label) return os.path.join(root, 'bazel-bin/external/envoy_api_canonical', os.path.dirname(proto_file_path), 'pkg', proto_file_path + suffix) diff --git a/tools/api_versioning/generate_api_version_header.py b/tools/api_versioning/generate_api_version_header.py index 9fb0c990a224..4e292f96947f 100644 --- a/tools/api_versioning/generate_api_version_header.py +++ b/tools/api_versioning/generate_api_version_header.py @@ -22,14 +22,14 @@ def generate_header_file(input_path): """Generates a c++ header file containing the api_version variable with the - correct value. + correct value. - Args: - input_path: the file containing the API version (API_VERSION). + Args: + input_path: the file containing the API version (API_VERSION). - Returns: - the header file contents. - """ + Returns: + the header file contents. + """ lines = pathlib.Path(input_path).read_text().splitlines() assert (len(lines) == 1) @@ -50,19 +50,19 @@ def generate_header_file(input_path): def compute_oldest_api_version(current_version: ApiVersion): """Computest the oldest API version the client supports. According to the - specification (see: api/API_VERSIONING.md), Envoy supports up to 2 most - recent minor versions. Therefore if the latest API version "X.Y.Z", Envoy's - oldest API version is "X.Y-1.0". Note that the major number is always the - same as the latest version, and the patch number is always 0. In addition, - the minor number is at least 0, and the oldest api version cannot be set - to a previous major number. - - Args: - current_version: the current API version. - - Returns: - the oldest supported API version. - """ + specification (see: api/API_VERSIONING.md), Envoy supports up to 2 most + recent minor versions. Therefore if the latest API version "X.Y.Z", Envoy's + oldest API version is "X.Y-1.0". 
Note that the major number is always the + same as the latest version, and the patch number is always 0. In addition, + the minor number is at least 0, and the oldest api version cannot be set + to a previous major number. + + Args: + current_version: the current API version. + + Returns: + the oldest supported API version. + """ return ApiVersion(current_version.major, max(current_version.minor - 1, 0), 0) diff --git a/tools/code_format/format_python_tools.py b/tools/code_format/format_python_tools.py index c54779df4595..c63c0595eb4e 100644 --- a/tools/code_format/format_python_tools.py +++ b/tools/code_format/format_python_tools.py @@ -11,9 +11,9 @@ def collect_files(): """Collect all Python files in the tools directory. - Returns: A collection of python files in the tools directory excluding - any directories in the EXCLUDE_LIST constant. - """ + Returns: A collection of python files in the tools directory excluding + any directories in the EXCLUDE_LIST constant. + """ # TODO: Add ability to collect a specific file or files. matches = [] path_parts = os.getcwd().split('/') @@ -32,8 +32,8 @@ def validate_format(fix=False): """Check the format of python files in the tools directory. Arguments: - fix: a flag to indicate if fixes should be applied. - """ + fix: a flag to indicate if fixes should be applied. + """ fixes_required = False failed_update_files = set() successful_update_files = set() diff --git a/tools/config_validation/validate_fragment.py b/tools/config_validation/validate_fragment.py index bc8a9510b143..2b690004c3bd 100644 --- a/tools/config_validation/validate_fragment.py +++ b/tools/config_validation/validate_fragment.py @@ -25,15 +25,15 @@ def validate_fragment(type_name, fragment): """Validate a dictionary representing a JSON/YAML fragment against an Envoy API proto3 type. - Throws Protobuf errors on parsing exceptions, successful validations produce - no result. - - Args: - type_name: a string providing the type name, e.g. 
- envoy.config.bootstrap.v3.Bootstrap. - fragment: a dictionary representing the parsed JSON/YAML configuration - fragment. - """ + Throws Protobuf errors on parsing exceptions, successful validations produce + no result. + + Args: + type_name: a string providing the type name, e.g. + envoy.config.bootstrap.v3.Bootstrap. + fragment: a dictionary representing the parsed JSON/YAML configuration + fragment. + """ json_fragment = json.dumps(fragment) r = runfiles.Create() diff --git a/tools/dependency/cve_scan.py b/tools/dependency/cve_scan.py index 68b8b6603d58..76ce891863b4 100755 --- a/tools/dependency/cve_scan.py +++ b/tools/dependency/cve_scan.py @@ -62,7 +62,7 @@ class Cpe(namedtuple('CPE', ['part', 'vendor', 'product', 'version'])): - '''Model a subset of CPE fields that are used in CPE matching.''' + """Model a subset of CPE fields that are used in CPE matching.""" @classmethod def from_string(cls, cpe_str): @@ -75,18 +75,18 @@ def __str__(self): return f'cpe:2.3:{self.part}:{self.vendor}:{self.product}:{self.version}' def vendor_normalized(self): - '''Return a normalized CPE where only part and vendor are significant.''' + """Return a normalized CPE where only part and vendor are significant.""" return Cpe(self.part, self.vendor, '*', '*') def parse_cve_json(cve_json, cves, cpe_revmap): - '''Parse CVE JSON dictionary. + """Parse CVE JSON dictionary. - Args: - cve_json: a NIST CVE JSON dictionary. - cves: dictionary mapping CVE ID string to Cve object (output). - cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string. - ''' + Args: + cve_json: a NIST CVE JSON dictionary. + cves: dictionary mapping CVE ID string to Cve object (output). + cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string. 
+ """ # This provides an over-approximation of possible CPEs affected by CVE nodes # metadata; it traverses the entire AND-OR tree and just gathers every CPE @@ -123,14 +123,14 @@ def parse_cve_date(date_str): def download_cve_data(urls): - '''Download NIST CVE JSON databases from given URLs and parse. - - Args: - urls: a list of URLs. - Returns: - cves: dictionary mapping CVE ID string to Cve object (output). - cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string. - ''' + """Download NIST CVE JSON databases from given URLs and parse. + + Args: + urls: a list of URLs. + Returns: + cves: dictionary mapping CVE ID string to Cve object (output). + cpe_revmap: a reverse map from vendor normalized CPE to CVE ID string. + """ cves = {} cpe_revmap = defaultdict(set) for url in urls: @@ -144,7 +144,7 @@ def download_cve_data(urls): def format_cve_details(cve, deps): formatted_deps = ', '.join(sorted(deps)) wrapped_description = '\n '.join(textwrap.wrap(cve.description)) - return f''' + return f""" CVE ID: {cve.id} CVSS v3 score: {cve.score} Severity: {cve.severity} @@ -153,7 +153,7 @@ def format_cve_details(cve, deps): Dependencies: {formatted_deps} Description: {wrapped_description} Affected CPEs: - ''' + '\n '.join(f'- {cpe}' for cpe in cve.cpes) + """ + '\n '.join(f'- {cpe}' for cpe in cve.cpes) FUZZY_DATE_RE = re.compile('(\d{4}).?(\d{2}).?(\d{2})') @@ -161,15 +161,15 @@ def format_cve_details(cve, deps): def regex_groups_match(regex, lhs, rhs): - '''Do two strings match modulo a regular expression? - - Args: - regex: regular expression - lhs: LHS string - rhs: RHS string - Returns: - A boolean indicating match. - ''' + """Do two strings match modulo a regular expression? + + Args: + regex: regular expression + lhs: LHS string + rhs: RHS string + Returns: + A boolean indicating match. 
+ """ lhs_match = regex.search(lhs) if lhs_match: rhs_match = regex.search(rhs) @@ -179,22 +179,22 @@ def regex_groups_match(regex, lhs, rhs): def cpe_match(cpe, dep_metadata): - '''Heuristically match dependency metadata against CPE. - - We have a number of rules below that should are easy to compute without having - to look at the dependency metadata. In the future, with additional access to - repository information we could do the following: - - For dependencies at a non-release version, walk back through git history to - the last known release version and attempt a match with this. - - For dependencies at a non-release version, use the commit date to look for a - version match where version is YYYY-MM-DD. - - Args: - cpe: Cpe object to match against. - dep_metadata: dependency metadata dictionary. - Returns: - A boolean indicating a match. - ''' + """Heuristically match dependency metadata against CPE. + + We have a number of rules below that should are easy to compute without having + to look at the dependency metadata. In the future, with additional access to + repository information we could do the following: + - For dependencies at a non-release version, walk back through git history to + the last known release version and attempt a match with this. + - For dependencies at a non-release version, use the commit date to look for a + version match where version is YYYY-MM-DD. + + Args: + cpe: Cpe object to match against. + dep_metadata: dependency metadata dictionary. + Returns: + A boolean indicating a match. + """ dep_cpe = Cpe.from_string(dep_metadata['cpe']) dep_version = dep_metadata['version'] # The 'part' and 'vendor' must be an exact match. @@ -227,17 +227,17 @@ def cpe_match(cpe, dep_metadata): def cve_match(cve, dep_metadata): - '''Heuristically match dependency metadata against CVE. + """Heuristically match dependency metadata against CVE. 
- In general, we allow false positives but want to keep the noise low, to avoid - the toil around having to populate IGNORES_CVES. + In general, we allow false positives but want to keep the noise low, to avoid + the toil around having to populate IGNORES_CVES. - Args: - cve: Cve object to match against. - dep_metadata: dependency metadata dictionary. - Returns: - A boolean indicating a match. - ''' + Args: + cve: Cve object to match against. + dep_metadata: dependency metadata dictionary. + Returns: + A boolean indicating a match. + """ wildcard_version_match = False # Consider each CPE attached to the CVE for a match against the dependency CPE. for cpe in cve.cpes: @@ -258,18 +258,18 @@ def cve_match(cve, dep_metadata): def cve_scan(cves, cpe_revmap, cve_allowlist, repository_locations): - '''Scan for CVEs in a parsed NIST CVE database. - - Args: - cves: CVE dictionary as provided by download_cve_data(). - cve_revmap: CPE-CVE reverse map as provided by download_cve_data(). - cve_allowlist: an allowlist of CVE IDs to ignore. - repository_locations: a dictionary of dependency metadata in the format - described in api/bazel/external_deps.bzl. - Returns: - possible_cves: a dictionary mapping CVE IDs to Cve objects. - cve_deps: a dictionary mapping CVE IDs to dependency names. - ''' + """Scan for CVEs in a parsed NIST CVE database. + + Args: + cves: CVE dictionary as provided by download_cve_data(). + cve_revmap: CPE-CVE reverse map as provided by download_cve_data(). + cve_allowlist: an allowlist of CVE IDs to ignore. + repository_locations: a dictionary of dependency metadata in the format + described in api/bazel/external_deps.bzl. + Returns: + possible_cves: a dictionary mapping CVE IDs to Cve objects. + cve_deps: a dictionary mapping CVE IDs to dependency names. 
+ """ possible_cves = {} cve_deps = defaultdict(list) for dep, metadata in repository_locations.items(): diff --git a/tools/deprecate_version/deprecate_version.py b/tools/deprecate_version/deprecate_version.py index 2452f041f9c3..1687f167138f 100644 --- a/tools/deprecate_version/deprecate_version.py +++ b/tools/deprecate_version/deprecate_version.py @@ -55,10 +55,10 @@ def get_confirmation(): def create_issues(access_token, runtime_and_pr): """Create issues in GitHub for code to clean up old runtime guarded features. - Args: - access_token: GitHub access token (see comment at top of file). - runtime_and_pr: a list of runtime guards and the PRs and commits they were added. - """ + Args: + access_token: GitHub access token (see comment at top of file). + runtime_and_pr: a list of runtime guards and the PRs and commits they were added. + """ git = github.Github(access_token) repo = git.get_repo('envoyproxy/envoy') @@ -134,7 +134,7 @@ def create_issues(access_token, runtime_and_pr): def get_runtime_and_pr(): """Returns a list of tuples of [runtime features to deprecate, PR, commit the feature was added] - """ + """ repo = Repo(os.getcwd()) # grep source code looking for reloadable features which are true to find the diff --git a/tools/envoy_collect/envoy_collect.py b/tools/envoy_collect/envoy_collect.py index 4198b2062f00..3d6347a4c2d7 100755 --- a/tools/envoy_collect/envoy_collect.py +++ b/tools/envoy_collect/envoy_collect.py @@ -66,13 +66,13 @@ def fetch_url(url): def modify_envoy_config(config_path, perf, output_directory): """Modify Envoy config to support gathering logs, etc. - Args: - config_path: the command-line specified Envoy config path. - perf: boolean indicating whether in performance mode. - output_directory: directory path for additional generated files. - Returns: - (modified Envoy config path, list of additional files to collect) - """ + Args: + config_path: the command-line specified Envoy config path. 
+ perf: boolean indicating whether in performance mode. + output_directory: directory path for additional generated files. + Returns: + (modified Envoy config path, list of additional files to collect) + """ # No modifications yet when in performance profiling mode. if perf: return config_path, [] @@ -106,15 +106,15 @@ def modify_envoy_config(config_path, perf, output_directory): def run_envoy(envoy_shcmd_args, envoy_log_path, admin_address_path, dump_handlers_paths): """Run Envoy subprocess and trigger admin endpoint gathering on SIGINT. - Args: - envoy_shcmd_args: list of Envoy subprocess args. - envoy_log_path: path to write Envoy stderr log to. - admin_address_path: path to where admin address is written by Envoy. - dump_handlers_paths: map from admin endpoint handler to path to where the respective contents - are to be written. - Returns: - The Envoy subprocess exit code. - """ + Args: + envoy_shcmd_args: list of Envoy subprocess args. + envoy_log_path: path to write Envoy stderr log to. + admin_address_path: path to where admin address is written by Envoy. + dump_handlers_paths: map from admin endpoint handler to path to where the respective contents + are to be written. + Returns: + The Envoy subprocess exit code. + """ envoy_shcmd = ' '.join(map(pipes.quote, envoy_shcmd_args)) print(envoy_shcmd) @@ -156,10 +156,10 @@ def signal_handler(signum, frame): def envoy_collect(parse_result, unknown_args): """Run Envoy and collect its artifacts. - Args: - parse_result: Namespace object with envoy_collect.py's args. - unknown_args: list of remaining args to pass to Envoy binary. - """ + Args: + parse_result: Namespace object with envoy_collect.py's args. + unknown_args: list of remaining args to pass to Envoy binary. + """ # Are we in performance mode? Otherwise, debug. perf = parse_result.performance return_code = 1 # Non-zero default return. 
diff --git a/tools/envoy_headersplit/headersplit.py b/tools/envoy_headersplit/headersplit.py index 60661516a179..7e26b3940df5 100644 --- a/tools/envoy_headersplit/headersplit.py +++ b/tools/envoy_headersplit/headersplit.py @@ -18,15 +18,15 @@ def to_filename(classname: str) -> str: """ - maps mock class name (in C++ codes) to filenames under the Envoy naming convention. - e.g. map "MockAdminStream" to "admin_stream" + maps mock class name (in C++ codes) to filenames under the Envoy naming convention. + e.g. map "MockAdminStream" to "admin_stream" - Args: - classname: mock class name from source + Args: + classname: mock class name from source - Returns: - corresponding file name - """ + Returns: + corresponding file name + """ filename = classname.replace("Mock", "", 1) # Remove only first "Mock" ret = "" for index, val in enumerate(filename): @@ -38,32 +38,32 @@ def to_filename(classname: str) -> str: def get_directives(translation_unit: Type[TranslationUnit]) -> str: """ - "extracts" all header includes statements and other directives from the target source code file - - for instance: - foo.h: - #pragma once - #include "a.h" - #include "b.h" - - int foo(){ - } - this function should return - '#pragma once\n#include "a.h"\n#include "b.h"' - - Args: - translation_unit: parsing result of target source code by libclang - - Returns: - A string, contains all includes statements and other preprocessor directives before the - first non-directive statement. - - Notes: - clang lib provides API like tranlation_unit.get_includes() to get include directives. - But we can't use it as it requires presence of the included files to return the full list. - We choose to return the string instead of list of includes since we will simply copy-paste - the include statements into generated headers. 
Return string seems more convenient - """ + "extracts" all header includes statements and other directives from the target source code file + + for instance: + foo.h: + #pragma once + #include "a.h" + #include "b.h" + + int foo(){ + } + this function should return + '#pragma once\n#include "a.h"\n#include "b.h"' + + Args: + translation_unit: parsing result of target source code by libclang + + Returns: + A string, contains all includes statements and other preprocessor directives before the + first non-directive statement. + + Notes: + clang lib provides API like tranlation_unit.get_includes() to get include directives. + But we can't use it as it requires presence of the included files to return the full list. + We choose to return the string instead of list of includes since we will simply copy-paste + the include statements into generated headers. Return string seems more convenient + """ cursor = translation_unit.cursor for descendant in cursor.walk_preorder(): if descendant.location.file is not None and descendant.location.file.name == cursor.displayname: @@ -75,14 +75,14 @@ def get_directives(translation_unit: Type[TranslationUnit]) -> str: def cursors_in_same_file(cursor: Cursor) -> List[Cursor]: """ - get all child cursors which are pointing to the same file as the input cursor + get all child cursors which are pointing to the same file as the input cursor - Args: - cursor: cursor of parsing result of target source code by libclang + Args: + cursor: cursor of parsing result of target source code by libclang - Returns: - a list of cursor - """ + Returns: + a list of cursor + """ cursors = [] for descendant in cursor.walk_preorder(): # We don't want Cursors from files other than the input file, @@ -99,14 +99,14 @@ def cursors_in_same_file(cursor: Cursor) -> List[Cursor]: def class_definitions(cursor: Cursor) -> List[Cursor]: """ - extracts all class definitions in the file pointed by cursor. 
(typical mocks.h) + extracts all class definitions in the file pointed by cursor. (typical mocks.h) - Args: - cursor: cursor of parsing result of target source code by libclang + Args: + cursor: cursor of parsing result of target source code by libclang - Returns: - a list of cursor, each pointing to a class definition. - """ + Returns: + a list of cursor, each pointing to a class definition. + """ cursors = cursors_in_same_file(cursor) class_cursors = [] for descendant in cursors: @@ -124,14 +124,14 @@ def class_definitions(cursor: Cursor) -> List[Cursor]: def class_implementations(cursor: Cursor) -> List[Cursor]: """ - extracts all class implementation in the file pointed by cursor. (typical mocks.cc) + extracts all class implementation in the file pointed by cursor. (typical mocks.cc) - Args: - cursor: cursor of parsing result of target source code by libclang + Args: + cursor: cursor of parsing result of target source code by libclang - Returns: - a list of cursor, each pointing to a class implementation. - """ + Returns: + a list of cursor, each pointing to a class implementation. + """ cursors = cursors_in_same_file(cursor) impl_cursors = [] for descendant in cursors: @@ -147,22 +147,22 @@ def class_implementations(cursor: Cursor) -> List[Cursor]: def extract_definition(cursor: Cursor, classnames: List[str]) -> Tuple[str, str, List[str]]: """ - extracts class definition source code pointed by the cursor parameter. - and find dependent mock classes by naming look up. - - Args: - cursor: libclang cursor pointing to the target mock class definition. - classnames: all mock class names defined in the definition header that needs to be - divided, used to parse class dependencies. - Returns: - class_name: a string representing the mock class name. - class_defn: a string contains the whole class definition body. - deps: a set of string contains all dependent classes for the return class. 
- - Note: - It can not detect and resolve forward declaration and cyclic dependency. Need to address - manually. - """ + extracts class definition source code pointed by the cursor parameter. + and find dependent mock classes by naming look up. + + Args: + cursor: libclang cursor pointing to the target mock class definition. + classnames: all mock class names defined in the definition header that needs to be + divided, used to parse class dependencies. + Returns: + class_name: a string representing the mock class name. + class_defn: a string contains the whole class definition body. + deps: a set of string contains all dependent classes for the return class. + + Note: + It can not detect and resolve forward declaration and cyclic dependency. Need to address + manually. + """ filename = cursor.location.file.name contents = read_file_contents(filename) class_name = cursor.spelling @@ -186,42 +186,42 @@ def extract_definition(cursor: Cursor, classnames: List[str]) -> Tuple[str, str, def get_implline(cursor: Cursor) -> int: """ - finds the first line of implementation source code for class method pointed by the cursor - parameter. - - Args: - cursor: libclang cursor pointing to the target mock class definition. - - Returns: - an integer, the line number of the first line of the corresponding method implementation - code (zero indexed) - - Note: - This function return line number only. Because in certain case libclang will fail in parsing - the method body and stops parsing early (see headersplit_test.test_class_implementations_error - for details). To address this issue when parsing implementation code, we passed the flag that - ask clang to ignore function bodies. - We can not get the function body directly with the same way we used in extract_definition() - since clang didn't parse function this time. Though we can't get the correct method extent - offset from Cursor, we can still get the start line of the corresponding method instead. 
- (We can't get the correct line number for the last line due to skipping function bodies) - """ + finds the first line of implementation source code for class method pointed by the cursor + parameter. + + Args: + cursor: libclang cursor pointing to the target mock class definition. + + Returns: + an integer, the line number of the first line of the corresponding method implementation + code (zero indexed) + + Note: + This function return line number only. Because in certain case libclang will fail in parsing + the method body and stops parsing early (see headersplit_test.test_class_implementations_error + for details). To address this issue when parsing implementation code, we passed the flag that + ask clang to ignore function bodies. + We can not get the function body directly with the same way we used in extract_definition() + since clang didn't parse function this time. Though we can't get the correct method extent + offset from Cursor, we can still get the start line of the corresponding method instead. + (We can't get the correct line number for the last line due to skipping function bodies) + """ return cursor.extent.start.line - 1 def extract_implementations(impl_cursors: List[Cursor], source_code: str) -> Dict[str, str]: """ - extracts method function body for each cursor in list impl_cursors from source code - groups those function bodies with class name to help generating the divided {classname}.cc - returns a dict maps class name to the concatenation of all its member methods implementations. + extracts method function body for each cursor in list impl_cursors from source code + groups those function bodies with class name to help generating the divided {classname}.cc + returns a dict maps class name to the concatenation of all its member methods implementations. - Args: - impl_cursors: list of cursors, each pointing to a mock class member function implementation. - source_code: string, the source code for implementations (e.g. 
mocks.cc) + Args: + impl_cursors: list of cursors, each pointing to a mock class member function implementation. + source_code: string, the source code for implementations (e.g. mocks.cc) - Returns: - classname_to_impl: a dict maps class name to its member methods implementations - """ + Returns: + classname_to_impl: a dict maps class name to its member methods implementations + """ classname_to_impl = dict() for i, cursor in enumerate(impl_cursors): classname = cursor.semantic_parent.spelling @@ -251,31 +251,31 @@ def extract_implementations(impl_cursors: List[Cursor], source_code: str) -> Dic def get_enclosing_namespace(defn: Cursor) -> Tuple[str, str]: """ - retrieves all enclosing namespaces for the class pointed by defn. - this is necessary to construct the mock class header - e.g.: - defn is pointing MockClass in the follow source code: - - namespace Envoy { - namespace Server { - class MockClass2 {...} - namespace Configuration { - class MockClass {...} - ^ - defn - } - } - } - - this function will return: - "namespace Envoy {\nnamespace Server {\nnamespace Configuration{\n" and "\n}\n}\n}\n" - - Args: - defn: libclang Cursor pointing to a mock class - - Returns: - namespace_prefix, namespace_suffix: a pair of string, representing the enclosing namespaces - """ + retrieves all enclosing namespaces for the class pointed by defn. 
+ this is necessary to construct the mock class header + e.g.: + defn is pointing MockClass in the follow source code: + + namespace Envoy { + namespace Server { + class MockClass2 {...} + namespace Configuration { + class MockClass {...} + ^ + defn + } + } + } + + this function will return: + "namespace Envoy {\nnamespace Server {\nnamespace Configuration{\n" and "\n}\n}\n}\n" + + Args: + defn: libclang Cursor pointing to a mock class + + Returns: + namespace_prefix, namespace_suffix: a pair of string, representing the enclosing namespaces + """ namespace_prefix = "" namespace_suffix = "" parent_cursor = defn.semantic_parent @@ -318,8 +318,8 @@ def write_file_contents(class_name, class_defn, class_impl): def main(args): """ - divides the monolithic mock file into different mock class files. - """ + divides the monolithic mock file into different mock class files. + """ decl_filename = args["decl"] impl_filename = args["impl"] idx = Index.create() diff --git a/tools/envoy_headersplit/replace_includes.py b/tools/envoy_headersplit/replace_includes.py index f418cd37b743..bc962d9e8adf 100644 --- a/tools/envoy_headersplit/replace_includes.py +++ b/tools/envoy_headersplit/replace_includes.py @@ -20,16 +20,16 @@ class Server::MockAdmin. def to_classname(filename: str) -> str: """ - maps divided mock class file name to class names - inverse function of headersplit.to_filename - e.g. map "test/mocks/server/admin_stream.h" to "MockAdminStream" + maps divided mock class file name to class names + inverse function of headersplit.to_filename + e.g. 
map "test/mocks/server/admin_stream.h" to "MockAdminStream" - Args: - filename: string, mock class header file name (might be the whole path instead of the base name) + Args: + filename: string, mock class header file name (might be the whole path instead of the base name) - Returns: - corresponding class name - """ + Returns: + corresponding class name + """ classname_tokens = filename.split('/')[-1].replace('.h', '').split('_') classname = "Mock" + ''.join(map(lambda x: x[:1].upper() + x[1:], classname_tokens)) return classname @@ -37,16 +37,16 @@ def to_classname(filename: str) -> str: def to_bazelname(filename: str, mockname: str) -> str: """ - maps divided mock class file name to bazel target name - e.g. map "test/mocks/server/admin_stream.h" to "//test/mocks/server:admin_stream_mocks" + maps divided mock class file name to bazel target name + e.g. map "test/mocks/server/admin_stream.h" to "//test/mocks/server:admin_stream_mocks" - Args: - filename: string, mock class header file name (might be the whole path instead of the base name) - mockname: string, mock directory name + Args: + filename: string, mock class header file name (might be the whole path instead of the base name) + mockname: string, mock directory name - Returns: - corresponding bazel target name - """ + Returns: + corresponding bazel target name + """ bazelname = "//test/mocks/{}:".format(mockname) bazelname += filename.split('/')[-1].replace('.h', '') + '_mocks'.format(mockname) return bazelname @@ -54,14 +54,14 @@ def to_bazelname(filename: str, mockname: str) -> str: def get_filenames(mockname: str) -> List[str]: """ - scans all headers in test/mocks/{mockname}, return corresponding file names + scans all headers in test/mocks/{mockname}, return corresponding file names - Args: - mockname: string, mock directory name + Args: + mockname: string, mock directory name - Returns: - List of file name for the headers in test/mock/{mocksname} - """ + Returns: + List of file name for the headers in 
test/mock/{mocksname} + """ dir = Path("test/mocks/{}/".format(mockname)) filenames = list(map(str, dir.glob('*.h'))) return filenames diff --git a/tools/proto_format/proto_sync.py b/tools/proto_format/proto_sync.py index 6bcb1d6dc636..047566c91b17 100755 --- a/tools/proto_format/proto_sync.py +++ b/tools/proto_format/proto_sync.py @@ -73,18 +73,18 @@ def __init__(self, message): def get_directory_from_package(package): """Get directory path from package name or full qualified message name - Args: - package: the full qualified name of package or message. - """ + Args: + package: the full qualified name of package or message. + """ return '/'.join(s for s in package.split('.') if s and s[0].islower()) def get_destination_path(src): """Obtain destination path from a proto file path by reading its package statement. - Args: - src: source path - """ + Args: + src: source path + """ src_path = pathlib.Path(src) contents = src_path.read_text(encoding='utf8') matches = re.findall(PACKAGE_REGEX, contents) @@ -98,12 +98,12 @@ def get_destination_path(src): def get_abs_rel_destination_path(dst_root, src): """Obtain absolute path from a proto file path combined with destination root. - Creates the parent directory if necessary. + Creates the parent directory if necessary. - Args: - dst_root: destination root path. - src: source path. - """ + Args: + dst_root: destination root path. + src: source path. + """ rel_dst_path = get_destination_path(src) dst = dst_root.joinpath(rel_dst_path) dst.parent.mkdir(0o755, parents=True, exist_ok=True) @@ -113,10 +113,10 @@ def get_abs_rel_destination_path(dst_root, src): def proto_print(src, dst): """Pretty-print FileDescriptorProto to a destination file. - Args: - src: source path for FileDescriptorProto. - dst: destination path for formatted proto. - """ + Args: + src: source path for FileDescriptorProto. + dst: destination path for formatted proto. 
+ """ print('proto_print %s' % dst) subprocess.check_output([ 'bazel-bin/tools/protoxform/protoprint', src, @@ -128,11 +128,11 @@ def proto_print(src, dst): def merge_active_shadow(active_src, shadow_src, dst): """Merge active/shadow FileDescriptorProto to a destination file. - Args: - active_src: source path for active FileDescriptorProto. - shadow_src: source path for active FileDescriptorProto. - dst: destination path for FileDescriptorProto. - """ + Args: + active_src: source path for active FileDescriptorProto. + shadow_src: source path for active FileDescriptorProto. + dst: destination path for FileDescriptorProto. + """ print('merge_active_shadow %s' % dst) subprocess.check_output([ 'bazel-bin/tools/protoxform/merge_active_shadow', @@ -145,14 +145,14 @@ def merge_active_shadow(active_src, shadow_src, dst): def sync_proto_file(dst_srcs): """Pretty-print a proto descriptor from protoxform.py Bazel cache artifacts." - In the case where we are generating an Envoy internal shadow, it may be - necessary to combine the current active proto, subject to hand editing, with - shadow artifacts from the previous verion; this is done via - merge_active_shadow(). + In the case where we are generating an Envoy internal shadow, it may be + necessary to combine the current active proto, subject to hand editing, with + shadow artifacts from the previous verion; this is done via + merge_active_shadow(). - Args: - dst_srcs: destination/sources path tuple. - """ + Args: + dst_srcs: destination/sources path tuple. + """ dst, srcs = dst_srcs assert (len(srcs) > 0) # If we only have one candidate source for a destination, just pretty-print. @@ -184,12 +184,12 @@ def sync_proto_file(dst_srcs): def get_import_deps(proto_path): """Obtain the Bazel dependencies for the import paths from a .proto file. - Args: - proto_path: path to .proto. + Args: + proto_path: path to .proto. - Returns: - A list of Bazel targets reflecting the imports in the .proto at proto_path. 
- """ + Returns: + A list of Bazel targets reflecting the imports in the .proto at proto_path. + """ imports = [] with open(proto_path, 'r', encoding='utf8') as f: for line in f: @@ -227,14 +227,14 @@ def get_import_deps(proto_path): def get_previous_message_type_deps(proto_path): """Obtain the Bazel dependencies for the previous version of messages in a .proto file. - We need to link in earlier proto descriptors to support Envoy reflection upgrades. + We need to link in earlier proto descriptors to support Envoy reflection upgrades. - Args: - proto_path: path to .proto. + Args: + proto_path: path to .proto. - Returns: - A list of Bazel targets reflecting the previous message types in the .proto at proto_path. - """ + Returns: + A list of Bazel targets reflecting the previous message types in the .proto at proto_path. + """ contents = pathlib.Path(proto_path).read_text(encoding='utf8') matches = re.findall(PREVIOUS_MESSAGE_TYPE_REGEX, contents) deps = [] @@ -247,12 +247,12 @@ def get_previous_message_type_deps(proto_path): def has_services(proto_path): """Does a .proto file have any service definitions? - Args: - proto_path: path to .proto. + Args: + proto_path: path to .proto. - Returns: - True iff there are service definitions in the .proto at proto_path. - """ + Returns: + True iff there are service definitions in the .proto at proto_path. + """ with open(proto_path, 'r', encoding='utf8') as f: for line in f: if re.match(SERVICE_REGEX, line): @@ -268,13 +268,13 @@ def build_order_key(key): def build_file_contents(root, files): """Compute the canonical BUILD contents for an api/ proto directory. - Args: - root: base path to directory. - files: a list of files in the directory. + Args: + root: base path to directory. + files: a list of files in the directory. - Returns: - A string containing the canonical BUILD file content for root. - """ + Returns: + A string containing the canonical BUILD file content for root. 
+ """ import_deps = set(sum([get_import_deps(os.path.join(root, f)) for f in files], [])) history_deps = set( sum([get_previous_message_type_deps(os.path.join(root, f)) for f in files], [])) @@ -297,9 +297,9 @@ def build_file_contents(root, files): def sync_build_files(cmd, dst_root): """Diff or in-place update api/ BUILD files. - Args: - cmd: 'check' or 'fix'. - """ + Args: + cmd: 'check' or 'fix'. + """ for root, dirs, files in os.walk(str(dst_root)): is_proto_dir = any(f.endswith('.proto') for f in files) if not is_proto_dir: @@ -312,12 +312,12 @@ def sync_build_files(cmd, dst_root): def generate_current_api_dir(api_dir, dst_dir): """Helper function to generate original API repository to be compared with diff. - This copies the original API repository and deletes file we don't want to compare. + This copies the original API repository and deletes file we don't want to compare. - Args: - api_dir: the original api directory - dst_dir: the api directory to be compared in temporary directory - """ + Args: + api_dir: the original api directory + dst_dir: the api directory to be compared in temporary directory + """ dst = dst_dir.joinpath("envoy") shutil.copytree(str(api_dir.joinpath("envoy")), str(dst)) @@ -335,12 +335,12 @@ def git_status(path): def git_modified_files(path, suffix): """Obtain a list of modified files since the last commit merged by GitHub. - Args: - path: path to examine. - suffix: path suffix to filter with. - Return: - A list of strings providing the paths of modified files in the repo. - """ + Args: + path: path to examine. + suffix: path suffix to filter with. + Return: + A list of strings providing the paths of modified files in the repo. 
+ """ try: modified_files = subprocess.check_output( ['tools/git/modified_since_last_github_commit.sh', 'api', 'proto']).decode().split() diff --git a/tools/protodoc/protodoc.py b/tools/protodoc/protodoc.py index 7593f6f92f6e..d464f47918e9 100755 --- a/tools/protodoc/protodoc.py +++ b/tools/protodoc/protodoc.py @@ -137,12 +137,12 @@ def hide_not_implemented(comment): def github_url(type_context): """Obtain data plane API Github URL by path from a TypeContext. - Args: - type_context: type_context.TypeContext for node. + Args: + type_context: type_context.TypeContext for node. - Returns: - A string with a corresponding data plane API GitHub Url. - """ + Returns: + A string with a corresponding data plane API GitHub Url. + """ if type_context.location is not None: return DATA_PLANE_API_URL_FMT % (type_context.source_code_info.name, type_context.location.span[0]) @@ -152,14 +152,14 @@ def github_url(type_context): def format_comment_with_annotations(comment, type_name=''): """Format a comment string with additional RST for annotations. - Args: - comment: comment string. - type_name: optional, 'message' or 'enum' may be specified for additional - message/enum specific annotations. + Args: + comment: comment string. + type_name: optional, 'message' or 'enum' may be specified for additional + message/enum specific annotations. - Returns: - A string with additional RST from annotations. - """ + Returns: + A string with additional RST from annotations. + """ formatted_extension = '' if annotations.EXTENSION_ANNOTATION in comment.annotations: extension = comment.annotations[annotations.EXTENSION_ANNOTATION] @@ -175,13 +175,13 @@ def format_comment_with_annotations(comment, type_name=''): def map_lines(f, s): """Apply a function across each line in a flat string. - Args: - f: A string transform function for a line. - s: A string consisting of potentially multiple lines. + Args: + f: A string transform function for a line. 
+ s: A string consisting of potentially multiple lines. - Returns: - A flat string with f applied to each line. - """ + Returns: + A flat string with f applied to each line. + """ return '\n'.join(f(line) for line in s.split('\n')) @@ -206,25 +206,25 @@ def format_external_link(text, ref): def format_header(style, text): """Format RST header. - Args: - style: underline style, e.g. '=', '-'. - text: header text + Args: + style: underline style, e.g. '=', '-'. + text: header text - Returns: - RST formatted header. - """ + Returns: + RST formatted header. + """ return '%s\n%s\n\n' % (text, style * len(text)) def format_extension(extension): """Format extension metadata as RST. - Args: - extension: the name of the extension, e.g. com.acme.foo. + Args: + extension: the name of the extension, e.g. com.acme.foo. - Returns: - RST formatted extension description. - """ + Returns: + RST formatted extension description. + """ try: extension_metadata = EXTENSION_DB[extension] status = EXTENSION_STATUS_VALUES.get(extension_metadata['status'], '') @@ -245,12 +245,12 @@ def format_extension(extension): def format_extension_category(extension_category): """Format extension metadata as RST. - Args: - extension_category: the name of the extension_category, e.g. com.acme. + Args: + extension_category: the name of the extension_category, e.g. com.acme. - Returns: - RST formatted extension category description. - """ + Returns: + RST formatted extension category description. + """ try: extensions = EXTENSION_CATEGORIES[extension_category] except KeyError as e: @@ -262,15 +262,15 @@ def format_extension_category(extension_category): def format_header_from_file(style, source_code_info, proto_name): """Format RST header based on special file level title - Args: - style: underline style, e.g. '=', '-'. - source_code_info: SourceCodeInfo object. - proto_name: If the file_level_comment does not contain a user specified - title, use this as page title. + Args: + style: underline style, e.g. 
'=', '-'. + source_code_info: SourceCodeInfo object. + proto_name: If the file_level_comment does not contain a user specified + title, use this as page title. - Returns: - RST formatted header, and file level comment without page title strings. - """ + Returns: + RST formatted header, and file level comment without page title strings. + """ anchor = format_anchor(file_cross_ref_label(proto_name)) stripped_comment = annotations.without_annotations( strip_leading_space('\n'.join(c + '\n' for c in source_code_info.file_level_comments))) @@ -288,11 +288,11 @@ def format_header_from_file(style, source_code_info, proto_name): def format_field_type_as_json(type_context, field): """Format FieldDescriptorProto.Type as a pseudo-JSON string. - Args: - type_context: contextual information for message/enum/field. - field: FieldDescriptor proto. - Return: RST formatted pseudo-JSON string representation of field type. - """ + Args: + type_context: contextual information for message/enum/field. + field: FieldDescriptor proto. + Return: RST formatted pseudo-JSON string representation of field type. + """ if type_name_from_fqn(field.type_name) in type_context.map_typenames: return '"{...}"' if field.label == field.LABEL_REPEATED: @@ -305,11 +305,11 @@ def format_field_type_as_json(type_context, field): def format_message_as_json(type_context, msg): """Format a message definition DescriptorProto as a pseudo-JSON block. - Args: - type_context: contextual information for message/enum/field. - msg: message definition DescriptorProto. - Return: RST formatted pseudo-JSON string representation of message definition. - """ + Args: + type_context: contextual information for message/enum/field. + msg: message definition DescriptorProto. + Return: RST formatted pseudo-JSON string representation of message definition. 
+ """ lines = [] for index, field in enumerate(msg.field): field_type_context = type_context.extend_field(index, field.name) @@ -327,14 +327,14 @@ def format_message_as_json(type_context, msg): def normalize_field_type_name(field_fqn): """Normalize a fully qualified field type name, e.g. - .envoy.foo.bar. + .envoy.foo.bar. - Strips leading ENVOY_API_NAMESPACE_PREFIX and ENVOY_PREFIX. + Strips leading ENVOY_API_NAMESPACE_PREFIX and ENVOY_PREFIX. - Args: - field_fqn: a fully qualified type name from FieldDescriptorProto.type_name. - Return: Normalized type name. - """ + Args: + field_fqn: a fully qualified type name from FieldDescriptorProto.type_name. + Return: Normalized type name. + """ if field_fqn.startswith(ENVOY_API_NAMESPACE_PREFIX): return field_fqn[len(ENVOY_API_NAMESPACE_PREFIX):] if field_fqn.startswith(ENVOY_PREFIX): @@ -345,14 +345,14 @@ def normalize_field_type_name(field_fqn): def normalize_type_context_name(type_name): """Normalize a type name, e.g. - envoy.foo.bar. + envoy.foo.bar. - Strips leading ENVOY_API_NAMESPACE_PREFIX and ENVOY_PREFIX. + Strips leading ENVOY_API_NAMESPACE_PREFIX and ENVOY_PREFIX. - Args: - type_name: a name from a TypeContext. - Return: Normalized type name. - """ + Args: + type_name: a name from a TypeContext. + Return: Normalized type name. + """ return normalize_field_type_name(qualify_type_name(type_name)) @@ -367,14 +367,14 @@ def type_name_from_fqn(fqn): def format_field_type(type_context, field): """Format a FieldDescriptorProto type description. - Adds cross-refs for message types. - TODO(htuch): Add cross-refs for enums as well. + Adds cross-refs for message types. + TODO(htuch): Add cross-refs for enums as well. - Args: - type_context: contextual information for message/enum/field. - field: FieldDescriptor proto. - Return: RST formatted field type. - """ + Args: + type_context: contextual information for message/enum/field. + field: FieldDescriptor proto. + Return: RST formatted field type. 
+ """ if field.type_name.startswith(ENVOY_API_NAMESPACE_PREFIX) or field.type_name.startswith( ENVOY_PREFIX): type_name = normalize_field_type_name(field.type_name) @@ -490,15 +490,15 @@ def format_field_as_definition_list_item(outer_type_context, type_context, field protodoc_manifest): """Format a FieldDescriptorProto as RST definition list item. - Args: - outer_type_context: contextual information for enclosing message. - type_context: contextual information for message/enum/field. - field: FieldDescriptorProto. - protodoc_manifest: tools.protodoc.Manifest for proto. + Args: + outer_type_context: contextual information for enclosing message. + type_context: contextual information for message/enum/field. + field: FieldDescriptorProto. + protodoc_manifest: tools.protodoc.Manifest for proto. - Returns: - RST formatted definition list item. - """ + Returns: + RST formatted definition list item. + """ field_annotations = [] anchor = format_anchor(field_cross_ref_label(normalize_type_context_name(type_context.name))) @@ -566,14 +566,14 @@ def format_field_as_definition_list_item(outer_type_context, type_context, field def format_message_as_definition_list(type_context, msg, protodoc_manifest): """Format a DescriptorProto as RST definition list. - Args: - type_context: contextual information for message/enum/field. - msg: DescriptorProto. - protodoc_manifest: tools.protodoc.Manifest for proto. + Args: + type_context: contextual information for message/enum/field. + msg: DescriptorProto. + protodoc_manifest: tools.protodoc.Manifest for proto. - Returns: - RST formatted definition list item. - """ + Returns: + RST formatted definition list item. 
+ """ type_context.oneof_fields = defaultdict(list) type_context.oneof_required = defaultdict(bool) type_context.oneof_names = defaultdict(list) @@ -597,13 +597,13 @@ def format_message_as_definition_list(type_context, msg, protodoc_manifest): def format_enum_value_as_definition_list_item(type_context, enum_value): """Format a EnumValueDescriptorProto as RST definition list item. - Args: - type_context: contextual information for message/enum/field. - enum_value: EnumValueDescriptorProto. + Args: + type_context: contextual information for message/enum/field. + enum_value: EnumValueDescriptorProto. - Returns: - RST formatted definition list item. - """ + Returns: + RST formatted definition list item. + """ anchor = format_anchor( enum_value_cross_ref_label(normalize_type_context_name(type_context.name))) default_comment = '*(DEFAULT)* ' if enum_value.number == 0 else '' @@ -618,13 +618,13 @@ def format_enum_value_as_definition_list_item(type_context, enum_value): def format_enum_as_definition_list(type_context, enum): """Format a EnumDescriptorProto as RST definition list. - Args: - type_context: contextual information for message/enum/field. - enum: DescriptorProto. + Args: + type_context: contextual information for message/enum/field. + enum: DescriptorProto. - Returns: - RST formatted definition list item. - """ + Returns: + RST formatted definition list item. + """ return '\n'.join( format_enum_value_as_definition_list_item( type_context.extend_enum_value(index, enum_value.name), enum_value) @@ -634,16 +634,16 @@ def format_enum_as_definition_list(type_context, enum): def format_proto_as_block_comment(proto): """Format a proto as a RST block comment. - Useful in debugging, not usually referenced. - """ + Useful in debugging, not usually referenced. + """ return '\n\nproto::\n\n' + map_lines(functools.partial(indent, 2), str(proto)) + '\n' class RstFormatVisitor(visitor.Visitor): """Visitor to generate a RST representation from a FileDescriptor proto. 
- See visitor.Visitor for visitor method docs comments. - """ + See visitor.Visitor for visitor method docs comments. + """ def __init__(self): r = runfiles.Create() diff --git a/tools/protoxform/migrate.py b/tools/protoxform/migrate.py index 61a53f3a2e10..052b29a23245 100644 --- a/tools/protoxform/migrate.py +++ b/tools/protoxform/migrate.py @@ -21,8 +21,8 @@ class UpgradeVisitor(visitor.Visitor): """Visitor to generate an upgraded proto from a FileDescriptor proto. - See visitor.Visitor for visitor method docs comments. - """ + See visitor.Visitor for visitor method docs comments. + """ def __init__(self, n, typedb, envoy_internal_shadow, package_version_status): self._base_version = n @@ -98,10 +98,10 @@ def _upgraded_type(self, t): def _deprecate(self, proto, field_or_value): """Deprecate a field or value in a message/enum proto. - Args: - proto: DescriptorProto or EnumDescriptorProto message. - field_or_value: field or value inside proto. - """ + Args: + proto: DescriptorProto or EnumDescriptorProto message. + field_or_value: field or value inside proto. + """ if self._envoy_internal_shadow: field_or_value.name = 'hidden_envoy_deprecated_' + field_or_value.name else: @@ -114,10 +114,10 @@ def _deprecate(self, proto, field_or_value): def _rename(self, proto, migrate_annotation): """Rename a field/enum/service/message - Args: - proto: DescriptorProto or corresponding proto message - migrate_annotation: udpa.annotations.MigrateAnnotation message - """ + Args: + proto: DescriptorProto or corresponding proto message + migrate_annotation: udpa.annotations.MigrateAnnotation message + """ if migrate_annotation.rename: proto.name = migrate_annotation.rename migrate_annotation.rename = "" @@ -125,11 +125,11 @@ def _rename(self, proto, migrate_annotation): def _oneof_promotion(self, msg_proto, field_proto, migrate_annotation): """Promote a field to a oneof. - Args: - msg_proto: DescriptorProto for message containing field. - field_proto: FieldDescriptorProto for field. 
- migrate_annotation: udpa.annotations.FieldMigrateAnnotation message - """ + Args: + msg_proto: DescriptorProto for message containing field. + field_proto: FieldDescriptorProto for field. + migrate_annotation: udpa.annotations.FieldMigrateAnnotation message + """ if migrate_annotation.oneof_promotion: oneof_index = -1 for n, oneof_decl in enumerate(msg_proto.oneof_decl): @@ -244,15 +244,15 @@ def visit_file(self, file_proto, type_context, services, msgs, enums): def version_upgrade_xform(n, envoy_internal_shadow, file_proto, params): """Transform a FileDescriptorProto from vN[alpha\d] to v(N+1). - Args: - n: version N to upgrade from. - envoy_internal_shadow: generate a shadow for Envoy internal use containing deprecated fields. - file_proto: vN[alpha\d] FileDescriptorProto message. - params: plugin parameters. + Args: + n: version N to upgrade from. + envoy_internal_shadow: generate a shadow for Envoy internal use containing deprecated fields. + file_proto: vN[alpha\d] FileDescriptorProto message. + params: plugin parameters. - Returns: - v(N+1) FileDescriptorProto message. - """ + Returns: + v(N+1) FileDescriptorProto message. + """ # Load type database. if params['type_db_path']: utils.load_type_db(params['type_db_path']) diff --git a/tools/protoxform/options.py b/tools/protoxform/options.py index 41dd5a3e517d..2bff6cf02259 100644 --- a/tools/protoxform/options.py +++ b/tools/protoxform/options.py @@ -6,11 +6,11 @@ def add_hide_option(options): """Mark message/enum/field/enum value as hidden. - Hidden messages are ignored when generating output. + Hidden messages are ignored when generating output. - Args: - options: MessageOptions/EnumOptions/FieldOptions/EnumValueOptions message. - """ + Args: + options: MessageOptions/EnumOptions/FieldOptions/EnumValueOptions message. 
+ """ hide_option = options.uninterpreted_option.add() hide_option.name.add().name_part = 'protoxform_hide' @@ -18,13 +18,13 @@ def add_hide_option(options): def has_hide_option(options): """Is message/enum/field/enum value hidden? - Hidden messages are ignored when generating output. + Hidden messages are ignored when generating output. - Args: - options: MessageOptions/EnumOptions/FieldOptions/EnumValueOptions message. - Returns: - Hidden status. - """ + Args: + options: MessageOptions/EnumOptions/FieldOptions/EnumValueOptions message. + Returns: + Hidden status. + """ return any( option.name[0].name_part == 'protoxform_hide' for option in options.uninterpreted_option) @@ -32,25 +32,25 @@ def has_hide_option(options): def set_versioning_annotation(options, previous_message_type): """Set the udpa.annotations.versioning option. - Used by Envoy to chain back through the message type history. + Used by Envoy to chain back through the message type history. - Args: - options: MessageOptions message. - previous_message_type: string with earlier API type name for the message. - """ + Args: + options: MessageOptions message. + previous_message_type: string with earlier API type name for the message. + """ options.Extensions[versioning_pb2.versioning].previous_message_type = previous_message_type def get_versioning_annotation(options): """Get the udpa.annotations.versioning option. - Used by Envoy to chain back through the message type history. + Used by Envoy to chain back through the message type history. - Args: - options: MessageOptions message. - Returns: - versioning.Annotation if set otherwise None. - """ + Args: + options: MessageOptions message. + Returns: + versioning.Annotation if set otherwise None. 
+ """ if not options.HasExtension(versioning_pb2.versioning): return None return options.Extensions[versioning_pb2.versioning] diff --git a/tools/protoxform/protoprint.py b/tools/protoxform/protoprint.py index 3a3f245cfd89..c1fb714fe639 100755 --- a/tools/protoxform/protoprint.py +++ b/tools/protoxform/protoprint.py @@ -41,12 +41,12 @@ class ProtoPrintError(Exception): def extract_clang_proto_style(clang_format_text): """Extract a key:value dictionary for proto formatting. - Args: - clang_format_text: text from a .clang-format file. + Args: + clang_format_text: text from a .clang-format file. - Returns: - key:value dictionary suitable for passing to clang-format --style. - """ + Returns: + key:value dictionary suitable for passing to clang-format --style. + """ lang = None format_dict = {} for line in clang_format_text.split('\n'): @@ -71,12 +71,12 @@ def extract_clang_proto_style(clang_format_text): def clang_format(contents): """Run proto-style oriented clang-format over given string. - Args: - contents: a string with proto contents. + Args: + contents: a string with proto contents. - Returns: - clang-formatted string - """ + Returns: + clang-formatted string + """ return subprocess.run( ['clang-format', '--style=%s' % CLANG_FORMAT_STYLE, '--assume-filename=.proto'], @@ -87,14 +87,14 @@ def clang_format(contents): def format_block(block): """Append \n to a .proto section (e.g. - comment, message definition, etc.) if non-empty. + comment, message definition, etc.) if non-empty. - Args: - block: a string representing the section. + Args: + block: a string representing the section. - Returns: - A string with appropriate whitespace. - """ + Returns: + A string with appropriate whitespace. + """ if block.strip(): return block + '\n' return '' @@ -103,15 +103,15 @@ def format_block(block): def format_comments(comments): """Format a list of comment blocks from SourceCodeInfo. - Prefixes // to each line, separates blocks by spaces. 
+    Prefixes // to each line, separates blocks by spaces.
 
-    Args:
-      comments: a list of blocks, each block is a list of strings representing
-        lines in each block.
 
-    Returns:
-      A string reprenting the formatted comment blocks.
-      """
+    Args:
+      comments: a list of blocks, each block is a list of strings representing
+        lines in each block.
+
+    Returns:
+      A string representing the formatted comment blocks.
+      """
   # TODO(htuch): not sure why this is needed, but clang-format does some weird
   # stuff with // comment indents when we have these trailing \
@@ -128,12 +128,12 @@ def fixup_trailing_backslash(s):
 def create_next_free_field_xform(msg_proto):
     """Return the next free field number annotation transformer of a message.
 
-    Args:
-      msg_proto: DescriptorProto for message.
+    Args:
+      msg_proto: DescriptorProto for message.
 
-    Returns:
-      the next free field number annotation transformer.
-    """
+    Returns:
+      the next free field number annotation transformer.
+    """
     next_free = max(
         sum([
             [f.number + 1 for f in msg_proto.field],
@@ -146,14 +146,14 @@ def create_next_free_field_xform(msg_proto):
 def format_type_context_comments(type_context, annotation_xforms=None):
     """Format the leading/trailing comments in a given TypeContext.
 
-    Args:
-      type_context: contextual information for message/enum/field.
-      annotation_xforms: a dict of transformers for annotations in leading
-        comment.
+    Args:
+      type_context: contextual information for message/enum/field.
+      annotation_xforms: a dict of transformers for annotations in leading
+        comment.
 
-    Returns:
-      Tuple of formatted leading and trailing comment blocks.
+    Returns:
+      Tuple of formatted leading and trailing comment blocks.
+ """ leading_comment = type_context.leading_comment if annotation_xforms: leading_comment = leading_comment.get_comment_with_transforms(annotation_xforms) @@ -165,14 +165,14 @@ def format_type_context_comments(type_context, annotation_xforms=None): def format_header_from_file(source_code_info, file_proto, empty_file): """Format proto header. - Args: - source_code_info: SourceCodeInfo object. - file_proto: FileDescriptorProto for file. - empty_file: are there no message/enum/service defs in file? + Args: + source_code_info: SourceCodeInfo object. + file_proto: FileDescriptorProto for file. + empty_file: are there no message/enum/service defs in file? - Returns: - Formatted proto header as a string. - """ + Returns: + Formatted proto header as a string. + """ # Load the type database. typedb = utils.get_type_db() # Figure out type dependencies in this .proto. @@ -293,17 +293,17 @@ def format_public_import_block(xs): def normalize_field_type_name(type_context, field_fqn): """Normalize a fully qualified field type name, e.g. - .envoy.foo.bar is normalized to foo.bar. + .envoy.foo.bar is normalized to foo.bar. - Considers type context to minimize type prefix. + Considers type context to minimize type prefix. - Args: - field_fqn: a fully qualified type name from FieldDescriptorProto.type_name. - type_context: contextual information for message/enum/field. + Args: + field_fqn: a fully qualified type name from FieldDescriptorProto.type_name. + type_context: contextual information for message/enum/field. - Returns: - Normalized type name as a string. - """ + Returns: + Normalized type name as a string. + """ if field_fqn.startswith('.'): # Let's say we have type context namespace a.b.c.d.e and the type we're # trying to normalize is a.b.d.e. We take (from the end) on package fragment @@ -379,13 +379,13 @@ def type_name_from_fqn(fqn): def format_field_type(type_context, field): """Format a FieldDescriptorProto type description. 
- Args: - type_context: contextual information for message/enum/field. - field: FieldDescriptor proto. + Args: + type_context: contextual information for message/enum/field. + field: FieldDescriptor proto. - Returns: - Formatted proto field type as string. - """ + Returns: + Formatted proto field type as string. + """ label = 'repeated ' if field.label == field.LABEL_REPEATED else '' type_name = label + normalize_field_type_name(type_context, field.type_name) @@ -424,13 +424,13 @@ def format_field_type(type_context, field): def format_service_method(type_context, method): """Format a service MethodDescriptorProto. - Args: - type_context: contextual information for method. - method: MethodDescriptorProto proto. + Args: + type_context: contextual information for method. + method: MethodDescriptorProto proto. - Returns: - Formatted service method as string. - """ + Returns: + Formatted service method as string. + """ def format_streaming(s): return 'stream ' if s else '' @@ -446,13 +446,13 @@ def format_streaming(s): def format_field(type_context, field): """Format FieldDescriptorProto as a proto field. - Args: - type_context: contextual information for message/enum/field. - field: FieldDescriptor proto. + Args: + type_context: contextual information for message/enum/field. + field: FieldDescriptor proto. - Returns: - Formatted proto field as a string. - """ + Returns: + Formatted proto field as a string. + """ if protoxform_options.has_hide_option(field.options): return '' leading_comment, trailing_comment = format_type_context_comments(type_context) @@ -465,13 +465,13 @@ def format_field(type_context, field): def format_enum_value(type_context, value): """Format a EnumValueDescriptorProto as a proto enum value. - Args: - type_context: contextual information for message/enum/field. - value: EnumValueDescriptorProto. + Args: + type_context: contextual information for message/enum/field. + value: EnumValueDescriptorProto. 
- Returns: - Formatted proto enum value as a string. - """ + Returns: + Formatted proto enum value as a string. + """ if protoxform_options.has_hide_option(value.options): return '' leading_comment, trailing_comment = format_type_context_comments(type_context) @@ -483,13 +483,13 @@ def format_enum_value(type_context, value): def text_format_value(field, value): """Format the value as protobuf text format - Args: - field: a FieldDescriptor that describes the field - value: the value stored in the field + Args: + field: a FieldDescriptor that describes the field + value: the value stored in the field - Returns: - value in protobuf text format - """ + Returns: + value in protobuf text format + """ out = io.StringIO() text_format.PrintFieldValue(field, value, out) return out.getvalue() @@ -498,14 +498,14 @@ def text_format_value(field, value): def format_options(options): """Format *Options (e.g. - MessageOptions, FieldOptions) message. + MessageOptions, FieldOptions) message. - Args: - options: A *Options (e.g. MessageOptions, FieldOptions) message. + Args: + options: A *Options (e.g. MessageOptions, FieldOptions) message. - Returns: - Formatted options as a string. - """ + Returns: + Formatted options as a string. + """ formatted_options = [] for option_descriptor, option_value in sorted(options.ListFields(), key=lambda x: x[0].number): @@ -532,12 +532,12 @@ def format_options(options): def format_reserved(enum_or_msg_proto): """Format reserved values/names in a [Enum]DescriptorProto. - Args: - enum_or_msg_proto: [Enum]DescriptorProto message. + Args: + enum_or_msg_proto: [Enum]DescriptorProto message. - Returns: - Formatted enum_or_msg_proto as a string. - """ + Returns: + Formatted enum_or_msg_proto as a string. + """ rrs = copy.deepcopy(enum_or_msg_proto.reserved_range) # Fixups for singletons that don't seem to always have [inclusive, exclusive) # format when parsed by protoc. 
@@ -556,8 +556,8 @@ def format_reserved(enum_or_msg_proto): class ProtoFormatVisitor(visitor.Visitor): """Visitor to generate a proto representation from a FileDescriptor proto. - See visitor.Visitor for visitor method docs comments. - """ + See visitor.Visitor for visitor method docs comments. + """ def visit_service(self, service_proto, type_context): leading_comment, trailing_comment = format_type_context_comments(type_context) diff --git a/tools/protoxform/protoxform.py b/tools/protoxform/protoxform.py index 14d5d310a2fa..3cd6fd7f5bb8 100755 --- a/tools/protoxform/protoxform.py +++ b/tools/protoxform/protoxform.py @@ -26,8 +26,8 @@ class ProtoXformError(Exception): class ProtoFormatVisitor(visitor.Visitor): """Visitor to generate a proto representation from a FileDescriptor proto. - See visitor.Visitor for visitor method docs comments. - """ + See visitor.Visitor for visitor method docs comments. + """ def __init__(self, active_or_frozen, params): if params['type_db_path']: diff --git a/tools/protoxform/protoxform_test_helper.py b/tools/protoxform/protoxform_test_helper.py index ff4317d6fb72..b1d05abd55a9 100755 --- a/tools/protoxform/protoxform_test_helper.py +++ b/tools/protoxform/protoxform_test_helper.py @@ -14,12 +14,12 @@ def path_and_filename(label): """Retrieve actual path and filename from bazel label - Args: - label: bazel label to specify target proto. + Args: + label: bazel label to specify target proto. - Returns: - actual path and filename - """ + Returns: + actual path and filename + """ if label.startswith('/'): label = label.replace('//', '/', 1) elif label.startswith('@'): @@ -34,14 +34,14 @@ def path_and_filename(label): def golden_proto_file(path, filename, version): """Retrieve golden proto file path. In general, those are placed in tools/testdata/protoxform. 
- Args: - path: target proto path - filename: target proto filename - version: api version to specify target golden proto filename + Args: + path: target proto path + filename: target proto filename + version: api version to specify target golden proto filename - Returns: - actual golden proto absolute path - """ + Returns: + actual golden proto absolute path + """ base = "./" base += path + "/" + filename + "." + version + ".gold" return os.path.abspath(base) @@ -50,10 +50,10 @@ def golden_proto_file(path, filename, version): def proto_print(src, dst): """Pretty-print FileDescriptorProto to a destination file. - Args: - src: source path for FileDescriptorProto. - dst: destination path for formatted proto. - """ + Args: + src: source path for FileDescriptorProto. + dst: destination path for formatted proto. + """ print('proto_print %s -> %s' % (src, dst)) subprocess.check_call([ 'bazel-bin/tools/protoxform/protoprint', src, dst, @@ -64,16 +64,16 @@ def proto_print(src, dst): def result_proto_file(cmd, path, tmp, filename, version): """Retrieve result proto file path. In general, those are placed in bazel artifacts. - Args: - cmd: fix or freeze? - path: target proto path - tmp: temporary directory. - filename: target proto filename - version: api version to specify target result proto filename + Args: + cmd: fix or freeze? + path: target proto path + tmp: temporary directory. 
+ filename: target proto filename + version: api version to specify target result proto filename - Returns: - actual result proto absolute path - """ + Returns: + actual result proto absolute path + """ base = "./bazel-bin" base += os.path.join(path, "%s_protos" % cmd) base += os.path.join(base, path) @@ -86,13 +86,13 @@ def result_proto_file(cmd, path, tmp, filename, version): def diff(result_file, golden_file): """Execute diff command with unified form - Args: - result_file: result proto file - golden_file: golden proto file + Args: + result_file: result proto file + golden_file: golden proto file - Returns: - output and status code - """ + Returns: + output and status code + """ command = 'diff -u ' command += result_file + ' ' command += golden_file @@ -103,15 +103,15 @@ def diff(result_file, golden_file): def run(cmd, path, filename, version): """Run main execution for protoxform test - Args: - cmd: fix or freeze? - path: target proto path - filename: target proto filename - version: api version to specify target result proto filename + Args: + cmd: fix or freeze? + path: target proto path + filename: target proto filename + version: api version to specify target result proto filename - Returns: - result message extracted from diff command - """ + Returns: + result message extracted from diff command + """ message = "" with tempfile.TemporaryDirectory() as tmp: golden_path = golden_proto_file(path, filename, version) diff --git a/tools/type_whisperer/type_whisperer.py b/tools/type_whisperer/type_whisperer.py index 7d1f824e2482..edcbb4501c1f 100755 --- a/tools/type_whisperer/type_whisperer.py +++ b/tools/type_whisperer/type_whisperer.py @@ -13,8 +13,8 @@ class TypeWhispererVisitor(visitor.Visitor): """Visitor to compute type information from a FileDescriptor proto. - See visitor.Visitor for visitor method docs comments. - """ + See visitor.Visitor for visitor method docs comments. 
+ """ def __init__(self): super(TypeWhispererVisitor, self).__init__() diff --git a/tools/type_whisperer/typedb_gen.py b/tools/type_whisperer/typedb_gen.py index 479be44eeec6..9e0b88ed37cc 100644 --- a/tools/type_whisperer/typedb_gen.py +++ b/tools/type_whisperer/typedb_gen.py @@ -79,12 +79,12 @@ def upgraded_type_with_description(type_name, type_desc): def load_types(path): """Load a tools.type_whisperer.Types proto from the filesystem. - Args: - path: filesystem path for a file in text proto format. + Args: + path: filesystem path for a file in text proto format. - Returns: - tools.type_whisperer.Types proto loaded from path. - """ + Returns: + tools.type_whisperer.Types proto loaded from path. + """ types = Types() with open(path, 'r') as f: text_format.Merge(f.read(), types) @@ -94,19 +94,19 @@ def load_types(path): def next_version_upgrade(type_name, type_map, next_version_upgrade_memo, visited=None): """Does a given type require upgrade between major version? - Performs depth-first search through type dependency graph for any upgraded - types that will force type_name to be upgraded. + Performs depth-first search through type dependency graph for any upgraded + types that will force type_name to be upgraded. - Args: - type_name: fully qualified type name. - type_map: map from type name to tools.type_whisperer.TypeDescription. - next_version_upgrade_memo: a memo dictionary to avoid revisiting nodes - across invocations. - visited: a set of visited nodes in the current search, used to detect loops. + Args: + type_name: fully qualified type name. + type_map: map from type name to tools.type_whisperer.TypeDescription. + next_version_upgrade_memo: a memo dictionary to avoid revisiting nodes + across invocations. + visited: a set of visited nodes in the current search, used to detect loops. - Returns: - A boolean indicating whether the type requires upgrade. - """ + Returns: + A boolean indicating whether the type requires upgrade. 
+ """ if not visited: visited = set([]) # Ignore non-API types.