diff --git a/.gitignore b/.gitignore index 2f19dde0..52376400 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ tests/**/actual .vscode *.swp node_modules +docs/source/api_docs/generated/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa2bf497..4c5d2fae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,20 +16,6 @@ repos: - id: pyupgrade args: - --py38-plus - - repo: https://github.com/PyCQA/isort - rev: '5.12.0' - hooks: - - id: isort - - repo: https://github.com/psf/black - rev: '22.10.0' - hooks: - - id: black - - repo: https://github.com/PyCQA/flake8 - rev: '5.0.4' - hooks: - - id: flake8 - args: - - --show-source - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.21.0 hooks: @@ -39,3 +25,10 @@ repos: files: ^tests/examples/([^/]*)/dependencies.yaml$ args: ["--schemafile", "src/rapids_dependency_file_generator/schema.json"] - id: check-github-workflows + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.4 + hooks: + - id: ruff + files: src/.*$ + - id: ruff-format + files: src/.*$ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..1ef3d588 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= -n -v -W --keep-going +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..747ffb7b --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_templates/class.rst b/docs/source/_templates/class.rst new file mode 100644 index 00000000..b29757c5 --- /dev/null +++ b/docs/source/_templates/class.rst @@ -0,0 +1,32 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :show-inheritance: + :inherited-members: + + {% block methods %} + .. automethod:: __init__ + + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + {% for item in methods %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. 
autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/source/_templates/module.rst b/docs/source/_templates/module.rst new file mode 100644 index 00000000..01a59925 --- /dev/null +++ b/docs/source/_templates/module.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: Module Attributes + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: class.rst + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: module.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/docs/source/api_docs/index.rst b/docs/source/api_docs/index.rst new file mode 100644 index 00000000..0f75753f --- /dev/null +++ b/docs/source/api_docs/index.rst @@ -0,0 +1,9 @@ +API +=== + +.. autosummary:: + :toctree: generated/ + :template: module.rst + :recursive: + + rapids_dependency_file_generator diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..e10cadb3 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,63 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +import datetime + +from packaging.version import Version + +import rapids_dependency_file_generator + +DFG_VERSION = Version(rapids_dependency_file_generator.__version__) +project = "rapids-dependency-file-generator" +copyright = f"2022-{datetime.datetime.today().year}, NVIDIA Corporation" +author = "NVIDIA Corporation" +release = str(DFG_VERSION) + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "numpydoc", +] + +templates_path = ["_templates"] +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "pydata_sphinx_theme" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+# +html_theme_options = { + "external_links": [], + # https://github.com/pydata/pydata-sphinx-theme/issues/1220 + "icon_links": [], + "github_url": "https://github.com/rapidsai/dependency-file-generator", + "twitter_url": "https://twitter.com/rapidsai", + "show_toc_level": 1, + "navbar_align": "right", +} + +html_static_path = ["_static"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +autosummary_ignore_module_all = False + +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), +} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..5cb7a025 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,15 @@ +Welcome to rapids-dependency-file-generator's documentation! +============================================================ + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + api_docs/index + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/pyproject.toml b/pyproject.toml index d3a8c8a1..20168984 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ ] [project.scripts] -rapids-dependency-file-generator = "rapids_dependency_file_generator.cli:main" +rapids-dependency-file-generator = "rapids_dependency_file_generator._cli:main" [tool.setuptools] packages = { "find" = { where = ["src"] } } @@ -36,3 +36,12 @@ version = {attr = "rapids_dependency_file_generator._version.__version__"} [tool.isort] profile = "black" + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +select = ["E", "F", "W", "I", "D"] + +[tool.ruff.lint.pydocstyle] +convention = "numpy" diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 8466cc45..821cf570 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -6,7 +6,7 @@ build: noarch: python script: pip install --no-build-isolation --no-deps . entry_points: - - rapids-dependency-file-generator = rapids_dependency_file_generator.cli:main + - rapids-dependency-file-generator = rapids_dependency_file_generator._cli:main source: path: ../ diff --git a/src/rapids_dependency_file_generator/__init__.py b/src/rapids_dependency_file_generator/__init__.py index a98c6c19..30d6cdca 100644 --- a/src/rapids_dependency_file_generator/__init__.py +++ b/src/rapids_dependency_file_generator/__init__.py @@ -1,5 +1,17 @@ +"""Public API for rapids-dependency-file-generator. + +This API can be used by Python build tools or other tools that want to +programmatically generate ``pyproject.toml``, ``requirements.txt``, or +a Conda environment from ``dependencies.yaml``. +""" + +from . 
import _config, _rapids_dependency_file_generator +from ._config import * # noqa: F401,F403 +from ._rapids_dependency_file_generator import * # noqa: F401,F403 from ._version import __version__ __all__ = [ "__version__", + *_config.__all__, + *_rapids_dependency_file_generator.__all__, ] diff --git a/src/rapids_dependency_file_generator/cli.py b/src/rapids_dependency_file_generator/_cli.py similarity index 85% rename from src/rapids_dependency_file_generator/cli.py rename to src/rapids_dependency_file_generator/_cli.py index a3f09e5a..09d034f7 100644 --- a/src/rapids_dependency_file_generator/cli.py +++ b/src/rapids_dependency_file_generator/_cli.py @@ -2,13 +2,13 @@ import os import warnings -from ._version import __version__ as version -from .config import Output, load_config_from_file -from .constants import default_dependency_file_path -from .rapids_dependency_file_generator import ( +from ._config import Output, load_config_from_file +from ._constants import default_dependency_file_path +from ._rapids_dependency_file_generator import ( delete_existing_files, make_dependency_files, ) +from ._version import __version__ as version def validate_args(argv): @@ -90,12 +90,8 @@ def validate_args(argv): if args.file_key_deprecated: if args.file_key: - raise ValueError( - "The --file_key (deprecated) and --file-key arguments cannot be specified together." - ) - warnings.warn( - "The use of --file_key is deprecated. Use -f or --file-key instead." - ) + raise ValueError("The --file_key (deprecated) and --file-key arguments cannot be specified together.") + warnings.warn("The use of --file_key is deprecated. Use -f or --file-key instead.") args.file_key = args.file_key_deprecated dependent_arg_keys = ["file_key", "output", "matrix"] @@ -111,14 +107,10 @@ def validate_args(argv): raise ValueError( "The --prepend-channels (deprecated) and --prepend-channel arguments cannot be specified together." ) - warnings.warn( - "The use of --prepend-channels is deprecated. Use --prepend-channel instead." - ) + warnings.warn("The use of --prepend-channels is deprecated. Use --prepend-channel instead.") args.prepend_channels = args.prepend_channels_deprecated.split(";") if args.prepend_channels and args.output and args.output != Output.CONDA.value: - raise ValueError( - f"--prepend-channel is only valid with --output {Output.CONDA.value}" - ) + raise ValueError(f"--prepend-channel is only valid with --output {Output.CONDA.value}") # If --clean was passed without arguments, default to cleaning from the root of the # tree where the config file is. @@ -156,6 +148,4 @@ def main(argv=None): if args.clean: delete_existing_files(args.clean) - make_dependency_files( - parsed_config, file_keys, output, matrix, args.prepend_channels, to_stdout - ) + make_dependency_files(parsed_config, file_keys, output, matrix, args.prepend_channels, to_stdout) diff --git a/src/rapids_dependency_file_generator/_config.py b/src/rapids_dependency_file_generator/_config.py new file mode 100644 index 00000000..40a86343 --- /dev/null +++ b/src/rapids_dependency_file_generator/_config.py @@ -0,0 +1,263 @@ +from dataclasses import dataclass, field +from enum import Enum +from os import PathLike +from pathlib import Path + +import yaml + +from . 
import _constants +from ._rapids_dependency_file_validator import validate_dependencies + +__all__ = [ + "Output", + "FileExtras", + "File", + "PipRequirements", + "CommonDependencies", + "MatrixMatcher", + "SpecificDependencies", + "Dependencies", + "Config", + "parse_config", + "load_config_from_file", +] + + +class Output(Enum): + """An output file type to generate.""" + + PYPROJECT = "pyproject" + """Generate a ``pyproject.toml``.""" + + REQUIREMENTS = "requirements" + """Generate a ``requirements.txt``.""" + + CONDA = "conda" + """Generate a Conda environment file.""" + + +@dataclass +class FileExtras: + """The ``extras`` field of a file key in ``dependencies.yaml``.""" + + table: str + """The ``table`` field.""" + + key: str | None = None + """The ``key`` field.""" + + +@dataclass +class File: + """A file key in ``dependencies.yaml``.""" + + output: set[Output] + """The set of output file types to generate.""" + + includes: list[str] + """The list of dependency sets to include.""" + + extras: FileExtras | None = None + """Optional extra information for the file generator.""" + + matrix: dict[str, list[str]] = field(default_factory=dict) + """The matrix of specific parameters to use when generating.""" + + requirements_dir: Path = Path(_constants.default_requirements_dir) + """The directory in which to write ``requirements.txt``.""" + + conda_dir: Path = Path(_constants.default_conda_dir) + """The directory in which to write the Conda environment file.""" + + pyproject_dir: Path = Path(_constants.default_pyproject_dir) + """The directory in which to write ``pyproject.toml``.""" + + +@dataclass +class PipRequirements: + """A list of Pip requirements to include as dependencies.""" + + pip: list[str] + """The list of Pip requirements.""" + + +@dataclass +class CommonDependencies: + """A dependency entry in the ``common`` field of a dependency set.""" + + output_types: set[Output] + """The set of output types for this entry.""" + + packages: list[str | PipRequirements] + """The list of packages for this entry.""" + + +@dataclass +class MatrixMatcher: + """A matrix matcher for a ``specific`` dependency entry.""" + + matrix: dict[str, str] + """The set of matrix values to match.""" + + packages: list[str | PipRequirements] + """The list of packages for this entry.""" + + +@dataclass +class SpecificDependencies: + """A dependency entry in the ``specific`` field of a dependency set.""" + + output_types: set[Output] + """The set of output types for this entry.""" + + matrices: list[MatrixMatcher] + """The list of matrix matchers for this entry.""" + + +@dataclass +class Dependencies: + """A dependency set.""" + + common: list[CommonDependencies] = field(default_factory=list) + """The list of common dependency entries.""" + + specific: list[SpecificDependencies] = field(default_factory=list) + """The list of specific dependency entries.""" + + +@dataclass +class Config: + """A fully parsed ``dependencies.yaml`` file.""" + + path: Path + """The path to the parsed file.""" + + files: dict[str, File] = field(default_factory=dict) + """The file entries, keyed by name.""" + + channels: list[str] = field(default_factory=lambda: list(_constants.default_channels)) + """A list of channels to include in Conda files.""" + + dependencies: dict[str, Dependencies] = field(default_factory=dict) + """The dependency sets, keyed by name.""" + + +def _parse_outputs(outputs: str | list[str]) -> set[Output]: + if isinstance(outputs, str): + outputs = [outputs] + if outputs == ["none"]: + outputs = [] + return 
{Output(o) for o in outputs} + + +def _parse_extras(extras: dict[str, str]) -> FileExtras: + return FileExtras( + table=extras["table"], + key=extras.get("key", None), + ) + + +def _parse_file(file_config: dict[str, object]) -> File: + def get_extras(): + try: + extras = file_config["extras"] + except KeyError: + return None + + return _parse_extras(extras) + + return File( + output=_parse_outputs(file_config["output"]), + extras=get_extras(), + includes=list(file_config["includes"]), + matrix={key: list(value) for key, value in file_config.get("matrix", {}).items()}, + requirements_dir=Path(file_config.get("requirements_dir", _constants.default_requirements_dir)), + conda_dir=Path(file_config.get("conda_dir", _constants.default_conda_dir)), + pyproject_dir=Path(file_config.get("pyproject_dir", _constants.default_pyproject_dir)), + ) + + +def _parse_requirement(requirement: str | dict[str, str]) -> str | PipRequirements: + if isinstance(requirement, str): + return requirement + + return PipRequirements(pip=requirement["pip"]) + + +def _parse_dependencies(dependencies: dict[str, object]) -> Dependencies: + return Dependencies( + common=[ + CommonDependencies( + output_types=_parse_outputs(d["output_types"]), + packages=[_parse_requirement(p) for p in d["packages"]], + ) + for d in dependencies.get("common", []) + ], + specific=[ + SpecificDependencies( + output_types=_parse_outputs(d["output_types"]), + matrices=[ + MatrixMatcher( + matrix=dict(m.get("matrix", {}) or {}), + packages=[_parse_requirement(p) for p in m.get("packages", []) or []], + ) + for m in d["matrices"] + ], + ) + for d in dependencies.get("specific", []) + ], + ) + + +def _parse_channels(channels) -> list[str]: + if isinstance(channels, str): + return [channels] + + return list(channels) + + +def parse_config(config: dict[str, object], path: PathLike) -> Config: + """Parse a configuration file from a dictionary. + + Parameters + ---------- + config : dict[str, object] + The dictionary to parse. + path : PathLike + The path to the parsed configuration file. This will be stored as the ``path`` + attribute. + + Returns + ------- + Config + The fully parsed configuration file. + + Raises + ------ + jsonschema.exceptions.ValidationError + If the dependencies do not conform to the schema + """ + validate_dependencies(config) + return Config( + path=Path(path), + files={key: _parse_file(value) for key, value in config["files"].items()}, + channels=_parse_channels(config.get("channels", [])), + dependencies={key: _parse_dependencies(value) for key, value in config["dependencies"].items()}, + ) + + +def load_config_from_file(path: PathLike) -> Config: + """Open a ``dependencies.yaml`` file and parse it. + + Parameters + ---------- + path : PathLike + The path to the configuration file to parse. + + Returns + ------- + Config + The fully parsed configuration file. 
+ """ + with open(path) as f: + return parse_config(yaml.safe_load(f), path) diff --git a/src/rapids_dependency_file_generator/constants.py b/src/rapids_dependency_file_generator/_constants.py similarity index 100% rename from src/rapids_dependency_file_generator/constants.py rename to src/rapids_dependency_file_generator/_constants.py diff --git a/src/rapids_dependency_file_generator/rapids_dependency_file_generator.py b/src/rapids_dependency_file_generator/_rapids_dependency_file_generator.py similarity index 84% rename from src/rapids_dependency_file_generator/rapids_dependency_file_generator.py rename to src/rapids_dependency_file_generator/_rapids_dependency_file_generator.py index 6cb3e936..e3a5e9a7 100644 --- a/src/rapids_dependency_file_generator/rapids_dependency_file_generator.py +++ b/src/rapids_dependency_file_generator/_rapids_dependency_file_generator.py @@ -8,8 +8,12 @@ import tomlkit import yaml -from . import config -from .constants import cli_name +from . import _config +from ._constants import cli_name + +__all__ = [ + "make_dependency_files", +] HEADER = f"# This file is generated by `{cli_name}`." @@ -26,9 +30,7 @@ def delete_existing_files(root: os.PathLike) -> None: The path (relative or absolute) to the root of the directory tree to search for files to delete. """ for dirpath, _, filenames in os.walk(root): - for fn in filter( - lambda fn: fn.endswith(".txt") or fn.endswith(".yaml"), filenames - ): + for fn in filter(lambda fn: fn.endswith(".txt") or fn.endswith(".yaml"), filenames): with open(file_path := os.path.join(dirpath, fn)) as f: try: if HEADER in f.read(): @@ -38,7 +40,7 @@ def delete_existing_files(root: os.PathLike) -> None: def dedupe( - dependencies: list[str | config.PipRequirements], + dependencies: list[str | _config.PipRequirements], ) -> list[str | dict[str, str]]: """Generate the unique set of dependencies contained in a dependency list. @@ -48,14 +50,14 @@ def dedupe( A sequence containing dependencies (possibly including duplicates). Returns - ------ + ------- list[str | dict[str, str]] The `dependencies` with all duplicates removed. """ deduped = sorted({dep for dep in dependencies if isinstance(dep, str)}) dict_deps = defaultdict(list) for dep in filter(lambda dep: not isinstance(dep, str), dependencies): - if isinstance(dep, config.PipRequirements): + if isinstance(dep, _config.PipRequirements): dict_deps["pip"].extend(dep.pip) dict_deps["pip"] = sorted(set(dict_deps["pip"])) if dict_deps: @@ -64,7 +66,7 @@ def dedupe( def grid(gridspec: dict[str, list[str]]) -> Generator[dict[str, str]]: - """Yields the Cartesian product of a `dict` of iterables. + """Yield the Cartesian product of a `dict` of iterables. The input ``gridspec`` is a dictionary whose keys correspond to parameter names. Each key is associated with an iterable of the @@ -89,13 +91,13 @@ def grid(gridspec: dict[str, list[str]]) -> Generator[dict[str, str]]: def make_dependency_file( *, - file_type: config.Output, + file_type: _config.Output, name: os.PathLike, config_file: os.PathLike, output_dir: os.PathLike, conda_channels: list[str], dependencies: list[str | dict[str, list[str]]], - extras: config.FileExtras, + extras: _config.FileExtras, ): """Generate the contents of the dependency file. @@ -129,7 +131,7 @@ def make_dependency_file( # To make changes, edit {relative_path_to_config_file} and run `{cli_name}`. 
""" ) - if file_type == config.Output.CONDA: + if file_type == _config.Output.CONDA: file_contents += yaml.dump( { "name": os.path.splitext(name)[0], @@ -137,22 +139,20 @@ def make_dependency_file( "dependencies": dependencies, } ) - elif file_type == config.Output.REQUIREMENTS: + elif file_type == _config.Output.REQUIREMENTS: file_contents += "\n".join(dependencies) + "\n" - elif file_type == config.Output.PYPROJECT: + elif file_type == _config.Output.PYPROJECT: if extras.table == "build-system": key = "requires" if extras.key is not None: raise ValueError( - "The 'key' field is not allowed for the 'pyproject' file type when " - "'table' is 'build-system'." + "The 'key' field is not allowed for the 'pyproject' file type when " "'table' is 'build-system'." ) elif extras.table == "project": key = "dependencies" if extras.key is not None: raise ValueError( - "The 'key' field is not allowed for the 'pyproject' file type when " - "'table' is 'project'." + "The 'key' field is not allowed for the 'pyproject' file type when " "'table' is 'project'." ) else: if extras.key is None: @@ -196,7 +196,7 @@ def make_dependency_file( return file_contents -def get_filename(file_type: config.Output, file_key: str, matrix_combo: dict[str, str]): +def get_filename(file_type: _config.Output, file_key: str, matrix_combo: dict[str, str]): """Get the name of the file to which to write a generated dependency set. The file name will be composed of the following components, each determined @@ -227,27 +227,23 @@ def get_filename(file_type: config.Output, file_key: str, matrix_combo: dict[str file_ext = "" file_name_prefix = file_key suffix = "_".join([f"{k}-{v}" for k, v in matrix_combo.items()]) - if file_type == config.Output.CONDA: + if file_type == _config.Output.CONDA: file_ext = ".yaml" - elif file_type == config.Output.REQUIREMENTS: + elif file_type == _config.Output.REQUIREMENTS: file_ext = ".txt" file_type_prefix = "requirements" - elif file_type == config.Output.PYPROJECT: + elif file_type == _config.Output.PYPROJECT: file_ext = ".toml" # Unlike for files like requirements.txt or conda environment YAML files, which # may be named with additional prefixes (e.g. all_cuda_*) pyproject.toml files # need to have that exact name and are never prefixed. file_name_prefix = "pyproject" suffix = "" - filename = "_".join( - filter(None, (file_type_prefix, file_name_prefix, suffix)) - ).replace(".", "") + filename = "_".join(filter(None, (file_type_prefix, file_name_prefix, suffix))).replace(".", "") return filename + file_ext -def get_output_dir( - file_type: config.Output, config_file_path: os.PathLike, file_config: config.File -): +def get_output_dir(file_type: _config.Output, config_file_path: os.PathLike, file_config: _config.File): """Get the directory to which to write a generated dependency file. The output directory is determined by the `file_type` and the corresponding @@ -271,18 +267,16 @@ def get_output_dir( The directory to write the file to. 
""" path = [os.path.dirname(config_file_path)] - if file_type == config.Output.CONDA: + if file_type == _config.Output.CONDA: path.append(file_config.conda_dir) - elif file_type == config.Output.REQUIREMENTS: + elif file_type == _config.Output.REQUIREMENTS: path.append(file_config.requirements_dir) - elif file_type == config.Output.PYPROJECT: + elif file_type == _config.Output.PYPROJECT: path.append(file_config.pyproject_dir) return os.path.join(*path) -def should_use_specific_entry( - matrix_combo: dict[str, str], specific_entry_matrix: dict[str, str] -): +def should_use_specific_entry(matrix_combo: dict[str, str], specific_entry_matrix: dict[str, str]): """Check if an entry should be used. Dependencies listed in the [dependencies.$DEPENDENCY_GROUP.specific] @@ -312,16 +306,15 @@ def should_use_specific_entry( `matrix_combo` and False otherwise. """ return all( - specific_key in matrix_combo - and fnmatch.fnmatch(matrix_combo[specific_key], specific_value) + specific_key in matrix_combo and fnmatch.fnmatch(matrix_combo[specific_key], specific_value) for specific_key, specific_value in specific_entry_matrix.items() ) def make_dependency_files( - parsed_config: config.Config, + parsed_config: _config.Config, file_keys: list[str], - output: set[config.Output], + output: set[_config.Output], matrix: dict[str, list[str]] | None, prepend_channels: list[str], to_stdout: bool, @@ -329,7 +322,7 @@ def make_dependency_files( """Generate dependency files. This function iterates over data parsed from a YAML file conforming to the - `dependencies.yaml file spec __` + `dependencies.yaml file spec `_ and produces the requested files. Parameters @@ -343,7 +336,7 @@ def make_dependency_files( matrix : dict[str, list[str]] | None The matrix to use, or None if the default matrix from each file key should be used. - prepend_channels: list[str] + prepend_channels : list[str] List of channels to prepend to the ones from parsed_config. to_stdout : bool Whether the output should be written to stdout. If False, it will be @@ -351,12 +344,11 @@ def make_dependency_files( config_file_path. Raises - ------- + ------ ValueError If the file is malformed. There are numerous different error cases which are described by the error messages. """ - for file_key in file_keys: file_config = parsed_config.files[file_key] file_types_to_generate = file_config.output & output @@ -365,10 +357,7 @@ def make_dependency_files( else: file_matrix = file_config.matrix calculated_grid = list(grid(file_matrix)) - if ( - config.Output.PYPROJECT in file_types_to_generate - and len(calculated_grid) > 1 - ): + if _config.Output.PYPROJECT in file_types_to_generate and len(calculated_grid) > 1: raise ValueError("Pyproject outputs can't have more than one matrix output") for file_type in file_types_to_generate: for matrix_combo in calculated_grid: @@ -402,41 +391,29 @@ def make_dependency_files( fallback_entry = specific_matrices_entry continue - if should_use_specific_entry( - matrix_combo, specific_matrices_entry.matrix - ): + if should_use_specific_entry(matrix_combo, specific_matrices_entry.matrix): # Raise an error if multiple specific entries # (not including the fallback_entry) match a # requested matrix combination. if found: - raise ValueError( - f"Found multiple matches for matrix {matrix_combo}" - ) + raise ValueError(f"Found multiple matches for matrix {matrix_combo}") found = True # A package list may be empty as a way to # indicate that for some matrix elements no # packages should be installed. 
- dependencies.extend( - specific_matrices_entry.packages or [] - ) + dependencies.extend(specific_matrices_entry.packages or []) if not found: if fallback_entry: dependencies.extend(fallback_entry.packages) else: - raise ValueError( - f"No matching matrix found in '{include}' for: {matrix_combo}" - ) + raise ValueError(f"No matching matrix found in '{include}' for: {matrix_combo}") # Dedupe deps and print / write to filesystem full_file_name = get_filename(file_type, file_key, matrix_combo) deduped_deps = dedupe(dependencies) - output_dir = ( - "." - if to_stdout - else get_output_dir(file_type, parsed_config.path, file_config) - ) + output_dir = "." if to_stdout else get_output_dir(file_type, parsed_config.path, file_config) contents = make_dependency_file( file_type=file_type, name=full_file_name, diff --git a/src/rapids_dependency_file_generator/rapids_dependency_file_validator.py b/src/rapids_dependency_file_generator/_rapids_dependency_file_validator.py similarity index 73% rename from src/rapids_dependency_file_generator/rapids_dependency_file_validator.py rename to src/rapids_dependency_file_generator/_rapids_dependency_file_validator.py index 0df84a76..a9a8aab0 100644 --- a/src/rapids_dependency_file_generator/rapids_dependency_file_validator.py +++ b/src/rapids_dependency_file_generator/_rapids_dependency_file_validator.py @@ -8,13 +8,11 @@ import jsonschema from jsonschema.exceptions import best_match -SCHEMA = json.loads( - importlib.resources.files(__package__).joinpath("schema.json").read_bytes() -) +SCHEMA = json.loads(importlib.resources.files(__package__).joinpath("schema.json").read_bytes()) def validate_dependencies(dependencies): - """Valid a dictionary against the dependencies.yaml spec. + """Validate a dictionary against the dependencies.yaml spec. Parameters ---------- @@ -31,7 +29,5 @@ def validate_dependencies(dependencies): if len(errors) > 0: print("The provided dependency file contains schema errors.", file=sys.stderr) best_matching_error = best_match(errors) - print( - "\n", textwrap.indent(str(best_matching_error), "\t"), "\n", file=sys.stderr - ) + print("\n", textwrap.indent(str(best_matching_error), "\t"), "\n", file=sys.stderr) raise RuntimeError("The provided dependencies data is invalid.") diff --git a/src/rapids_dependency_file_generator/config.py b/src/rapids_dependency_file_generator/config.py deleted file mode 100644 index dcd0a7ed..00000000 --- a/src/rapids_dependency_file_generator/config.py +++ /dev/null @@ -1,171 +0,0 @@ -from dataclasses import dataclass, field -from enum import Enum -from os import PathLike -from pathlib import Path - -import yaml - -from . 
import constants -from .rapids_dependency_file_validator import validate_dependencies - - -class Output(Enum): - PYPROJECT = "pyproject" - REQUIREMENTS = "requirements" - CONDA = "conda" - - -@dataclass -class FileExtras: - table: str - key: str | None = None - - -@dataclass -class File: - output: set[Output] - includes: list[str] - extras: FileExtras | None = None - matrix: dict[str, list[str]] = field(default_factory=dict) - requirements_dir: Path = Path(constants.default_requirements_dir) - conda_dir: Path = Path(constants.default_conda_dir) - pyproject_dir: Path = Path(constants.default_pyproject_dir) - - -@dataclass -class PipRequirements: - pip: list[str] - - -@dataclass -class CommonDependencies: - output_types: set[Output] - packages: list[str | PipRequirements] - - -@dataclass -class MatrixMatcher: - matrix: dict[str, str] - packages: list[str | PipRequirements] - - -@dataclass -class SpecificDependencies: - output_types: set[Output] - matrices: list[MatrixMatcher] - - -@dataclass -class Dependencies: - common: list[CommonDependencies] = field(default_factory=list) - specific: list[SpecificDependencies] = field(default_factory=list) - - -@dataclass -class Config: - path: Path - files: dict[str, File] = field(default_factory=dict) - channels: list[str] = field( - default_factory=lambda: list(constants.default_channels) - ) - dependencies: dict[str, Dependencies] = field(default_factory=dict) - - -def _parse_outputs(outputs: str | list[str]) -> set[Output]: - if isinstance(outputs, str): - outputs = [outputs] - if outputs == ["none"]: - outputs = [] - return {Output(o) for o in outputs} - - -def _parse_extras(extras: dict[str, str]) -> FileExtras: - return FileExtras( - table=extras["table"], - key=extras.get("key", None), - ) - - -def _parse_file(file_config: dict[str, object]) -> File: - def get_extras(): - try: - extras = file_config["extras"] - except KeyError: - return None - - return _parse_extras(extras) - - return File( - output=_parse_outputs(file_config["output"]), - extras=get_extras(), - includes=list(file_config["includes"]), - matrix={ - key: list(value) for key, value in file_config.get("matrix", {}).items() - }, - requirements_dir=Path( - file_config.get("requirements_dir", constants.default_requirements_dir) - ), - conda_dir=Path(file_config.get("conda_dir", constants.default_conda_dir)), - pyproject_dir=Path( - file_config.get("pyproject_dir", constants.default_pyproject_dir) - ), - ) - - -def _parse_requirement(requirement: str | dict[str, str]) -> str | PipRequirements: - if isinstance(requirement, str): - return requirement - - return PipRequirements(pip=requirement["pip"]) - - -def _parse_dependencies(dependencies: dict[str, object]) -> Dependencies: - return Dependencies( - common=[ - CommonDependencies( - output_types=_parse_outputs(d["output_types"]), - packages=[_parse_requirement(p) for p in d["packages"]], - ) - for d in dependencies.get("common", []) - ], - specific=[ - SpecificDependencies( - output_types=_parse_outputs(d["output_types"]), - matrices=[ - MatrixMatcher( - matrix=dict(m.get("matrix", {}) or {}), - packages=[ - _parse_requirement(p) for p in m.get("packages", []) or [] - ], - ) - for m in d["matrices"] - ], - ) - for d in dependencies.get("specific", []) - ], - ) - - -def _parse_channels(channels) -> list[str]: - if isinstance(channels, str): - return [channels] - - return list(channels) - - -def parse_config(config: dict[str, object], path: PathLike) -> Config: - validate_dependencies(config) - return Config( - path=Path(path), - 
files={key: _parse_file(value) for key, value in config["files"].items()}, - channels=_parse_channels(config.get("channels", [])), - dependencies={ - key: _parse_dependencies(value) - for key, value in config["dependencies"].items() - }, - ) - - -def load_config_from_file(path: PathLike) -> Config: - with open(path) as f: - return parse_config(yaml.safe_load(f), path) diff --git a/tests/conftest.py b/tests/conftest.py index 7e6951fb..05bcb8b1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,6 @@ import pytest -from rapids_dependency_file_generator.rapids_dependency_file_validator import SCHEMA +from rapids_dependency_file_generator._rapids_dependency_file_validator import SCHEMA @pytest.fixture(scope="session") diff --git a/tests/test_cli.py b/tests/test_cli.py index 02b776a6..923a7ff7 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,6 +1,6 @@ import pytest -from rapids_dependency_file_generator.cli import generate_matrix, validate_args +from rapids_dependency_file_generator._cli import generate_matrix, validate_args def test_generate_matrix(): diff --git a/tests/test_config.py b/tests/test_config.py index 8afb6944..823fda75 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -4,18 +4,22 @@ import pytest -from rapids_dependency_file_generator import config, constants +from rapids_dependency_file_generator import _config, _constants @pytest.mark.parametrize( ["input", "output"], [ - *((e.value, {e}) for e in config.Output), + *((e.value, {e}) for e in _config.Output), ("none", set()), (["none"], set()), ( ["pyproject", "requirements", "conda"], - {config.Output.PYPROJECT, config.Output.REQUIREMENTS, config.Output.CONDA}, + { + _config.Output.PYPROJECT, + _config.Output.REQUIREMENTS, + _config.Output.CONDA, + }, ), ("invalid", ValueError), (["invalid"], ValueError), @@ -25,25 +29,25 @@ def test_parse_outputs(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_outputs(input) + _config._parse_outputs(input) else: - assert config._parse_outputs(input) == output + assert _config._parse_outputs(input) == output @pytest.mark.parametrize( ["input", "output"], [ ("package", "package"), - ({"pip": "package"}, config.PipRequirements(pip="package")), + ({"pip": "package"}, _config.PipRequirements(pip="package")), ({"other": "invalid"}, KeyError), ], ) def test_parse_requirement(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_requirement(input) + _config._parse_requirement(input) else: - assert config._parse_requirement(input) == output + assert _config._parse_requirement(input) == output @pytest.mark.parametrize( @@ -51,11 +55,11 @@ def test_parse_requirement(input, output): [ ( {"table": "build-system", "key": "requires"}, - config.FileExtras(table="build-system", key="requires"), + _config.FileExtras(table="build-system", key="requires"), ), ( {"table": "build-system"}, - config.FileExtras(table="build-system", key=None), + _config.FileExtras(table="build-system", key=None), ), ({}, KeyError), ], @@ -63,9 +67,9 @@ def test_parse_requirement(input, output): def test_parse_extras(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_extras(input) + _config._parse_extras(input) else: - assert config._parse_extras(input) == output + assert _config._parse_extras(input) == output @pytest.mark.parametrize( @@ -76,14 +80,14 @@ def test_parse_extras(input, 
output): "output": "none", "includes": [], }, - config.File( + _config.File( output=set(), extras=None, includes=[], matrix={}, - requirements_dir=Path(constants.default_requirements_dir), - conda_dir=Path(constants.default_conda_dir), - pyproject_dir=Path(constants.default_pyproject_dir), + requirements_dir=Path(_constants.default_requirements_dir), + conda_dir=Path(_constants.default_conda_dir), + pyproject_dir=Path(_constants.default_pyproject_dir), ), ), ( @@ -102,9 +106,9 @@ def test_parse_extras(input, output): "conda_dir": "conda_recipe", "pyproject_dir": "python_pyproject", }, - config.File( - output={config.Output.CONDA, config.Output.PYPROJECT}, - extras=config.FileExtras(table="build-system", key="requires"), + _config.File( + output={_config.Output.CONDA, _config.Output.PYPROJECT}, + extras=_config.FileExtras(table="build-system", key="requires"), includes=["py_build", "py_run"], matrix={ "cuda": ["11", "12"], @@ -120,9 +124,9 @@ def test_parse_extras(input, output): def test_parse_file(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_file(input) + _config._parse_file(input) else: - assert config._parse_file(input) == output + assert _config._parse_file(input) == output @pytest.mark.parametrize( @@ -130,7 +134,7 @@ def test_parse_file(input, output): [ ( {}, - config.Dependencies(common=[], specific=[]), + _config.Dependencies(common=[], specific=[]), ), ( { @@ -178,41 +182,44 @@ def test_parse_file(input, output): }, ], }, - config.Dependencies( + _config.Dependencies( common=[ - config.CommonDependencies( + _config.CommonDependencies( output_types=set(), packages=[], ), - config.CommonDependencies( + _config.CommonDependencies( output_types={ - config.Output.PYPROJECT, - config.Output.REQUIREMENTS, + _config.Output.PYPROJECT, + _config.Output.REQUIREMENTS, }, packages=[ "package1", - config.PipRequirements(pip=["package2"]), + _config.PipRequirements(pip=["package2"]), ], ), ], specific=[ - config.SpecificDependencies( + _config.SpecificDependencies( output_types=set(), matrices=[ - config.MatrixMatcher( + _config.MatrixMatcher( matrix={}, packages=[], ), ], ), - config.SpecificDependencies( - output_types={config.Output.REQUIREMENTS, config.Output.CONDA}, + _config.SpecificDependencies( + output_types={ + _config.Output.REQUIREMENTS, + _config.Output.CONDA, + }, matrices=[ - config.MatrixMatcher( + _config.MatrixMatcher( matrix={"cuda": "11", "arch": "x86_64"}, packages=[ "package3", - config.PipRequirements(pip=["package4"]), + _config.PipRequirements(pip=["package4"]), ], ), ], @@ -225,9 +232,9 @@ def test_parse_file(input, output): def test_parse_dependencies(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_dependencies(input) + _config._parse_dependencies(input) else: - assert config._parse_dependencies(input) == output + assert _config._parse_dependencies(input) == output @pytest.mark.parametrize( @@ -240,9 +247,9 @@ def test_parse_dependencies(input, output): def test_parse_channels(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config._parse_channels(input) + _config._parse_channels(input) else: - assert config._parse_channels(input) == output + assert _config._parse_channels(input) == output @pytest.mark.parametrize( @@ -285,11 +292,11 @@ def test_parse_channels(input, output): }, }, "dependencies.yaml", - config.Config( + _config.Config( 
path=Path("dependencies.yaml"), files={ - "python": config.File( - output={config.Output.PYPROJECT}, + "python": _config.File( + output={_config.Output.PYPROJECT}, includes=["py_build"], ), }, @@ -298,23 +305,23 @@ def test_parse_channels(input, output): "nvidia", ], dependencies={ - "py_build": config.Dependencies( + "py_build": _config.Dependencies( common=[ - config.CommonDependencies( - output_types={config.Output.PYPROJECT}, + _config.CommonDependencies( + output_types={_config.Output.PYPROJECT}, packages=[ "package1", ], ), ], specific=[ - config.SpecificDependencies( + _config.SpecificDependencies( output_types={ - config.Output.CONDA, - config.Output.REQUIREMENTS, + _config.Output.CONDA, + _config.Output.REQUIREMENTS, }, matrices=[ - config.MatrixMatcher( + _config.MatrixMatcher( matrix={}, packages=[], ), @@ -330,9 +337,9 @@ def test_parse_channels(input, output): def test_parse_config(input, path, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config.parse_config(input, path) + _config.parse_config(input, path) else: - assert config.parse_config(input, path) == output + assert _config.parse_config(input, path) == output @pytest.mark.parametrize( @@ -361,11 +368,11 @@ def test_parse_config(input, path, output): packages: """ ), - config.Config( + _config.Config( path=Path("dependencies.yaml"), files={ - "python": config.File( - output={config.Output.PYPROJECT}, + "python": _config.File( + output={_config.Output.PYPROJECT}, includes=["py_build"], ), }, @@ -374,23 +381,23 @@ def test_parse_config(input, path, output): "nvidia", ], dependencies={ - "py_build": config.Dependencies( + "py_build": _config.Dependencies( common=[ - config.CommonDependencies( - output_types={config.Output.PYPROJECT}, + _config.CommonDependencies( + output_types={_config.Output.PYPROJECT}, packages=[ "package1", ], ), ], specific=[ - config.SpecificDependencies( + _config.SpecificDependencies( output_types={ - config.Output.CONDA, - config.Output.REQUIREMENTS, + _config.Output.CONDA, + _config.Output.REQUIREMENTS, }, matrices=[ - config.MatrixMatcher( + _config.MatrixMatcher( matrix={}, packages=[], ), @@ -410,7 +417,7 @@ def test_load_config_from_file(input, output): if isinstance(output, type) and issubclass(output, Exception): with pytest.raises(output): - config.load_config_from_file(f.name) + _config.load_config_from_file(f.name) else: output.path = Path(f.name) - assert config.load_config_from_file(f.name) == output + assert _config.load_config_from_file(f.name) == output diff --git a/tests/test_examples.py b/tests/test_examples.py index 7f6d3a86..55b0b456 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -8,7 +8,7 @@ import yaml from jsonschema.exceptions import ValidationError -from rapids_dependency_file_generator.cli import main +from rapids_dependency_file_generator._cli import main CURRENT_DIR = pathlib.Path(__file__).parent diff --git a/tests/test_rapids_dependency_file_generator.py b/tests/test_rapids_dependency_file_generator.py index c6befcb0..1a35a4e9 100644 --- a/tests/test_rapids_dependency_file_generator.py +++ b/tests/test_rapids_dependency_file_generator.py @@ -2,9 +2,9 @@ import yaml -from rapids_dependency_file_generator import config -from rapids_dependency_file_generator.constants import cli_name -from rapids_dependency_file_generator.rapids_dependency_file_generator import ( +from rapids_dependency_file_generator import _config +from rapids_dependency_file_generator._constants import cli_name +from 
rapids_dependency_file_generator._rapids_dependency_file_generator import ( dedupe, make_dependency_file, should_use_specific_entry, @@ -21,15 +21,15 @@ def test_dedupe(): [ "dep1", "dep1", - config.PipRequirements(pip=["pip_dep1", "pip_dep2"]), - config.PipRequirements(pip=["pip_dep1", "pip_dep2"]), + _config.PipRequirements(pip=["pip_dep1", "pip_dep2"]), + _config.PipRequirements(pip=["pip_dep1", "pip_dep2"]), ] ) assert deduped == ["dep1", {"pip": ["pip_dep1", "pip_dep2"]}] @mock.patch( - "rapids_dependency_file_generator.rapids_dependency_file_generator.os.path.relpath" + "rapids_dependency_file_generator._rapids_dependency_file_generator.os.path.relpath" ) def test_make_dependency_file(mock_relpath): relpath = "../../config_file.yaml" @@ -39,7 +39,7 @@ def test_make_dependency_file(mock_relpath): # To make changes, edit {relpath} and run `{cli_name}`. """ env = make_dependency_file( - file_type=config.Output.CONDA, + file_type=_config.Output.CONDA, name="tmp_env.yaml", config_file="config_file", output_dir="output_path", @@ -56,7 +56,7 @@ def test_make_dependency_file(mock_relpath): ) env = make_dependency_file( - file_type=config.Output.REQUIREMENTS, + file_type=_config.Output.REQUIREMENTS, name="tmp_env.txt", config_file="config_file", output_dir="output_path",
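
A minimal usage sketch of the programmatic API this diff makes public through __init__.py (illustrative only, not part of the diff): it assumes a dependencies.yaml in the working directory and uses only the names the diff re-exports (load_config_from_file, make_dependency_files, Output), with keyword names taken from the make_dependency_files signature above.

    import rapids_dependency_file_generator as rdfg

    # Parse and validate dependencies.yaml into the Config dataclass
    # defined in _config.py.
    parsed = rdfg.load_config_from_file("dependencies.yaml")

    # Generate Conda environment files for every file key in the config,
    # printing to stdout instead of the configured output directories.
    rdfg.make_dependency_files(
        parsed_config=parsed,
        file_keys=list(parsed.files),
        output={rdfg.Output.CONDA},
        matrix=None,          # use each file key's default matrix
        prepend_channels=[],  # nothing prepended to parsed.channels
        to_stdout=True,
    )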