diff --git a/.authors.yml b/.authors.yml index 99635e8cc9..89d9a7ae00 100644 --- a/.authors.yml +++ b/.authors.yml @@ -4,23 +4,24 @@ alternate_emails: - msarahan@continuum.io - msarahan@gmail.com + - msarahan@nvidia.com aliases: - Mike Sarahan - Michael Sarahan - num_commits: 4000 + num_commits: 2001 first_commit: 2015-09-04 21:31:08 - name: Jonathan J. Helmus email: jjhelmus@gmail.com aliases: - Jonathan Helmus - num_commits: 109 + num_commits: 110 first_commit: 2014-06-09 17:25:05 github: jjhelmus - name: Isuru Fernando email: isuruf@gmail.com alternate_emails: - isuru.11@cse.mrt.ac.lk - num_commits: 82 + num_commits: 84 first_commit: 2017-06-16 15:14:34 github: isuruf - name: Dan Blanchard @@ -63,7 +64,7 @@ alternate_emails: - mandeep@users.noreply.github.com - mbhutani@continuum.io - num_commits: 86 + num_commits: 43 first_commit: 2017-05-17 23:54:01 github: mandeep - name: Filipe Fernandes @@ -117,7 +118,7 @@ - heather999@users.noreply.github.com aliases: - heather999 - num_commits: 4 + num_commits: 2 first_commit: 2016-04-11 12:02:50 github: heather999 - name: Ryan Grout @@ -161,7 +162,7 @@ aliases: - MinRK github: minrk - num_commits: 14 + num_commits: 16 first_commit: 2014-02-13 19:43:59 - name: Matty G email: meawoppl@gmail.com @@ -571,7 +572,7 @@ alternate_emails: - scastellarin95@gmail.com - scastellarin@anaconda.com - num_commits: 196 + num_commits: 98 first_commit: 2016-09-06 16:58:21 github: soapy1 - name: Bruno Oliveira @@ -611,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 70 + num_commits: 85 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -754,7 +755,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 136 + num_commits: 74 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -873,7 +874,7 @@ alternate_emails: - 5738695+183amir@users.noreply.github.com - amir.mohammadi@idiap.ch - num_commits: 12 + num_commits: 6 first_commit: 2018-02-27 16:37:19 - name: David Li email: li.davidm96@gmail.com @@ -967,7 +968,7 @@ first_commit: 2019-01-26 13:17:33 - name: Rachel Rigdon email: rrigdon@anaconda.com - num_commits: 268 + num_commits: 134 first_commit: 2019-01-24 15:12:09 github: rrigdon aliases: @@ -1018,7 +1019,7 @@ github: theultimate1 - name: Kai Tietz email: ktietz@anaconda.com - num_commits: 8 + num_commits: 9 first_commit: 2019-04-04 02:38:29 github: katietz alternate_emails: @@ -1056,14 +1057,16 @@ github: spalmrot-tic - name: Daniel Bast email: 2790401+dbast@users.noreply.github.com - num_commits: 14 + num_commits: 16 first_commit: 2019-06-07 02:44:13 github: dbast - name: Duncan Macleod - email: duncan.macleod@ligo.org - num_commits: 5 + num_commits: 7 + email: duncanmmacleod+github@gmail.com first_commit: 2019-06-13 08:07:25 github: duncanmmacleod + alternate_emails: + - duncan.macleod@ligo.org - name: Chris Osborn email: csosborn@users.noreply.github.com num_commits: 1 @@ -1118,7 +1121,7 @@ alternate_emails: - becker.mr@gmail.com - beckermr@users.noreply.github.com - num_commits: 38 + num_commits: 19 first_commit: 2019-10-17 23:05:16 github: beckermr - name: Jinzhe Zeng @@ -1199,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 60 + num_commits: 178 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1225,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 21 + num_commits: 32 github: jezdez 
first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1240,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 27 + num_commits: 64 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1251,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 5 + num_commits: 23 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1262,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 36 + num_commits: 96 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1268,7 +1271,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 3 + num_commits: 15 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1278,7 +1281,7 @@ - name: Travis Hathaway email: travis.j.hathaway@gmail.com github: travishathaway - num_commits: 2 + num_commits: 7 first_commit: 2022-05-12 05:53:02 - name: Kyle Leaders email: remkade@users.noreply.github.com @@ -1294,18 +1297,20 @@ first_commit: 2022-04-18 12:03:05 - name: Jürgen Gmach email: juergen.gmach@googlemail.com + alternate_emails: + - juergen.gmach@canonical.com github: jugmac00 - num_commits: 2 + num_commits: 3 first_commit: 2022-05-31 07:52:17 - name: Katherine Kinnaman email: kkinnaman@anaconda.com github: kathatherine - num_commits: 1 + num_commits: 3 first_commit: 2022-07-07 10:56:31 - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 1 + num_commits: 4 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1322,7 +1327,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 2 + num_commits: 10 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1334,3 +1339,103 @@ github: brettcannon num_commits: 1 first_commit: 2022-11-16 21:54:14 +- name: Srivas Venkatesh + email: 110486050+sven6002@users.noreply.github.com + num_commits: 2 + first_commit: 2022-12-14 19:50:36 + github: sven6002 +- name: Ernst Luring + email: ernst@ejldigital.com + num_commits: 1 + first_commit: 2023-02-23 20:40:15 + github: ernstluring +- name: Felix Kühnl + email: xileF1337@users.noreply.github.com + num_commits: 2 + first_commit: 2023-03-07 22:37:07 + github: xileF1337 +- name: T Coxon + email: 97948946+tttc3@users.noreply.github.com + num_commits: 1 + first_commit: 2023-02-24 01:34:03 + github: tttc3 +- name: Peet Whittaker + email: peet.whittaker@gmail.com + num_commits: 1 + first_commit: 2023-02-24 01:35:43 + github: peetw +- name: Johnny + email: johnnync13@gmail.com + alternate_emails: + - johnnynuca14@gmail.com + num_commits: 2 + first_commit: 2023-03-22 00:34:22 + github: johnnynunez +- name: Ryan Keith + email: rkeith@anaconda.com + aliases: + - Ryan + github: ryanskeith + num_commits: 6 + first_commit: 2023-03-22 03:11:02 +- name: Rishabh Singh + email: 67859818+rishabh11336@users.noreply.github.com + aliases: + - rishabh11336 + github: rishabh11336 + num_commits: 2 + first_commit: 2023-05-15 11:19:48 +- name: Ferry Firmansjah + email: 103191403+ffirmanff@users.noreply.github.com + github: ffirmanff + num_commits: 1 + first_commit: 2023-04-14 11:54:03 +- name: Riadh Fezzani + email: rfezzani@gmail.com + github: rfezzani + num_commits: 
1 + first_commit: 2023-05-23 13:46:49 +- name: Jose Diaz-Gonzalez + email: email@josediazgonzalez.com + github: josegonzalez + num_commits: 1 + first_commit: 2023-06-14 16:02:40 +- name: Jack Olivieri + email: boldorider4@gmail.com + github: boldorider4 + num_commits: 1 + first_commit: 2023-08-30 10:32:34 +- name: Wolf Vollprecht + email: w.vollprecht@gmail.com + github: wolfv + num_commits: 1 + first_commit: 2023-09-22 07:01:49 +- name: Dave Karetnyk + email: Dave.Karetnyk@gmail.com + github: DaveKaretnyk + num_commits: 1 + first_commit: 2023-09-16 05:21:09 +- name: Shaun Walbridge + email: 46331011+scdub@users.noreply.github.com + github: scdub + num_commits: 2 + first_commit: 2023-08-18 02:53:28 +- name: Daniel Petry + email: dpetry@anaconda.com + github: danpetry + aliases: + - danpetry + num_commits: 1 + first_commit: 2023-11-02 13:57:57 +- name: H. Vetinari + email: h.vetinari@gmx.com + github: h-vetinari + aliases: + - h-vetinari + num_commits: 1 + first_commit: 2023-10-25 09:33:34 +- name: Finn Womack + email: flan313@gmail.com + num_commits: 1 + first_commit: 2024-02-06 11:43:45 + github: finnagin diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index ababc663b1..0000000000 --- a/.coveragerc +++ /dev/null @@ -1,3 +0,0 @@ -[run] -parallel=True -omit=conda_build/skeletons/_example_skeleton.py diff --git a/.devcontainer/apt-deps.txt b/.devcontainer/apt-deps.txt new file mode 100644 index 0000000000..130c90c515 --- /dev/null +++ b/.devcontainer/apt-deps.txt @@ -0,0 +1,5 @@ +git +less +htop +nano +ssh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..9f5533a660 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,38 @@ +// For format details, see https://aka.ms/devcontainer.json +{ + "name": "Miniconda (default-channel=defaults)", + "image": "continuumio/miniconda3:latest", + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "bash /workspaces/conda-build/.devcontainer/post_create.sh", + // Use 'postStartCommand' to run commands after the container is started. + "postStartCommand": "bash /workspaces/conda-build/.devcontainer/post_start.sh", + + // Configure tool-specific properties. + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true + }, + "extensions": [ + "charliermarsh.ruff", + "eamodio.gitlens", + "ms-toolsai.jupyter" + ] + } + } + + // Adjust to connect as non-root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root", + +} diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh new file mode 100644 index 0000000000..766bcb9f29 --- /dev/null +++ b/.devcontainer/post_create.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -euo pipefail + +BASE_CONDA=${BASE_CONDA:-/opt/conda} +SRC_CONDA_BUILD=${SRC_CONDA_BUILD:-/workspaces/conda-build} + +if which apt-get > /dev/null; then + HERE=$(dirname $0) + echo "Installing system dependencies" + apt-get update + DEBIAN_FRONTEND=noninteractive xargs -a "$HERE/apt-deps.txt" apt-get install -y +fi + +# Clear history to avoid unneeded conflicts +echo "Clearing base history..." +echo '' > "$BASE_CONDA/conda-meta/history" + +echo "Installing dev dependencies" +"$BASE_CONDA/bin/conda" install \ + -n base \ + --yes \ + --quiet \ + --file "$SRC_CONDA_BUILD/tests/requirements.txt" \ + --file "$SRC_CONDA_BUILD/tests/requirements-Linux.txt" \ + --file "$SRC_CONDA_BUILD/tests/requirements-ci.txt" \ + "conda>=23.7.0" diff --git a/.devcontainer/post_start.sh b/.devcontainer/post_start.sh new file mode 100644 index 0000000000..804a44a84f --- /dev/null +++ b/.devcontainer/post_start.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +# This script assumes we are running in a Miniconda container where: +# - /opt/conda is the Miniconda or Miniforge installation directory +# - https://github.com/conda/conda is mounted at /workspaces/conda +# - https://github.com/conda/conda-libmamba-solver is mounted at +# /workspaces/conda-libmamba-solver +# - https://github.com/mamba-org/mamba is (optionally) mounted at +# /workspaces/mamba + +set -euo pipefail + +BASE_CONDA=${BASE_CONDA:-/opt/conda} +SRC_CONDA_BUILD=${SRC_CONDA_BUILD:-/workspaces/conda-build} + +echo "Installing conda-build in dev mode..." +"$BASE_CONDA/bin/python" -m pip install -e "$SRC_CONDA_BUILD" --no-deps + +set -x +conda list -p "$BASE_CONDA" +conda info +conda config --show-sources +set +x diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..1cf332271a --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,11 @@ +# git revs to ignore when exploring the repo history with git blame +# usage: +# * via args: `git blame --ignore-revs-file .git-blame-ignore-revs $file` +# * via settings `git config --local blame.ignoreRevsFile .git-blame-ignore-revs` +# +# useful: +# * mark skipped lines with "?": `git config --global blame.markIgnoredLines true` +# * mark lines added by skipped commit with "*": `git config --global blame.markUnblamableLines true` + +# black & isort auto format (#4836) +4131968d4ae902e6419028bf6c5102f3e061a950 diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000000..8fb235d704 --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes index 10174d0d33..cf9df9bc03 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,2 @@ -conda_build/_version.py export-subst * text=auto eol=lf +.git_archival.txt export-subst diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..eb9895e16e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# Syntax for this file at https://help.github.com/articles/about-codeowners/ + +* @conda/builds-tools diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index 8227415512..a53f6fba87 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ 
b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -8,7 +8,7 @@ body: value: | Because processing new bug reports is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible. - > **Note** + > [!NOTE] > Bug reports that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary. diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index 0759aac1cb..bc022b4122 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -8,7 +8,7 @@ body: value: | Because processing new feature requests is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible. - > **Note** + > [!NOTE] > Feature requests that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if your feature request has already been submitted. If you find something related, please upvote that issue and provide additional details as necessary. @@ -40,7 +40,7 @@ body: id: what attributes: label: What should happen? - description: What should be the user experience with the feature? Describe from a user perpective what they would do and see. + description: What should be the user experience with the feature? Describe from a user perspective what they would do and see. - type: textarea id: context attributes: diff --git a/.github/ISSUE_TEMPLATE/2_documentation.yml b/.github/ISSUE_TEMPLATE/2_documentation.yml new file mode 100644 index 0000000000..f3ce040892 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2_documentation.yml @@ -0,0 +1,36 @@ +name: Documentation +description: Create a documentation related issue. +labels: + - type::documentation +body: + - type: markdown + attributes: + value: | + > [!NOTE] + > Documentation requests that are incomplete or missing information may be closed as inactionable. + + Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary. + + 💐 Thank you for helping to make conda better. We would be unable to improve conda without our community! + - type: checkboxes + id: checks + attributes: + label: Checklist + description: Please confirm and check all of the following options. + options: + - label: I added a descriptive title + required: true + - label: I searched open reports and couldn't find a duplicate + required: true + - type: textarea + id: what + attributes: + label: What happened? + description: Mention here any typos, broken links, or missing, incomplete, or outdated information, etc. that you have noticed in the conda docs or CLI help. + validations: + required: true + - type: textarea + id: context + attributes: + label: Additional Context + description: Include any additional information (or screenshots) that you think would be valuable. 
diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index 77dfc7ecb9..9b3637f973 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -10,7 +10,7 @@ body: If you are attempting to report a bug, propose a new feature, or some other code change please use one of the other forms available. - > **Note** + > [!NOTE] > Epics that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if a similar epic has already been opened. If you find something related, please upvote that issue and provide additional details as necessary. @@ -24,23 +24,82 @@ body: options: - label: I added a descriptive title required: true - - label: I searched open reports and couldn't find a duplicate + - label: I searched open issues and couldn't find a duplicate required: true + - type: textarea - id: summary + id: what attributes: - label: Summary + label: What? description: >- - Define the highlevel objectives to be accomplished in this epic. Include the - bigger picture of what is changing and/or the user story for why the - changes are desired/necessary. + What feature or problem will be addressed in this epic? + placeholder: Please describe here. validations: required: true + - type: textarea + id: why attributes: - label: Linked Issues & PRs - description: List all issues related to this epic. + label: Why? + description: >- + Why is the reported issue(s) a problem, or why is the proposed feature needed? + (Research and spike issues can be linked here.) value: | - - [ ] # + - [ ] + placeholder: Please describe here and/or link to relevant supporting issues. + validations: + required: true + + - type: textarea + id: user_impact + attributes: + label: User impact + description: >- + In what specific way(s) will users benefit from this change? (e.g. use cases or performance improvements) + placeholder: Please describe here. validations: required: true + + - type: textarea + id: goals + attributes: + label: Goals + description: >- + What goal(s) should this epic accomplish? + value: | + - [ ] + validations: + required: true + + - type: textarea + id: tasks + attributes: + label: Tasks + description: >- + What needs to be done to implement this change? + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocked_by + attributes: + label: 'This epic is blocked by:' + description: >- + Epics and issues that block this epic. + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocks + attributes: + label: 'This epic blocks:' + description: >- + Epics and issues that are blocked by this epic. 
+ value: | + - [ ] + validations: + required: false diff --git a/.codecov.yml b/.github/codecov.yml similarity index 100% rename from .codecov.yml rename to .github/codecov.yml diff --git a/ci/github/.condarc b/.github/condarc similarity index 69% rename from ci/github/.condarc rename to .github/condarc index 01be5676eb..a76e773f8f 100644 --- a/ci/github/.condarc +++ b/.github/condarc @@ -2,6 +2,5 @@ auto_update_conda: False auto_activate_base: True notify_outdated_conda: False changeps1: False -pkgs_dirs: -- /usr/share/miniconda/envs/test/pkgs always_yes: True +local_repodata_ttl: 7200 diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..508818874b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + - package-ecosystem: pip + directory: /docs/ + schedule: + interval: weekly + allow: + # Allow only production updates for Sphinx + - dependency-name: sphinx + dependency-type: production + - package-ecosystem: github-actions + directory: /.github/workflows + schedule: + interval: weekly diff --git a/.github/labels.yml b/.github/labels.yml index cdd6853502..bcc616d339 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -1,28 +1,9 @@ -# Deprecated -- name: 3_In_Progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: 4_Needs_Review - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: effort-high - description: "[deprecated]" - color: "888888" -- name: effort-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: effort-medium - description: "[deprecated]" - color: "888888" -- name: in_progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: knowledge-high - description: "[deprecated]" - color: "888888" -- name: knowledge-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: knowledge-medium - description: "[deprecated]" - color: "888888" +# Builds + - name: build::review + description: trigger a build for this PR + color: '#7b4052' + +# Tags + - name: tag::noarch + description: related to noarch builds + color: '#86c579' diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 672657ed26..cad5d250b0 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -17,14 +17,16 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-14 + subdir: osx-arm64 + - runner: macos-13 subdir: osx-64 - runner: windows-latest subdir: win-64 runs-on: ${{ matrix.runner }} steps: - name: Remove build label - uses: actions/github-script@v6 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea with: github-token: ${{ secrets.CANARY_ACTION_TOKEN }} script: | @@ -46,19 +48,19 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@v22.10.0 + uses: conda/actions/canary-release@1e442e090ad28c9b0f85697105703a303320ffd1 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} anaconda-org-channel: conda-canary anaconda-org-label: ${{ github.repository_owner }}-${{ github.event.repository.name 
}}-pr-${{ github.event.number }} - anaconda-org-token: ${{ secrets.ANACONDA_ORG_TOKEN }} + anaconda-org-token: ${{ secrets.ANACONDA_ORG_CONDA_CANARY_TOKEN }} comment-headline: Review build status comment-token: ${{ secrets.CANARY_ACTION_TOKEN }} diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index c4ddd1fd90..ad56cf6e64 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -5,27 +5,31 @@ on: types: - created pull_request_target: - types: - - reopened - - opened - - synchronize jobs: check: if: >- !github.event.repository.fork && ( - github.event.comment.body == '@conda-bot check' + github.event.issue.pull_request + && github.event.comment.body == '@conda-bot check' || github.event_name == 'pull_request_target' ) runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v22.9.0 + uses: conda/actions/check-cla@1e442e090ad28c9b0f85697105703a303320ffd1 with: # [required] - # label to add when actor has signed the CLA + # A token with ability to comment, label, and modify the commit status + # (`pull_request: write` and `statuses: write` for fine-grained PAT; `repo` for classic PAT) + # (default: secrets.GITHUB_TOKEN) + token: ${{ secrets.CLA_ACTION_TOKEN }} + # [required] + # Label to apply to contributor's PR once CLA is signed label: cla-signed + # [required] - # the GitHub Personal Access Token to comment and label with - token: "${{ secrets.CLA_ACTION_TOKEN }}" + # Token for opening signee PR in the provided `cla_repo` + # (`pull_request: write` for fine-grained PAT; `repo` and `workflow` for classic PAT) + cla_token: ${{ secrets.CLA_FORK_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 42e73c5d35..11bd69c67b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,28 +7,28 @@ on: branches: - main paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** jobs: docs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 - name: Setup - run : | + run: | make env-docs - name: Build the docs - run : | + run: | cd docs conda run --name conda-build-docs make html diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 6c4eea15e0..7a114d6d41 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -11,28 +11,24 @@ env: SUPPORT_LBL: pending::support jobs: - # NOTE: doesn't catch cases where multiple users act as the author/reporter, - # this is just an effort to catch the majority of support cases + # NOTE: will update label if anyone responds, not just the author/reporter # TODO: create conda-issue-sorting team and modify this to toggle label based on # whether a non-issue-sorting engineer commented pending_support: - # if [pending::feedback] and the author responds + # if [pending::feedback] and anyone responds if: >- !github.event.repository.fork - && github.event_name == 'issue_comment' - && github.event.action == 'created' && !github.event.issue.pull_request && contains(github.event.issue.labels.*.name, 'pending::feedback') - && github.event.issue.user.login == 
github.event.comment.user.login runs-on: ubuntu-latest steps: # remove [pending::feedback] - - uses: actions-ecosystem/action-remove-labels@v1.3.0 + - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} - # add [pending::feedback], if still open - - uses: actions-ecosystem/action-add-labels@v1.1.0 + # add [pending::support], if still open + - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf if: github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 072572a709..9ec951a22f 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -6,7 +6,7 @@ on: workflow_dispatch: inputs: dryrun: - description: "dryrun: Preview changes to labels without editing them (true|false)" + description: 'dryrun: Preview changes to labels without editing them (true|false)' required: true type: boolean default: true @@ -19,20 +19,20 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 - id: has_local - uses: andstor/file-existence-action@v1.0.1 + uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@v2.3.0 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@v2.3.0 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index d46934651a..2204b62dda 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -4,12 +4,6 @@ on: # NOTE: github.event is workflow_dispatch payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch workflow_dispatch: - # inputs: - # dryrun: - # description: "dryrun: Preview locking issues/prs without marking them (true|false)" - # required: true - # type: boolean - # default: true schedule: - cron: 0 6 * * * @@ -23,37 +17,29 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - # - id: read_yaml - # uses: conda/actions/read-yaml@v22.2.1 - # with: - # path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: dessant/lock-threads@v2 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 with: # Number of days of inactivity before a closed issue is locked - issue-lock-inactive-days: 365 + issue-inactive-days: 365 # Do not lock issues created before a given timestamp, value must follow ISO 8601 - issue-exclude-created-before: '' + exclude-issue-created-before: '' # Do not lock issues with these labels, value must be a comma separated list of labels or '' - issue-exclude-labels: '' + exclude-any-issue-labels: '' # Labels to add before locking an issue, value must be a comma separated list of labels or '' - issue-lock-labels: 'locked' - # Comment to post before locking an issue - # issue-lock-comment: ${{ 
fromJSON(steps.read_yaml.outputs.value)['lock-issue'] }} + add-issue-labels: locked # Reason for locking an issue, value must be one of resolved, off-topic, too heated, spam or '' - issue-lock-reason: 'resolved' + issue-lock-reason: resolved # Number of days of inactivity before a closed pull request is locked - pr-lock-inactive-days: 365 + pr-inactive-days: 365 # Do not lock pull requests created before a given timestamp, value must follow ISO 8601 - pr-exclude-created-before: '' + exclude-pr-created-before: '' # Do not lock pull requests with these labels, value must be a comma separated list of labels or '' - pr-exclude-labels: '' + exclude-any-pr-labels: '' # Labels to add before locking a pull request, value must be a comma separated list of labels or '' - pr-lock-labels: 'locked' - # Comment to post before locking a pull request - # pr-lock-comment: ${{ fromJSON(steps.read_yaml.outputs.value)['lock-pr'] }} + add-pr-labels: locked # Reason for locking a pull request, value must be one of resolved, off-topic, too heated, spam or '' - pr-lock-reason: 'resolved' + pr-lock-reason: resolved - # Limit locking to only issues or pull requests, value must be one of issues, prs or '' - process-only: '' + # Limit locking to issues, pull requests or discussions, value must be a comma separated list of issues, prs, discussions or '' + process-only: issues, prs diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index fc5a9a3600..7d06584c86 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -13,7 +13,9 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v0.3.0 + - uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 with: - project-url: https://github.com/orgs/conda/projects/2 + # issues are added to the Planning project + # PRs are added to the Review project + project-url: https://github.com/orgs/conda/projects/${{ github.event_name == 'issues' && 2 || 16 }} github-token: ${{ secrets.PROJECT_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index ce974c0d05..78f4ac5eee 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -6,7 +6,7 @@ on: workflow_dispatch: inputs: dryrun: - description: "dryrun: Preview stale issues/prs without marking them (true|false)" + description: 'dryrun: Preview stale issues/prs without marking them (true|false)' required: true type: boolean default: true @@ -14,74 +14,83 @@ on: schedule: - cron: 0 4 * * * +permissions: + issues: write + pull-requests: write + jobs: stale: if: '!github.event.repository.fork' runs-on: ubuntu-latest strategy: matrix: - # The issues labeled "support" have a more aggressive stale/close timeline from the rest - only-issue-labels: ['type::support', ''] + include: + - only-issue-labels: '' + days-before-issue-stale: 365 + days-before-issue-close: 30 + # [type::support] issues have a more aggressive stale/close timeline + - only-issue-labels: type::support + days-before-issue-stale: 90 + days-before-issue-close: 21 steps: - - id: read_yaml - uses: conda/actions/read-yaml@v22.9.0 + - uses: conda/actions/read-yaml@1e442e090ad28c9b0f85697105703a303320ffd1 + id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v4 + + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e id: stale with: - # Idle number of days before marking issues stale (default: 60) - days-before-issue-stale: ${{ 
matrix.only-issue-labels && 21 || 365 }} - # Idle number of days before closing stale issues/PRs (default: 7) - days-before-issue-close: ${{ matrix.only-issue-labels && 7 || 30 }} - # Idle number of days before marking PRs stale (default: 60) + # Only issues with these labels are checked whether they are stale + only-issue-labels: ${{ matrix.only-issue-labels }} + + # Idle number of days before marking issues stale + days-before-issue-stale: ${{ matrix.days-before-issue-stale }} + # Idle number of days before closing stale issues/PRs + days-before-issue-close: ${{ matrix.days-before-issue-close }} + # Idle number of days before marking PRs stale days-before-pr-stale: 365 - # Idle number of days before closing stale PRs (default: 7) + # Idle number of days before closing stale PRs days-before-pr-close: 30 # Comment on the staled issues stale-issue-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-issue'] }} - # Comment on the staled issues while closed - # close-issue-message: ${{ fromJSON(steps.read_yaml.outputs.value)['close-issue'] }} - # Comment on the staled PRs - stale-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-pr'] }} - # Comment on the staled PRs while closed - # close-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['close-pr'] }} # Label to apply on staled issues - stale-issue-label: 'stale' + stale-issue-label: stale # Label to apply on closed issues - close-issue-label: 'stale::closed' + close-issue-label: stale::closed + # Reason to use when closing issues + close-issue-reason: not_planned + + # Comment on the staled PRs + stale-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-pr'] }} # Label to apply on staled PRs - stale-pr-label: 'stale' + stale-pr-label: stale # Label to apply on closed PRs - close-pr-label: 'stale::closed' + close-pr-label: stale::closed - # Issues with these labels will never be considered stale - exempt-issue-labels: 'stale::recovered,epic' - # Issues with these labels will never be considered stale - exempt-pr-labels: 'stale::recovered,epic' - # Only issues with these labels are checked whether they are stale - only-issue-labels: ${{ matrix.only-issue-labels }} - - # Max number of operations per run - operations-per-run: ${{ secrets.STALE_OPERATIONS_PER_RUN || 100 }} # Remove stale label from issues/PRs on updates/comments remove-stale-when-updated: true - # Add specified labels to issues/PRs when they become unstale - labels-to-add-when-unstale: 'stale::recovered' - labels-to-remove-when-unstale: 'stale,stale::closed' + labels-to-add-when-unstale: stale::recovered + # Remove specified labels to issues/PRs when they become unstale + labels-to-remove-when-unstale: stale,stale::closed - # Dry-run (default: false) + # Max number of operations per run + operations-per-run: ${{ secrets.STALE_OPERATIONS_PER_RUN || 100 }} + # Dry-run debug-only: ${{ github.event.inputs.dryrun || false }} - # Order to get issues/PRs (default: false) + # Order to get issues/PRs ascending: true - # Delete branch after closing a stale PR (default: false) + # Delete branch after closing a stale PR delete-branch: false + # Issues with these labels will never be considered stale + exempt-issue-labels: stale::recovered,epic + # Issues with these labels will never be considered stale + exempt-pr-labels: stale::recovered,epic # Exempt all issues/PRs with milestones from stale exempt-all-milestones: true - # Assignees on issues/PRs exempted from stale exempt-assignees: mingwandroid diff --git a/.github/workflows/tests.yml 
b/.github/workflows/tests.yml index 3ea15b4e7a..e548cc609a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,17 +1,25 @@ name: Tests on: - # NOTE: github.event context is push payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#push push: branches: - main - feature/** + - '[0-9].*.x' # e.g., 3.24.x + - '[0-9][0-9].*.x' # e.g., 23.3.x - # NOTE: github.event context is pull_request payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch + workflow_dispatch: + + # no payload + schedule: + # https://crontab.guru/#37_18_*_*_* + - cron: 37 18 * * * + concurrency: # Concurrency group that uses the workflow name and PR number if available # or commit SHA as a fallback. If a new build is triggered under that @@ -21,399 +29,519 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +env: + # https://conda.github.io/conda-libmamba-solver/user-guide/configuration/#advanced-options + CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED: true + jobs: + # detect whether any code changes are included in this PR + changes: + runs-on: ubuntu-latest + permissions: + # necessary to detect changes + # https://github.com/dorny/paths-filter#supported-workflows + pull-requests: read + outputs: + code: ${{ steps.filter.outputs.code }} + steps: + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 + # dorny/paths-filter needs git clone for non-PR events + # https://github.com/dorny/paths-filter#supported-workflows + if: github.event_name != 'pull_request' + + - name: Filter Changes + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 + id: filter + with: + filters: | + code: + - 'conda_build/**' + - 'tests/**' + - '*.py' + - 'recipe/**' + - '.github/workflows/tests.yml' + + # linux test suite linux: + # only run test suite if there are code changes + needs: changes + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + runs-on: ubuntu-latest defaults: run: - shell: bash -l {0} + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell + shell: bash -el {0} strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8'] + # test all lower versions (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.9', '3.10', '3.11'] conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.9' + # minimum Python/conda combo + - python-version: '3.8' + conda-version: 23.7.0 + test-type: serial + - python-version: '3.8' + conda-version: 23.7.0 + test-type: parallel + # maximum Python/conda combo + - python-version: '3.12' conda-version: canary test-type: serial - - python-version: '3.9' + - python-version: '3.12' conda-version: canary test-type: parallel env: - pytest-replay: --replay-record-dir=pytest-replay/ --replay-base-name=Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev::' || '' }} + CONDA_VERSION: ${{ contains('canary|release', 
matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }} + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} steps: - - name: Checkout repository - uses: actions/checkout@v2 + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV + + - name: Cache Conda + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 + with: + path: ~/conda_pkgs_dir + key: cache-${{ env.HASH }} + - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: - condarc-file: ci/github/.condarc - python-version: ${{ matrix.python-version }} + condarc-file: .github/condarc + run-post: false # skip post cleanup + + - name: Conda Install + run: > + conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}${{ env.CONDA_VERSION }} + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . + + - name: Conda Info + # view test env info (not base) + run: python -m conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + run: > + pytest + --cov=conda_build + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 + with: + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Install Conda-Canary if canary version - if: matrix.conda-version == 'canary' - run: | - conda install -y -c conda-canary/label/dev conda + - name: Upload Test Results + if: '!cancelled()' + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 + with: + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below + + # linux benchmarks + linux-benchmarks: + # only run test suite if there are code changes + needs: changes + if: needs.changes.outputs.code == 'true' - - name: Install default Conda if release version - if: matrix.conda-version == 'release' - run: | - conda install -y conda + runs-on: ubuntu-latest + defaults: + run: + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell + shell: bash -el {0} # bash exit immediately on error + login shell + strategy: + fail-fast: false + matrix: + python-version: ['3.12'] - - name: Source Scripts - run: | - source ci/github/install_conda_build_test_deps - pip install -e . 
- conda info -a - mkdir ./pytest-replay + steps: + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 + with: + fetch-depth: 0 - - name: Run Serial tests - if: matrix.test-type == 'serial' - run: | - pytest \ - --color=yes \ - -vv \ - -n 0 \ - --basetemp /tmp/cb_serial \ - --cov conda_build \ - --cov-report xml \ - -m "serial" tests \ - ${{ env.pytest-replay }} - - - name: Run Parallel tests - if: matrix.test-type == 'parallel' - run: | - pytest \ - --color=yes \ - -vv \ - -n auto \ - --basetemp /tmp/cb \ - --cov conda_build \ - --cov-append \ - --cov-report xml \ - -m "not serial" tests \ - ${{ env.pytest-replay }} - - - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-benchmark-$(date -u "+%Y%m")" >> $GITHUB_ENV + + - name: Cache Conda + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: - name: Linux_replay-py-${{ matrix.python-version }}_${{ matrix.conda-version }}_${{ matrix.test-type }} - path: pytest-replay/ + path: ~/conda_pkgs_dir + key: cache-${{ env.HASH }} + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca + with: + condarc-file: .github/condarc + run-post: false # skip post cleanup + + - name: Conda Install + run: > + conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}${{ env.CONDA_VERSION }} + + - name: Install CodSpeed + run: pip install git+https://github.com/kenodegard/pytest-codspeed.git@fix-outerr-redirects#egg=pytest-codspeed + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . 
+ + - name: Conda Info + # view test env info (not base) + run: python -m conda info --verbose + + - name: Conda Config + run: conda config --show-sources + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Benchmarks + uses: CodSpeedHQ/action@0b631f8998f2389eb5144632b6f9f8fabd33a86e + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: $CONDA/envs/test/bin/pytest --codspeed + + # windows test suite windows: + # only run test suite if there are code changes + needs: changes + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + runs-on: windows-2019 strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8'] + # test lower version (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.8'] conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.9' + - python-version: '3.12' conda-version: canary test-type: serial - - python-version: '3.9' + - python-version: '3.12' conda-version: canary test-type: parallel - timeout-minutes: 120 env: - serial_or_parallel: '' + ErrorActionPreference: Stop # powershell exit on first error + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial and not slow' }} steps: - - name: Checkout repository - uses: actions/checkout@v2 + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 - - name: Set temp dirs correctly - shell: cmd - # https://github.com/actions/virtual-environments/issues/712 - run: | - echo "TMPDIR=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV - echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV - echo "TMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV + - name: Hash + Timestamp + shell: bash # use bash to run date command + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - - name: Setup Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + - name: Cache Conda + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: - python-version: ${{ matrix.python-version }} + path: ~/conda_pkgs_dir + key: cache-${{ env.HASH }} - - name: Install vcpython27.msi - run: | - $wc = New-Object net.webclient - $wc.Downloadfile("https://github.com/GeoNode/geonode-win-installer/raw/ffb76c7cbf1d6b4970c6c25f79c3c7682a3aa035/VCForPython27.msi", "VCForPython27.msi") - Start-Process "VCForPython27.msi" /qn -Wait - shell: pwsh + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca + with: + condarc-file: .github\condarc + run-post: false # skip post cleanup + + - name: Choco Install + run: choco install visualstudio2017-workload-vctools + + - name: Conda Install + run: > + conda install + --yes + --file tests\requirements.txt + --file tests\requirements-${{ runner.os }}.txt + --file tests\requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}::conda + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . 
+ + - name: Conda Info + # view test env info (not base) + run: python -m conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + # Windows is sensitive to long paths, using `--basetemp=${{ runner.temp }} to + # keep the test directories shorter + run: > + pytest + --cov=conda_build + --basetemp=${{ runner.temp }} + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 + with: + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Install miniconda - run: | - rmdir C:\Strawberry /s /q - choco install wget visualstudio2017-workload-vctools - wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe - start /wait "" Miniconda3-latest-Windows-x86_64.exe /InstallationType=JustMe /S /D=%UserProfile%\Miniconda3 - "%UserProfile%\Miniconda3\condabin\conda.bat" init - conda info -a - shell: cmd - - - name: Patch vs2008 - run: | - Set-PSDebug -Trace 1 - $batchcontent = @" - ECHO ON - SET vcpython=C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0 - DIR "%vcpython%" - CALL "%vcpython%\vcvarsall.bat" %* - "@ - $batchDir = "C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC" - $batchPath = "$batchDir" + "\vcvarsall.bat" - New-Item -Path $batchPath -ItemType "file" -Force - Set-Content -Value $batchcontent -Path $batchPath - Get-ChildItem -Path $batchDir - Get-ChildItem -Path ($batchDir + '\..') - set LIB - shell: pwsh - - - name: conda init - run: | - echo on - set PATH - doskey conda="call %UserProfile%\Miniconda3\condabin\conda.bat" $* - doskey /macros - call "%UserProfile%\Miniconda3\condabin\conda.bat" init - set PATH - shell: cmd - - - name: Configuration - run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - call conda install python="%PYTHON_VERSION%" -y||exit 1 - if "%CONDA_VERSION%" == "canary" (call conda update -c conda-canary/label/dev conda||exit 1) else (call conda update -q conda||exit 1) - call conda config --set always_yes yes - call conda config --set auto_update_conda no - call conda info - python -c "import sys; print(sys.version)" - python -c "import sys; print(sys.executable)" - python -c "import sys; print(sys.prefix)" - call conda update -q --all||exit 1 - call conda install -q pip python-libarchive-c pytest git pytest-cov jinja2 m2-patch flake8 mock requests contextlib2 chardet glob2 perl pyflakes pycrypto posix m2-git anaconda-client numpy beautifulsoup4 pytest-xdist pytest-mock filelock pkginfo psutil pytz tqdm conda-package-handling||exit 1 - call conda install pytest-replay pytest-rerunfailures -y||exit 1 - echo safety_checks: disabled >> %UserProfile%\.condarc - echo local_repodata_ttl: 1800 >> %UserProfile%\.condarc - call conda install -q py-lief||exit 1 - python --version - python -c "import struct; print(struct.calcsize('P') * 8)" - pip install --no-deps . - conda-build --version - pushd .. 
&& git clone https://github.com/conda/conda_build_test_recipe && popd - mkdir %UserProfile%\cbtmp_serial - mkdir %UserProfile%\cbtmp - for /d %%F in (%UserProfile%\cbtmp_serial\*) do rd /s /q "%%F" - for /d %%F in (%UserProfile%\cbtmp\*) do rd /s /q "%%F" - call conda create -n blarg -yq --download-only python=2.7||exit 1 - call conda create -n blarg -yq --download-only python=3.8||exit 1 - call conda create -n blarg -yq --download-only python cmake||exit 1 - mkdir ${{ github.workspace }}\\pytest-replay - set "PYTEST_REPLAY_OPTIONS=--replay-record-dir=${{ github.workspace }}\\pytest-replay --replay-base-name=Win-%CONDA_VERSION%-Py%PYTHON_VERSION%" - echo "##vso[task.setvariable variable=PYTEST_REPLAY_OPTIONS]%PYTEST_REPLAY_OPTIONS%" - shell: cmd - - - name: Run Serial Tests - if: matrix.test-type == 'serial' - run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - call conda install -y conda-verify||exit 1 - set PERL= - set LUA= - set R= - pytest --color=yes -vv -n 0 --basetemp %UserProfile%\cbtmp_serial --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} - shell: cmd - - - name: Run Parallel Tests - if: matrix.test-type == 'parallel' - run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - :: call conda remove -y conda-verify||exit 1 - set PERL= - set LUA= - set R= - pytest --color=yes -vv -n auto --basetemp %UserProfile%\cbtmp --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} - shell: cmd - env: - VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin - LIB: - - - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + - name: Upload Test Results + if: '!cancelled()' + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: - path: ${{ github.workspace }}/pytest-replay - name: Win-${{ env.CONDA_VERSION }}-Python${{ matrix.python-version }}-${{ env. 
parameters.serial_or_parallel }} - if: always() + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below + # macos test suite macos: + # only run test suite if there are code changes + needs: changes + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + + # Old macOS needed for old SDK (see xcode step below) + # This is needed for some MACOSX_DEPLOYMENT_TARGET tests + # We could also install SDKs from a external provider in the future + # if we want to update this runner to a non-deprecated version runs-on: macos-11 defaults: run: - shell: bash -l {0} + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell + shell: bash -el {0} strategy: fail-fast: false matrix: - python-version: ['3.9'] - conda-version: [canary] + # test lower version (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.8'] + conda-version: [release] test-type: [serial, parallel] - max-parallel: 10 - timeout-minutes: 120 + include: + - python-version: '3.12' + conda-version: canary + test-type: serial + - python-version: '3.12' + conda-version: canary + test-type: parallel env: - pytest-replay: --replay-record-dir=pytest-replay/ --replay-base-name=macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} steps: - - name: Checkout repository - uses: actions/checkout@v2 + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 - - name: Install miniconda - run: | - set -x -e -u - curl -o ${{ github.workspace }}/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - chmod +x ${{ github.workspace }}/miniconda.sh - ${{ github.workspace }}/miniconda.sh -b -p ${{ github.workspace }}/miniconda - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - - name: Setup CF MacOS Bits and Select Xcode - run: | - set -x -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - source ci/github/run_conda_forge_build_setup_osx + - name: Cache Conda + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 + with: + path: ~/conda_pkgs_dir + key: cache-${{ env.HASH }} - - name: Prepare Test Environment - run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda info - conda list - grep '^#' "${CONDA_PREFIX}/conda-meta/history" - conda install python=${{ matrix.python-version }} -y - mkdir -p ${{ github.workspace }}/miniconda/locks - mkdir -p ${{ github.workspace }}/miniconda/bin - chmod -w ${{ github.workspace }}/miniconda/locks - conda install -y -c conda-canary/label/dev conda - conda config --set always_yes yes - conda config --set auto_update_conda False - conda info - source ci/github/install_conda_build_test_deps - pip install -e . 
- conda info -a - conda list --show-channel-urls - - - name: Run Serial Tests - if: matrix.test-type == 'serial' - run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda install conda-verify -y - pytest --color=yes -v -n 0 --basetemp /tmp/cb_serial --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca + with: + condarc-file: .github/condarc + run-post: false # skip post cleanup + + - name: Xcode Install + run: sudo xcode-select --switch /Applications/Xcode_11.7.app + + - name: Conda Install + run: > + conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}::conda + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . + + - name: Conda Info + # view test env info (not base) + run: python -m conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + run: > + pytest + --cov=conda_build + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 + with: + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Run Parallel Tests - if: matrix.test-type == 'parallel' - run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda remove conda-verify -y - echo "safety_checks: disabled" >> ~/.condarc - echo "local_repodata_ttl: 1800" >> ~/.condarc - mkdir -p ~/.conda - conda create -n blarg1 -yq python=2.7 - conda create -n blarg3 -yq python=3.7 - conda create -n blarg4 -yq python nomkl numpy pandas svn - pytest --color=yes -v -n auto --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" - - - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + - name: Upload Test Results + if: '!cancelled()' + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: - path: ${{ github.workspace }}/pytest-replay - name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} - if: always() + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below + + # aggregate and upload + aggregate: + # only aggregate test suite if there are code changes + needs: [changes, linux, linux-benchmarks, windows, macos] + if: >- + !cancelled() + && ( + github.event_name == 'schedule' + || needs.changes.outputs.code == 'true' + ) - analyze: - name: Analyze test results - needs: [windows, linux, macos] - if: always() runs-on: ubuntu-latest steps: - - name: Download test results - uses: actions/download-artifact@v3 + - name: Download Artifacts + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e - - name: Upload combined test results - # provides one downloadable archive of all .coverage/test-report.xml files - # of all matrix runs for further analysis. 
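The reworked test jobs above drive the serial/parallel split entirely through pytest markers: `PYTEST_MARKER` resolves to `serial` or `not serial` per matrix entry and is handed to `pytest -m`. A minimal sketch of how a test opts into the serial lane (the test names below are hypothetical, and this assumes the suite registers a `serial` marker, as the workflow's `-m "serial"` / `-m "not serial"` filters imply):

```python
# Editorial sketch, not part of the diff: marker-based lane selection for the test matrix.
import pytest


@pytest.mark.serial  # collected only when the workflow runs: pytest -m "serial"
def test_example_requiring_exclusive_access():
    """Hypothetical test that must not run alongside other workers."""
    assert True


def test_example_safe_to_parallelize():
    """Unmarked tests fall into the parallel lane: pytest -m "not serial" -n auto."""
    assert True
```

Locally, the same lanes can be reproduced with `pytest -n auto -m "not serial"` or `pytest -m "serial"`, matching the Run Tests step shown above.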
- uses: actions/upload-artifact@v3 + - name: Upload Combined Test Results + # provides one downloadable archive of all matrix run test results for further analysis + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ github.sha }}-all - path: test-results-${{ github.sha }}-* - retention-days: 90 # default: 90 + path: test-results-* - name: Test Summary - uses: test-summary/action@v2 + uses: test-summary/action@032c8a9cec6aaa3c20228112cae6ca10a3b29336 with: - paths: ./test-results-${{ github.sha }}-**/test-report*.xml + paths: test-results-*/test-report.xml + + # required check + analyze: + needs: [linux, linux-benchmarks, windows, macos, aggregate] + if: '!cancelled()' - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 + runs-on: ubuntu-latest + steps: + - name: Determine Success + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe with: + # permit jobs to be skipped if there are no code changes (see changes job) + allowed-skips: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }} + # canary builds build: - name: Canary Build needs: [analyze] - # only build canary build iff + # only build canary build if # - prior steps succeeded, # - this is the main repo, and - # - we are on the main (or feature) branch + # - we are on the main, feature, or release branch if: >- - success() + !cancelled() && !github.event.repository.fork && ( github.ref_name == 'main' || startsWith(github.ref_name, 'feature/') + || endsWith(github.ref_name, '.x') ) strategy: matrix: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-13 subdir: osx-64 + - runner: macos-14 # FUTURE: Use -latest + subdir: osx-arm64 - runner: windows-latest subdir: win-64 runs-on: ${{ matrix.runner }} steps: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - - name: Create and upload canary build - uses: conda/actions/canary-release@v22.10.0 + # Explicitly use Python 3.12 since each of the OSes has a different default Python + - name: Setup Python + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d + with: + python-version: '3.12' + + - name: Detect Label + shell: python + run: | + from pathlib import Path + from re import match + from os import environ + + if "${{ github.ref_name }}" == "main": + # main branch commits are uploaded to the dev label + label = "dev" + elif "${{ github.ref_name }}".startswith("feature/"): + # feature branch commits are uploaded to a custom label + label = "${{ github.ref_name }}" + else: + # release branch commits are added to the rc label + # see https://github.com/conda/infrastructure/issues/760 + _, name = "${{ github.repository }}".split("/") + label = f"rc-{name}-${{ github.ref_name }}" + + Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") + + - name: Create & Upload + uses: conda/actions/canary-release@1e442e090ad28c9b0f85697105703a303320ffd1 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} anaconda-org-channel: conda-canary - anaconda-org-label: ${{ github.ref_name == 'main' && 'dev' || github.ref_name }} + anaconda-org-label: ${{ env.ANACONDA_ORG_LABEL }} anaconda-org-token: ${{ 
secrets.ANACONDA_ORG_CONDA_CANARY_TOKEN }} diff --git a/.gitignore b/.gitignore index af26fb7888..0e1abb2d0b 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,6 @@ conda-build-testing rever/ **/.vscode **/_build + +# setuptools-scm +conda_build/_version.py diff --git a/.mailmap b/.mailmap index 6d590c020a..02df1bf754 100644 --- a/.mailmap +++ b/.mailmap @@ -70,23 +70,29 @@ Dan Lovell dlovell Daniel Bast <2790401+dbast@users.noreply.github.com> Daniel Damiani Daniel Holth +Daniel Petry danpetry Darren Dale Dave Clements Dave Hirschfeld David Hirschfeld +Dave Karetnyk David Froger David Li Derek Ludwig Devon Ryan dpryan79 Diogo de Campos Dougal J. Sutherland -Duncan Macleod +Duncan Macleod Duncan Macleod Ed Campbell Eli Rykoff erykoff Elliot Hughes Eric Dill +Ernst Luring Evan Hubinger Evan Klitzke +Felix Kühnl +Ferry Firmansjah <103191403+ffirmanff@users.noreply.github.com> Filipe Fernandes ocefpaf +Finn Womack Floris Bruynooghe Gabriel Reis Gaëtan de Menten @@ -94,6 +100,7 @@ Geir Ove Myhr Greg Brener Guilherme Quentel Melo Guillaume Baty +H. Vetinari h-vetinari Harsh Gupta Harun Reşit Zafer Heather Kelly heather999 @@ -104,6 +111,7 @@ Ilan Schnell Ilan Schnell Ilan Schnell Isuru Fernando Isuru Fernando Ivan Kalev ikalev +Jack Olivieri Jacob Walls Jaime Rodríguez-Guerra James Abbott @@ -125,13 +133,15 @@ John Kirkham jakirkham John Kirkham John Kirkham John Kirkham John Kirkham John Omotani +Johnny Johnny Jonathan J. Helmus Jonathan Helmus +Jose Diaz-Gonzalez Joseph Crail Joseph Hunkeler Juan Lasheras jlas Julian Rüth Julien Schueller -Jürgen Gmach +Jürgen Gmach Jürgen Gmach Jędrzej Nowak Jedrzej Nowak Kai Tietz Kai Tietz <47363620+katietz@users.noreply.github.com> Kale Franz Kale Franz @@ -174,8 +184,10 @@ Michael Cormier Michael Maltese Michael Sarahan Mike Sarahan Michael Sarahan Mike Sarahan +Michael Sarahan Mike Sarahan Michael Sarahan Michael Sarahan Michael Sarahan Michael Sarahan +Michael Sarahan Michael Sarahan Min RK MinRK Morten Enemark Lund mel Morten Lund @@ -186,6 +198,7 @@ Oleksandr Pavlyk Patrick Snape Patrick Snape Patrick Sodré Paul Madden +Peet Whittaker Peter Williams Phil Elson pelson Phil Reinhold @@ -199,10 +212,12 @@ Rachel Rigdon rrigdon <45607889+rrigdon@users.noreply.git Rachel Rigdon rrigdon Ray Donnelly Remi Chateauneu +Riadh Fezzani Riccardo Vianello Richard Frank Richard Hattersley Rick Izzo +Rishabh Singh <67859818+rishabh11336@users.noreply.github.com> rishabh11336 <67859818+rishabh11336@users.noreply.github.com> Robert Coop Robert Langlois Robert T. McGibbon Robert McGibbon @@ -212,6 +227,7 @@ Ruben Vorderman Ryan Dale daler Ryan Grout Ryan Grout Ryan Grout Ryan Grout +Ryan Keith Ryan Rylan Chord Satoshi Yagi satoshi Scheah @@ -221,18 +237,21 @@ Sean Yen seanyen Sergio Oller Serhii Kupriienko <79282962+skupr-anaconda@users.noreply.github.com> Shaun Walbridge +Shaun Walbridge <46331011+scdub@users.noreply.github.com> Siu Kwan Lam Sophia Castellarin sophia Sophia Castellarin sophia Sophia Castellarin soapy1 Sophia Castellarin soapy1 Sophian Guidara +Srivas Venkatesh <110486050+sven6002@users.noreply.github.com> Stas Bekman Stefan Scherfke Stefan Zimmermann Stephan Hoyer Stephen Palmroth spalmrot-tic Stuart Berg +T Coxon <97948946+tttc3@users.noreply.github.com> Tadeu Manoel Tadeu Manoel Takafumi Arakaki Teake Nutma Teake Nutma @@ -256,6 +275,7 @@ Uwe L. 
Korn Vlad Frolov Wes Turner Wim Glenn wim glenn +Wolf Vollprecht Wolfgang Ulmer Yann Yoav Ram diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 15c4a460d1..fb922265c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,25 +1,24 @@ # disable autofixing PRs, commenting "pre-commit.ci autofix" on a pull request triggers a autofix ci: - autofix_prs: false + autofix_prs: false # generally speaking we ignore all vendored code as well as tests data # ignore patches/diffs since slight reformatting can break them exclude: | (?x)^( + conda_build/version.py | tests/( archives | index_data | test-cran-skeleton | test-recipes | - test-skeleton | - variant_recipe + test-skeleton )/ | - .*\.(patch|diff) | - versioneer.py | - conda_build/_version.py + .*\.(patch|diff) ) repos: + # generic verification and formatting - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: # standard end of line/end of file cleanup - id: mixed-line-ending @@ -37,40 +36,72 @@ repos: ) # catch git merge/rebase problems - id: check-merge-conflict - - repo: https://github.com/asottile/pyupgrade - rev: v3.2.2 - hooks: - - id: pyupgrade - args: ["--py37-plus"] - - repo: https://github.com/akaihola/darker - rev: 1.5.1 + # sort requirements files + - id: file-contents-sorter + files: | + (?x)^( + docs/requirements.txt | + tests/requirements.*\.txt + ) + args: [--unique] + # Python verification and formatting + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.5.5 hooks: - - id: darker - additional_dependencies: [black] + # auto inject license blurb + - id: insert-license + files: \.py$ + args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.1 + rev: 1.16.0 hooks: + # auto format Python codes within docstrings - id: blacken-docs - additional_dependencies: [black] - - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.2 hooks: - - id: flake8 - - repo: https://github.com/PyCQA/pylint - rev: v2.15.6 + # lint & attempt to correct failures (e.g. pyupgrade) + - id: ruff + args: [--fix] + # compatible replacement for black + - id: ruff-format + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.13.0 hooks: - - id: pylint - args: [--exit-zero] - - repo: https://github.com/PyCQA/bandit - rev: 1.7.4 + - id: pretty-format-toml + args: [--autofix, --trailing-commas] + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 hooks: - - id: bandit - args: [--exit-zero] - # ignore all tests, not just tests data - exclude: ^tests/ - - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.3.1 + - id: yamlfmt + # ruamel.yaml doesn't line wrap correctly (?) 
so set width to 1M to avoid issues + args: [--mapping=2, --offset=2, --sequence=4, --width=1000000, --implicit_start] + exclude: | + (?x)^( + .authors.yml | + conda_build/templates/npm.yaml | + conda_build/templates/setuptools.yaml | + docs/click/meta.yaml | + docs/source/user-guide/tutorials/meta.yaml | + recipe/meta.yaml | + tests/ + ) + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.2 hooks: - - id: insert-license - files: \.py$ - args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] + # verify github syntaxes + - id: check-github-workflows + - id: check-dependabot + - repo: meta + # see https://pre-commit.com/#meta-hooks + hooks: + - id: check-hooks-apply + - id: check-useless-excludes + - repo: local + hooks: + - id: git-diff + name: git diff + entry: git diff --exit-code + language: system + pass_filenames: false + always_run: true diff --git a/.readthedocs.yml b/.readthedocs.yml index 72ad6563db..64f8768db5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,6 +1,16 @@ version: 2 +build: + os: ubuntu-22.04 + tools: + python: '3.11' + python: - version: "3" install: - - requirements: docs/requirements.txt + - requirements: docs/requirements.txt + +# Build PDF, ePub and zipped HTML +formats: + - epub + - pdf + - htmlzip diff --git a/AUTHORS.md b/AUTHORS.md index 36b300279f..969994f016 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -53,9 +53,11 @@ Authors are sorted alphabetically. * Daniel Bast * Daniel Damiani * Daniel Holth +* Daniel Petry * Darren Dale * Dave Clements * Dave Hirschfeld +* Dave Karetnyk * David Froger * David Li * Derek Ludwig @@ -67,9 +69,13 @@ Authors are sorted alphabetically. * Eli Rykoff * Elliot Hughes * Eric Dill +* Ernst Luring * Evan Hubinger * Evan Klitzke +* Felix Kühnl +* Ferry Firmansjah * Filipe Fernandes +* Finn Womack * Floris Bruynooghe * Gabriel Reis * Gaëtan de Menten @@ -77,6 +83,7 @@ Authors are sorted alphabetically. * Greg Brener * Guilherme Quentel Melo * Guillaume Baty +* H. Vetinari * Harsh Gupta * Harun Reşit Zafer * Heather Kelly @@ -86,6 +93,7 @@ Authors are sorted alphabetically. * Ilan Schnell * Isuru Fernando * Ivan Kalev +* Jack Olivieri * Jacob Walls * Jaime Rodríguez-Guerra * James Abbott @@ -104,7 +112,9 @@ Authors are sorted alphabetically. * John Blischak * John Kirkham * John Omotani +* Johnny * Jonathan J. Helmus +* Jose Diaz-Gonzalez * Joseph Crail * Joseph Hunkeler * Juan Lasheras @@ -156,6 +166,7 @@ Authors are sorted alphabetically. * Patrick Snape * Patrick Sodré * Paul Madden +* Peet Whittaker * Peter Williams * Phil Elson * Phil Reinhold @@ -164,10 +175,12 @@ Authors are sorted alphabetically. * Rachel Rigdon * Ray Donnelly * Remi Chateauneu +* Riadh Fezzani * Riccardo Vianello * Richard Frank * Richard Hattersley * Rick Izzo +* Rishabh Singh * Robert Coop * Robert Langlois * Robert T. McGibbon @@ -176,6 +189,7 @@ Authors are sorted alphabetically. * Ruben Vorderman * Ryan Dale * Ryan Grout +* Ryan Keith * Rylan Chord * Satoshi Yagi * Scheah @@ -184,15 +198,18 @@ Authors are sorted alphabetically. * Sergio Oller * Serhii Kupriienko * Shaun Walbridge +* Shaun Walbridge * Siu Kwan Lam * Sophia Castellarin * Sophian Guidara +* Srivas Venkatesh * Stas Bekman * Stefan Scherfke * Stefan Zimmermann * Stephan Hoyer * Stephen Palmroth * Stuart Berg +* T Coxon * Tadeu Manoel * Takafumi Arakaki * Teake Nutma @@ -215,6 +232,7 @@ Authors are sorted alphabetically. 
* Vlad Frolov * Wes Turner * Wim Glenn +* Wolf Vollprecht * Wolfgang Ulmer * Yann * Yoav Ram diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e05ca33f3..42d745f874 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,638 @@ [//]: # (current developments) +## 24.3.0 (2024-03-15) + +### Enhancements + +* Add compatibility for `LIEF=0.14`. (#5227 via #5228) + +### Bug fixes + +* Fix `stdlib` being recognized in variant hash inputs. (#5190 via #5195) + +### Deprecations + +* Mark `conda_build.bdist_conda` module as pending deprecation. (#5196) +* Mark `conda_build.build.have_prefix_files` as deprecated. (#5199) +* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. (#5203) +* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203) +* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203) +* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.cache_actions` as deprecated. (#5203) +* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203) +* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203) +* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203) +* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203) +* Postpone `conda_build.index.channel_data` deprecation. (#5203) +* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203) +* Rename `conda_build.environ._execute_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.environ._display_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203) +* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203) +* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203) +* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203) +* Remove `conda_build.conda_interface.Dist`. (#5203) +* Remove `conda_build.conda_interface.display_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_plan`. (#5203) +* Remove `conda_build.conda_interface.install_actions`. (#5203) +* Remove `conda_build.conda_interface.linked`. (#5203) +* Remove `conda_build.conda_interface.linked_data`. (#5203) +* Remove `conda_build.conda_interface.package_cache`. (#5203) +* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203) +* Remove `conda_build.index._determine_namespace`. (#5203) +* Remove `conda_build.index._make_seconds`. (#5203) +* Remove `conda_build.index.REPODATA_VERSION`. (#5203) +* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203) +* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203) +* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) +* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) +* Remove `conda_build.index._clear_newline_chars`. (#5203) +* Remove `conda_build.index._get_jinja2_environment`. (#5203) +* Remove `conda_build.index._maybe_write`. (#5203) +* Remove `conda_build.index._make_build_string`. (#5203) +* Remove `conda_build.index._warn_on_missing_dependencies`. (#5203) +* Remove `conda_build.index._cache_post_install_details`. (#5203) +* Remove `conda_build.index._cache_recipe`. 
(#5203) +* Remove `conda_build.index._cache_run_exports`. (#5203) +* Remove `conda_build.index._cache_icon`. (#5203) +* Remove `conda_build.index._make_subdir_index_html`. (#5203) +* Remove `conda_build.index._make_channeldata_index_html`. (#5203) +* Remove `conda_build.index._get_source_repo_git_info`. (#5203) +* Remove `conda_build.index._cache_info_file`. (#5203) +* Remove `conda_build.index._alternate_file_extension`. (#5203) +* Remove `conda_build.index._get_resolve_object`. (#5203) +* Remove `conda_build.index._get_newest_versions`. (#5203) +* Remove `conda_build.index._add_missing_deps`. (#5203) +* Remove `conda_build.index._add_prev_ver_for_features`. (#5203) +* Remove `conda_build.index._shard_newest_packages`. (#5203) +* Remove `conda_build.index._build_current_repodata`. (#5203) +* Remove `conda_build.index.ChannelIndex`. (#5203) +* Remove `conda_build.inspect.check_install('prepend')`. (#5203) +* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203) +* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203) +* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203) +* Remove `conda_build.noarch_python._error_exit`. (#5203) +* Remove `conda_build.render.actions_to_pins`. (#5203) +* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203) +* Mark `conda_build.api.get_output_file_path` as deprecated. Use `conda_build.api.get_output_file_paths` instead. (#5208) +* Mark `conda_build.environ.Environment` as deprecated. Use `conda.core.prefix_data.PrefixData` instead. (#5219) +* Mark `conda_build.conda_interface.get_version_from_git_tag` as deprecated. Use `conda_build.environ.get_version_from_git_tag` instead. (#5221) + +### Docs + +* Update advice for installing conda-build into base environment. (#5223) + +### Other + +* Add a check to print an additional warning and return an empty string when bits is "arm64" in `msvc_env_cmd`. (#4867) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @finnagin made their first contribution in https://github.com/conda/conda-build/pull/4867 +* @kathatherine +* @kenodegard +* @mbargull +* @minrk +* @ryanskeith +* @travishathaway +* @pre-commit-ci[bot] + + + +## 24.1.2 (2024-02-15) + +### Bug fixes + +* Fix rpaths patcher being run on symbolic links. (#5179 via #5181) +* Fix corrupted package cache for outputs in subpackage tests. (#5184) + +### Contributors + +* @mbargull + + + +## 24.1.1 (2024-02-07) + +### Bug fixes + +* Fix nonzero exitcode on success. (#5167 via #5169) + +### Contributors + +* @kenodegard + + + +## 24.1.0 (2024-01-25) + +### Enhancements + +* Update `conda inspect channels` to use updated solver/transaction logic. (#5033) +* Relax `script_env` error in outputs when variable referenced in `script_env` is not defined. + This unifies current behavior with the top-level build. (#5105) +* Add support for Python 3.12. (#4997 via #4998) +* Adopt calender versioning (CalVer) per CEP-8 for consistency with conda. (#4975) +* Adopt expedited CEP-9 deprecation policy. (#5064) + +### Deprecations + +* Mark `conda inspect channels --test-installable` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) +* Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. 
(#5033) +* Mark `conda_build.conda_interface.Dist` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.display_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.execute_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.execute_plan` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.get_index` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.install_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.linked` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.linked_data` as pending deprecation. (#5074) +* Mark `conda_build.utils.linked_data_no_multichannels` as pending deprecation. (#5074) +* Mark `conda_build.environ.get_install_actions` as pending deprecation in favor of `conda_build.environ.get_package_records`. (#5152) +* Mark `conda_build.environ.create_env(specs_or_actions)` as pending deprecation in favor of `conda_build.environ.create_env(specs_or_precs)`. (#5152) +* Mark `conda_build.index.channel_data` as pending deprecation. (#5152) +* Mark `conda_build.index._determine_namespace` as pending deprecation. (#5152) +* Mark `conda_build.index._make_seconds` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_VERSION` as pending deprecation. (#5152) +* Mark `conda_build.index.CHANNELDATA_VERSION` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_JSON_FN` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_FROM_PKGS_JSON_FN` as pending deprecation. (#5152) +* Mark `conda_build.index.CHANNELDATA_FIELDS` as pending deprecation. (#5152) +* Mark `conda_build.index._clear_newline_chars` as pending deprecation. (#5152) +* Mark `conda_build.index._apply_instructions` as pending deprecation. (#5152) +* Mark `conda_build.index._get_jinja2_environment` as pending deprecation. (#5152) +* Mark `conda_build.index._maybe_write` as pending deprecation. (#5152) +* Mark `conda_build.index._maybe_build_string` as pending deprecation. (#5152) +* Mark `conda_build.index._warn_on_missing_dependencies` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_post_install_details` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_recipe` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_run_exports` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_icon` as pending deprecation. (#5152) +* Mark `conda_build.index._make_subdir_index_html` as pending deprecation. (#5152) +* Mark `conda_build.index._make_channeldata_index_html` as pending deprecation. (#5152) +* Mark `conda_build.index._get_source_repo_git_info` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_info_file` as pending deprecation. (#5152) +* Mark `conda_build.index._alternate_file_extension` as pending deprecation. (#5152) +* Mark `conda_build.index._get_resolve_object` as pending deprecation. (#5152) +* Mark `conda_build.index._get_newest_versions` as pending deprecation. (#5152) +* Mark `conda_build.index._add_missing_deps` as pending deprecation. (#5152) +* Mark `conda_build.index._add_prev_ver_for_features` as pending deprecation. (#5152) +* Mark `conda_build.index._shard_newest_packages` as pending deprecation. (#5152) +* Mark `conda_build.index._build_current_repodata` as pending deprecation. (#5152) +* Mark `conda_build.index.ChannelIndex` as pending deprecation. (#5152) +* Mark `conda_build.render.actions_to_pins` as pending deprecation. 
(#5152) +* Mark `conda_build.render.execute_download_actions(actions)` as pending deprecation in favor of `conda_build.render.execute_download_actions(precs)`. (#5152) +* Mark `conda_build.render.get_upstream_pins(actions)` as pending deprecation in favor of `conda_build.render.get_upstream_pins(precs)`. (#5152) +* Remove `conda_build.api.update_index`. (#5151) +* Remove `conda_build.cli.main_build.main`. (#5151) +* Remove `conda_build.cli.main_convert.main`. (#5151) +* Remove `conda_build.cli.main_debug.main`. (#5151) +* Remove `conda_build.cli.main_develop.main`. (#5151) +* Remove `conda_build.cli.main_index`. (#5151) +* Remove `conda_build.cli.main_inspect.main`. (#5151) +* Remove `conda_build.cli.main_metapackage.main`. (#5151) +* Remove `conda_build.cli.main_render.main`. (#5151) +* Remove `conda_build.cli.main_skeleton.main`. (#5151) +* Remove `conda_build.conda_interface.IndexRecord`. (#5151) +* Remove `conda_build.conda_interface.CrossPlatformStLink`. (#5151) +* Remove `conda_build.conda_interface.SignatureError`. (#5151) +* Remove `conda_build.conda_interface.which_package`. (#5151) +* Remove `conda_build.conda_interface.which_prefix`. (#5151) +* Remove `conda_build.conda_interface.get_installed_version`. (#5151) +* Remove `conda_build.config.python2_fs_encode`. (#5151) +* Remove `conda_build.config._ensure_dir`. (#5151) +* Remove `conda_build.config.Config.CONDA_LUA`. (#5151) +* Remove `conda_build.config.Config.CONDA_PY`. (#5151) +* Remove `conda_build.config.Config.CONDA_NPY`. (#5151) +* Remove `conda_build.config.Config.CONDA_PERL`. (#5151) +* Remove `conda_build.config.Config.CONDA_R`. (#5151) +* Remove `conda_build.environ.clean_pkg_cache`. (#5151) +* Remove `conda_build.index.update_index`. (#5151) +* Remove `conda_build.inspect_pkg.dist_files`. (#5151) +* Remove `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)`. (#5151) +* Remove `conda_build.inspect_pkg._installed`. (#5151) +* Remove `conda_build.metadata.Metadata.name(fail_ok)`. (#5151) +* Remove `conda_build.os_utils.ldd.get_package_files`. (#5151) +* Remove `conda_build.os_utils.liefldd.is_string`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type_liefldd`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type_pyldd`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type`. (#5151) +* Remove `conda_build.os_utils.pyldd.mach_o_change`. (#5151) +* Remove `conda_build.os_utils.pyldd.is_codefile`. (#5151) +* Remove `conda_build.os_utils.pyldd.codefile_type`. (#5151) +* Remove `conda_build.os_utils.pyldd.inspect_rpaths`. (#5151) +* Remove `conda_build.os_utils.pyldd.get_runpaths`. (#5151) +* Remove `conda_build.os_utils.pyldd.otool_sys`. (#5151) +* Remove `conda_build.os_utils.pyldd.ldd_sys`. (#5151) +* Remove `conda_build.plugin.index`. (#5151) +* Remove `conda_build.post.determine_package_nature`. (#5151) +* Remove `conda_build.post.library_nature(subdir)`. (#5151) +* Remove `conda_build.post.library_nature(bldpkgs_dirs)`. (#5151) +* Remove `conda_build.post.library_nature(output_folder)`. (#5151) +* Remove `conda_build.post.library_nature(channel_urls)`. (#5151) +* Remove `conda_build.post.dists_from_names`. (#5151) +* Remove `conda_build.post.FakeDist`. (#5151) +* Remove `conda_build.post._get_fake_pkg_dist`. (#5151) +* Remove `conda_build.utils.relative`. (#5151) +* Remove `conda_build.utils.samefile`. (#5151) + +### Docs + +* Add GoatCounter (https://www.goatcounter.com/) as an analytics tool. (#5093) + +### Other + +* Remove unused Allure test report collection. 
(#5113) +* Remove dependency on `conda.plan`. (#5074) +* Remove almost all dependency on `conda.models.dist`. (#5074) +* Replace usage of legacy `conda.models.dist.Dist` with `conda.models.records.PackageRecord`. (#5074) + +### Contributors + +* @conda-bot +* @dholth +* @jaimergp +* @jezdez +* @johnnynunez +* @kenodegard +* @msarahan +* @travishathaway +* @pre-commit-ci[bot] + + + +## 3.28.4 (2024-01-17) + +### Bug fixes + +* Fix linking check regressions by restoring pre-3.28 behavior for `conda_build.inspect_pkg.which_package`. (#5141) + +### Contributors + +* @mbargull + + + +## 3.28.3 (2024-01-04) + +### Bug fixes + +* Update `conda_build.os_utils.liefldd.ensure_binary` to handle `None` inputs. (#5123 via #5124) +* Update `conda_build.inspect_pkg.which_package` to use a cached mapping of paths to packages (first call: `O(n)`, subsequent calls: `O(1)`) instead of relying on `Path.samefile` comparisons (`O(n * m)`). (#5126 via #5130) + +### Contributors + +* @kenodegard + + + +## 3.28.2 (2023-12-15) + +### Enhancements + +* Update `conda_build.metadata.MetaData.get_section` to consistently return lists for "source" and "outputs". (#5111 via #5112) + +### Bug fixes + +* Resolve duplicate package record issue in `conda_build.inspect_pkg.which_package`. (#5106 via #5108) +* Ensure `conda_build.post._lookup_in_prefix_packages` displays `str(PackageRecord)` instead of `repr(PackageRecord)`. (#5106 via #5108) +* Fix finalization of recipes with multiple sources. (#5111 via #5112) +* Improve handling by catching the more general `ImportError` instead of `ModuleNotFoundError` to cover cases involving `menuinst 1.x`. (#5116) + +### Contributors + +* @jaimergp +* @kenodegard + + + +## 3.28.1 (2023-12-06) + +### Bug fixes + +* Relax `conda_build.metadata.MetaData.version` checks when `outputs` have been defined. (#5096) +* Remove `lief` from `pyproject.toml` since it causes `pip check` to fail. To be re-added in the future after an update to `py-lief` package. (#5099) + +### Contributors + +* @dholth +* @kenodegard + + + +## 3.28.0 (2023-11-30) + +### Special announcement + +In the upcoming January 2024 release of conda-build, significant changes are underway. We're set to transition to the [CalVer](https://calver.org/) versioning system. Additionally, we'll be formally embracing [CEP 8](https://github.com/conda-incubator/ceps/blob/main/cep-8.md) to manage our release schedule. Moreover, an expedited version of [CEP 9](https://github.com/conda-incubator/ceps/blob/main/cep-8.md) will be adopted for deprecation handling, omitting the pending deprecation phase and streamlining the period from deprecation to removal to a mere 2 months. + +### Enhancements + +* Add `stblib` jinja function similar to `compiler` to explicitly define sysroot dependencies. (#4999) +* Utilize conda-known subdirs for selector definitions, enabling conda_build to support new architectures with only an updated conda version. New OS support requires additional information for proper conda_build functionality, including UNIX-like platform designation, shared library prefix, and binary archive format for the platform. (#5009) +* Eliminate unnecessary cache clearing from `conda_build.build.test`. (#5031) +* Consolidate `which_package` implementations and replace `conda.models.dist.Dist` usage with `conda.models.records.PrefixRecords`. (#5041) + +### Bug fixes + +* Display package file name in `get_hash_input`. (#5021) +* Fall back to solved record filename to locate the downloaded tarball in `get_upstream_pins`. 
(#4991 via #5037) +* Prevent overwriting of variants in high priority cbc.yaml entries when absent in lower priority cbc.yamls. (#5039) +* Correct the check for a missing anaconda-client to display a useful error message. (#5050) +* Fix conda_index.index verbose DEBUG/INFO message logging. (#5066) + +### Deprecations + +* Mark `conda_build.environ.clean_pkg_cache` for pending deprecation. (#5031) +* Mark `conda_build.conda_interface.IndexRecord` for pending deprecation. Use `conda.models.records.PackageRecord` instead. (#5032) +* Mark `conda_build.os_utils.pyldd.is_string` for pending deprecation. Use `isinstance(value, str)` instead. (#5040) +* Mark `conda_build.os_utils.pyldd.is_codefile` for pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) +* Mark `conda_build.os_utils.pyldd.codefile_type` for pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) +* Mark `conda_build.inspect_pkg.dist_files` for pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)` for pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg._installed` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.ldd.get_package_files` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.mach_o_change` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.inspect_rpath` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.get_runpaths` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.otool_sys` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.ldd_sys` for pending deprecation. (#5041) +* Mark `conda_build.post.determine_package_nature` for pending deprecation. Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead. (#5041) +* Mark `conda_build.post.library_nature(subdir, bldpkgs_dirs, output_folder, channel_urls)` for pending deprecation. (#5041) +* Mark `conda_build.post.dist_from_names` for pending deprecation. Query `conda.core.prefix_data.PrefixData` instead. (#5041) +* Mark `conda_build.post.FakeDist` for pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.post._get_fake_pkg_dist` for pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.utils.relative` for pending deprecation. Use `os.path.relpath` or `pathlib.Path.relative_to` instead. (#5042) + +### Docs + +* Incorporate the conda-sphinx-theme into conda-build documentation. (#5067) +* Update certain pages to remove redundant TOC entries. (#5067) + +### Other + +* Implement Ruff linter in pre-commit configuration. (#5015) +* Replace `black` with `ruff format` in pre-commit setup. (#5052) +* Identify Unicode tests as incompatible with `libmamba`. (#5059) + + +### Contributors + +* @conda-bot +* @danpetry made their first contribution in https://github.com/conda/conda-build/pull/5039 +* @duncanmmacleod +* @h-vetinari made their first contribution in https://github.com/conda/conda-build/pull/4999 +* @isuruf +* @jaimergp +* @jakirkham +* @kenodegard +* @mbargull +* @travishathaway +* @pre-commit-ci[bot] + + + +## 3.27.0 (2023-09-26) + +### Enhancements + +* Remove `glob2` dependency. As of Python 3.5, the '**', operator was available to `glob` when using `recursive=True`. Builtin glob is also much faster. (#5005) +* Handle `emscripten-wasm32` and `wasi-wasm32` platforms. 
(#4813) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding import-time side effects. (#4949) + +### Deprecations + +* When templating new recipes from a PyPI package, the build script `{{ PYTHON }} -m pip install . -vv` is deprecated in favor of `{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation`. (#4960) + +### Docs + +* Document `~=` (compatibility release) match spec. (#4553) +* Clarify that the `build` prefix is activated _after_ the `host` prefix. (#4942) +* Add explanation that conda-build should be run from the base environment. (#4995) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @DaveKaretnyk made their first contribution in https://github.com/conda/conda-build/pull/5004 +* @boldorider4 made their first contribution in https://github.com/conda/conda-build/pull/4960 +* @jaimergp +* @jezdez +* @jugmac00 +* @kenodegard +* @ryanskeith +* @scdub made their first contribution in https://github.com/conda/conda-build/pull/4965 +* @wolfv made their first contribution in https://github.com/conda/conda-build/pull/4813 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + +## 3.26.1 (2023-08-17) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding + import-time side effects including unwanted logging configuration. (#4949) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @jezdez +* @kenodegard +* @pre-commit-ci[bot] + + + +## 3.26.0 (2023-07-18) + +### Enhancements + +* Add `pip` to `env-doc make` command so function works correctly (`pip` is no longer added by default with the python conda package). (#4633) +* Log extra-meta data to make it easier to verify that the right extra-meta data is burned into packages (also helps to co-relate packages and their build-log). The feature was first introduced in #4303 and is now improved via the logging call. (#4901) +* Implement subcommands as conda plugins. (#4921) + +### Bug fixes + +* Fix handling of unknown binaries with newer `(py)lief` versions. (#4900) +* Disable `LIEF` logging to remove "Unknown format" warning message. (#4850) +* Revert `enable_static` default value in `conda_build.config` to remove "Failed to get_static_lib_exports" warning messages. (#4850) +* Avoid duplicate logging by not propagating the top-level conda-build logger. (#4903) +* Fix git cloning for repositories with submodules containing local relative paths. (#4914) + +### Deprecations + +* Mark executable invocations (e.g., `conda-build`) as pending deprecation. (#4921) +* Mark module based invocations (e.g., `python -m conda_build.cli.main_build`) as pending deprecation. (#4921) + +### Docs + +* Update `pkg-spec` docs to mention `.conda` package format. (#4633) +* Drop unnecessary Jinja package name variables from `variants.rst` docs file. (#4834) + +### Other + +* Drop duplicate `get_summary` call in `conda_build.skeletons.pypi`. (#3998) +* Fix failing `resolved_packages` test due to recent OpenSSL 3.0.8 release to defaults. 
(#4912) + +### Contributors + +* @beeankha +* @conda-bot +* @dbast +* @jaimergp +* @jakirkham +* @josegonzalez made their first contribution in https://github.com/conda/conda-build/pull/3998 +* @katietz +* @kenodegard +* @rfezzani made their first contribution in https://github.com/conda/conda-build/pull/4850 +* @ryanskeith +* @sven6002 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + +## 3.25.0 (2023-05-22) + +### Enhancements + +* Noarch packages that use virtual packages have the virtual packages added to the hash contents of the package. This facilitates the building of noarch packages multiple times for different platforms with platform specific dependencies. (#4606) +* Add support for `svn` source credentials (`svn_username` and `svn_password`). (#4692) +* Depend on standalone `conda-index` instead of bundled indexing code. (#4828) +* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) +* Add Python 3.11 support. (#4852) + +### Bug fixes + +* Ensure `tests/commands` are also run in the presence of `run_test.*` (#4429) +* Require the source when rendering a recipe that uses the `load_file_data` function. (#4817) +* Download packages during build into the correct `subdir` folder. (#4832) +* Use a unique `subdir` variable name when rebuilding the index for multi-output builds. (#4862) + +### Deprecations + +* Inline `conda index` logic is pending deprecation. `conda-build` still provides `conda-index` a.k.a. `conda index` CLI, but uses standalone `conda-index` during builds. (#4828) +* Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), instead of `conda-build index` or `conda index`, to use faster indexing code. (#4828) +* Mark `conda_build.metadata.ns_cfg` as pending deprecation. Use `conda_build.get_selectors.get_selectors` instead. (#4837) +* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) +* Mark `conda_build.config._ensure_dir` as pending deprecation. Use `stdlib`'s `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) + +### Other + +* Format with `black` and replaced pre-commit's `darker` hook with `black`. (#4836) +* Format with `isort` and add pre-commit `isort` hook. (#4836) +* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. (#4843) +* Enable `xattr` test on macOS. (#4845) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @duncanmmacleod +* @ffirmanff made their first contribution in https://github.com/conda/conda-build/pull/4692 +* @isuruf +* @jezdez +* @jakirkham +* @jjhelmus +* @kenodegard +* @rishabh11336 made their first contribution in https://github.com/conda/conda-build/pull/4782 +* @ryanskeith made their first contribution in https://github.com/conda/conda-build/pull/4843 +* @pre-commit-ci[bot] + + +## 3.24.0 (2023-03-22) + +### Bug fixes + +* Fix the failing `git clone` when source has LFS files. (#4318) +* Fix many false-positives during the detection of Perl core modules in `conda skeleton cpan`. (#4592) +* `conda skeleton cpan` now correctly adds a C compiler as dependency if the distribution contains an `.xs` file. (#4599) +* Install downstream packages in correct subdir. (#4763, #4803) +* Update supported Python version in setup.py. (#4804) + +### Deprecations + +* Removed conda <4.13 logic. (#4677) +* `conda_build.conda_interface.CrossPlatformStLink` is pending deprecation in favor of using `os.stat().st_nlink`. (#4728) +* Drop Python 3.7 support. 
(#4796) + +### Docs + +* Updated broken links to example conda recipes and updated link to the now archived conda-recipes, with additional links to AnacondaRecipes aggregated feedstocks and conda-forge feedstocks. (#4580) +* Replaced two instances of "Anaconda Cloud" with "anaconda.org". (#4719) + +### Other + +* Update test matrix to run tests on all supported Python versions on Linux. Only run tests on lower & upper Python bounds for Windows and macOS. (#4691) +* Re-enable code coverage reporting to `codecov`. (#4767) +* Eliminate test setup's manual clone of https://github.com/conda/conda_build_test_recipe in favor of a session fixture. (#4781) +* Use `tomllib` (Python 3.11+) or `tomli` for `.toml` support. (#4783) + +### Contributors + +* @beeankha +* @conda-bot +* @dbast +* @dholth +* @ernstluring made their first contribution in https://github.com/conda/conda-build/pull/4318 +* @xileF1337 made their first contribution in https://github.com/conda/conda-build/pull/4592 +* @jezdez +* @jakirkham +* @johnnynunez made their first contribution in https://github.com/conda/conda-build/pull/4804 +* @kathatherine +* @kenodegard +* @minrk +* @peetw made their first contribution in https://github.com/conda/conda-build/pull/4662 +* @sven6002 made their first contribution in https://github.com/conda/conda-build/pull/4621 +* @tttc3 made their first contribution in https://github.com/conda/conda-build/pull/4580 +* @dependabot[bot] +* @pre-commit-ci[bot] + +## 3.23.3 (2022-12-06) + +### Bug fixes + +* Change Zstd default compression to 19. (#4663) +* Fix build/host environment activation broken in >=3.23.0,<=3.23.2. (#4665) +* Add `PREFIX/bin` to `PATH` on Windows and remove `PREFIX` root from `PATH` on Unix. (#4665) + +### Other + +* Skip test suite for non-code changes. (#4664) + +### Contributors + +* @jakirkham +* @kenodegard +* @mbargull + +## 3.23.2 (2022-11-30) + +### Bug fixes + +* `conda-build` CLI overrode `condarc`'s `zstd_compression_level` with the default value. (#4650) + +### Contributors + +* @kenodegard +* @mbargull +* @pre-commit-ci[bot] + ## 3.23.1 (2022-11-17) ### Bug fixes @@ -2579,7 +3212,7 @@ https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#host * pyldd: disambiguate java .class files from Mach-O fat files (same magic number) #2328 * fix hash regex for downloaded files in `src_cache` #2330 * fix `zip_keys` becoming a loop dimension when variants passed as object rather than loaded from file #2333 -* fix windows always warning about old compiler activation. Now only warns if {{ compiler() }} is not used. #2333 +* fix windows always warning about old compiler activation. Now only warns if `{{ compiler() }}` is not used. #2333 * Add `LD_RUN_PATH` back into Linux variables for now (may remove later, but will have deprecation cycle) #2334 ### Contributors diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 1c3434a0ef..663464fe82 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,6 +1,6 @@ # Conda Organization Code of Conduct -> **Note** +> [!NOTE] > Below is the short version of our CoC, see the long version [here](https://github.com/conda-incubator/governance/blob/main/CODE_OF_CONDUCT.md). 
# The Short Version diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4c1c2227bf..683faf9597 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,7 +50,7 @@ impact the functionality of `conda/conda-build` installed in your base environme ``` bash # create/activate standalone dev env - $ CONDA_ENV=conda-build make setup + $ ENV_NAME=conda-build make setup $ conda activate conda-build # Run all tests on Linux and Mac OS X systems (this can take a long time) @@ -64,10 +64,6 @@ impact the functionality of `conda/conda-build` installed in your base environme ## Testing -Running our test suite requires cloning one other repo at the same level as `conda-build`: -https://github.com/conda/conda_build_test_recipe - this is necessary for relative path tests -outside of `conda-build`'s build tree. - Follow the installation instructions above to properly set up your environment for testing. The test suite runs with `pytest`. The following are some useful commands for running specific diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index e9cf001e64..46a13ecd98 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -6,42 +6,52 @@ [project-sorting]: https://github.com/orgs/conda/projects/2/views/11 [project-support]: https://github.com/orgs/conda/projects/2/views/12 [project-backlog]: https://github.com/orgs/conda/projects/2/views/13 -[project-sprint]: https://github.com/orgs/conda/projects/2/views/14 +[project-in-progress]: https://github.com/orgs/conda/projects/2/views/14 [docs-toc]: https://github.blog/changelog/2021-04-13-table-of-contents-support-in-markdown-files/ [docs-actions]: https://docs.github.com/en/actions [docs-saved-reply]: https://docs.github.com/en/get-started/writing-on-github/working-with-saved-replies/creating-a-saved-reply +[docs-commit-signing]: https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits -[workflow-sync]: https://github.com/conda/infra/blob/main/.github/workflows/sync.yml -[labels-global]: https://github.com/conda/infra/blob/main/.github/global.yml +[infrastructure]: https://github.com/conda/infrastructure +[workflow-sync]: https://github.com/conda/infrastructure/blob/main/.github/workflows/sync.yml +[labels-global]: https://github.com/conda/infrastructure/blob/main/.github/global.yml +[workflow-cla]: /.github/workflows/cla.yml [workflow-issues]: /.github/workflows/issues.yml +[workflow-labels]: /.github/workflows/labels.yml +[workflow-lock]: /.github/workflows/lock.yml [workflow-project]: /.github/workflows/project.yml +[workflow-stale]: /.github/workflows/stale.yml [labels-local]: /.github/labels.yml +[labels-page]: ../../labels -## How We Use GitHub +# How We Use GitHub -> **Note** -> For easy navigation use [GitHub's table of contents feature][docs-toc]. - -This document seeks to outline how we as a community use GitHub Issues to track bugs and feature requests while still catering to development practices & project management (*e.g.*, release cycles, feature planning, priority sorting, etc.). +This document seeks to outline how we as a community use GitHub Issues to track bugs and feature requests while still catering to development practices & project management (_e.g._, release cycles, feature planning, priority sorting, etc.). 
-Topics: - - [What is Issue Sorting?](#what-is-issue-sorting) - - [Types of tickets](#types-of-tickets) - - [Normal Ticket/Issue](#normal-ticketissue) +**Topics:** + + - [What is "Issue Sorting"?](#what-is-issue-sorting) + - [Issue Sorting Procedures](#issue-sorting-procedures) + - [Commit Signing](#commit-signing) + - [Types of Issues](#types-of-issues) + - [Standard Issue](#standard-issue) - [Epics](#epics) - [Spikes](#spikes) + - [Working on Issues](#working-on-issues) +> [!NOTE] +> This document is written in the style of an FAQ. For easier navigation, use [GitHub's table of contents feature][docs-toc]. -### What is "Issue Sorting"? +## What is "Issue Sorting"? -> **Note** -> "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (*e.g.*, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (*e.g.*, severities may not be assigned, etc.). +> [!NOTE] +> "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (_e.g._, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (_e.g._, severities may not be assigned, etc.). -"Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of tickets: +"Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of issues: ```mermaid flowchart LR @@ -58,8 +68,8 @@ flowchart LR board_backlog-- refine -->board_backlog end - subgraph flow_sprint [Sprint] - board_sprint{{Sprint}} + subgraph flow_progress [In Progress] + board_progress{{In Progress}} end state_new(New Issues) @@ -69,15 +79,12 @@ flowchart LR board_sorting-- investigated -->board_backlog board_sorting-- duplicates, off-topic -->state_closed board_support-- resolved, unresponsive -->state_closed - board_backlog-- pending work -->board_sprint + board_backlog-- pending work -->board_progress board_backlog-- resolved, irrelevant -->state_closed - board_sprint-- resolved -->state_closed + board_progress-- resolved -->state_closed ``` -In order to explain how various `conda` issues are evaluated, the following document will provide information about our sorting process in the form of an FAQ. - - -#### Why sort issues? +### Why sort issues? At the most basic "bird's eye view" level, sorted issues will fall into the category of four main priority levels: @@ -88,117 +95,137 @@ At the most basic "bird's eye view" level, sorted issues will fall into the cate At its core, sorting enables new issues to be placed into these four categories, which helps to ensure that they will be processed at a velocity similar to or exceeding the rate at which new issues are coming in. One of the benefits of actively sorting issues is to avoid engineer burnout and to make necessary work sustainable; this is done by eliminating a never-ending backlog that has not been reviewed by any maintainers. -There will always be broad-scope design and architecture implementations that the `conda` maintainers will be interested in pursuing; by actively organizing issues, the sorting engineers will be able to more easily track and tackle both specific and big-picture goals. - -#### Who does the sorting? 
- -Sorting engineers are a `conda` governance [sub-team][sub-team]; they are a group of Anaconda and community members who are responsible for making decisions regarding closing issues and setting feature work priorities, amongst other sorting-related tasks. +There will always be broad-scope design and architecture implementations that the maintainers will be interested in pursuing; by actively organizing issues, the sorting engineers will be able to more easily track and tackle both specific and big-picture goals. +### Who does the sorting? -#### How do items show up for sorting? +Sorting engineers are a conda governance [sub-team][sub-team]; they are a group of community members who are responsible for making decisions regarding closing issues and setting feature work priorities, among other sorting-related tasks. -New issues that are opened in any of the repositories in the [`conda` GitHub project][conda-org] will show up in the `Sorting` view of the [Planning project][project-planning]. This process is executed via [GitHub Actions][docs-actions]. The two main GitHub Actions workflows utilized for this purpose are [Issues][workflow-issues] and [Project][workflow-project]. +### How do items show up for sorting? -The GitHub Actions in the `conda/infra` repository are viewed as canonical; the [Sync workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. +New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/project.yml`][workflow-project]. +The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. -#### What is done about the issues in "sorting" mode? +### What is done about the issues in the "Sorting" tab? -Issues in the ["Sorting" tab of the project board][project-sorting] have been reviewed by a sorting engineer and are considered ready for the following procedures: +Issues in the ["Sorting" tab of the project board][project-sorting] are considered ready for the following procedures: - Mitigation via short-term workarounds and fixes - Redirection to the correct project - Determining if support can be provided for errors and questions - Closing out of any duplicate/off-topic issues -The sorting engineers on rotation are not seeking to _resolve_ issues that arise. Instead, the goal is to understand the ticket and to determine whether it is an issue in the first place, and then to collect as much relevant information as possible so that the maintainers of `conda` can make an informed decision about the appropriate resolution schedule. +The sorting engineers on rotation are not seeking to _resolve_ issues that arise. Instead, the goal is to understand the issue and to determine whether it is legitimate, and then to collect as much relevant information as possible so that the maintainers can make an informed decision about the appropriate resolution schedule. -Issues will remain in the "Sorting" tab as long as the issue is in an investigatory phase (_e.g._, querying the user for more details, asking the user to attempt other workarounds, other debugging efforts, etc.) 
and are likely to remain in this state the longest, but should still be progressing over the course of 1-2 weeks. +Issues will remain in the ["Sorting" tab][project-sorting] as long as the issue is in an investigatory phase (_e.g._, querying the user for more details, asking the user to attempt other workarounds, other debugging efforts, etc.) and are likely to remain in this state the longest, but should still be progressing over the course of 1-2 weeks. +For more information on the sorting process, see [Issue Sorting Procedures](#issue-sorting-procedures). -#### When do items move out of the "Sorting" tab? +### When do items move out of the "Sorting" tab? -The additional tabs in the project board that the issues can be moved to include the following: +Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following: -- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. All issues considered "support" should include the https://github.com/conda/infra/labels/type%3A%3Asupport label. -- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. +- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) label to move an issue to this tab. +- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the [`backlog`](https://github.com/conda/infrastructure/labels/backlog) label to move an issue to this tab. - **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc. +### Where do work issues go after being sorted? -#### Where do items go after being sorted? +Once issues are deemed ready to be worked on, they will be moved to the ["Backlog" tab of the Planning board][project-backlog]. Once actively in progress, the issues will be moved to the ["In Progress" tab of the Planning board][project-in-progress] and then closed out once the work is complete. -All sorted issues will be reviewed by sorting engineers during a weekly Refinement meeting in order to understand how those particular issues fit into the short- and long-term roadmap of `conda`. 
These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests. +### What is the purpose of having a "Backlog"? -Once issues are deemed ready to be worked on, they will be moved to the [`conda` Backlog tab of the Planning board][project-backlog] on GitHub. Once actively in progress, the issues will be moved to the [Sprint tab of the Planning board][project-sprint] and then closed out once the work is complete. +Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release. +### What automation procedures are currently in place? -#### What is the purpose of having a "Backlog"? +Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include: -Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release. Weekly Refinement meetings are a time when the `conda` engineers will transition issues from "[Sorting][project-sorting]" to "[Backlog][project-backlog]". Additionally, this time of handoff will include discussions around the kind of issues that were raised, which provides an opportunity to identify any patterns that may point to a larger problem. +- [Marking of issues and pull requests as stale][workflow-stale], resulting in: + - issues marked as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) + - all other inactive issues (not labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport)) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) + - all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) +- [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days +- [Adding new issues and pull requests to the respective project boards][workflow-project] +- [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling [`pending::feedback`](https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback) with [`pending::support`](https://github.com/conda/infrastructure/labels/pending%3A%3Asupport) after a contributor leaves a comment +- [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is blocked until a manual review can be done +- [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories +## Issue Sorting Procedures -#### What is the purpose of a "development sprint"? +### How are issues sorted?
-After issues have been sorted and backlogged, they will eventually be moved into the "Sprint Candidate", "Short-Term", "Medium-Term", "Long-Term", or "No Time Frame" sections of the [Backlog tab of the Planning board][project-backlog] and get one or more sprint cycles dedicated to them. +Issues in the ["Sorting" tab of the Planning board][project-sorting] are reviewed by issue sorting engineers, who take rotational sorting shifts. In the process of sorting issues, engineers label the issues and move them to the other tabs of the project board for further action. -The purpose of a development sprint is to enable a steady delivery of enhancements, features, and bug fixes by setting aside pre-determined portions of time that are meant for focusing on specifically-assigned items. +Issues that require input from multiple members of the sorting team will be brought up during refinement meetings in order to understand how those particular issues fit into the short- and long-term roadmap. These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests. -Sprints also serve to focus the engineering team's attention on more accurate planning for what is to come during the entire release cycle, as well as keep the scope of development work concise. They enable the setting aside of dedicated time for the engineers to resolve any problems with the work involved, instead of pushing these problems to the end of the release cycle when there may not be any time remaining to fix issues. +### How does labeling work? +Labeling is a very important means for sorting engineers to keep track of the current state of an issue with regards to the asynchronous nature of communicating with users. Utilizing the proper labels helps to identify the severity of the issue as well as to quickly understand the current state of a discussion. -#### How does labeling work? +Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category. -Labeling is a very important means for sorting engineers to keep track of the current state of an issue with regards to the asynchronous nature of communicating with users. Utilizing the proper labels helps to identify the severity of the issue as well as to quickly understand the current state of a discussion. +Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug), [`type::feature`](https://github.com/conda/infrastructure/labels/type%3A%3Afeature), and [`type::documentation`](https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation), where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). 
Alternatively, with issues involving specific operating systems (_i.e._, [`os::linux`](https://github.com/conda/infrastructure/labels/os%3A%3Alinux), [`os::macos`](https://github.com/conda/infrastructure/labels/os%3A%3Amacos), and [`os::windows`](https://github.com/conda/infrastructure/labels/os%3A%3Awindows)), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. + +Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport), that issue will be marked [`stale`](https://github.com/conda/infrastructure/labels/stale) after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. + +### What labels are required for each issue? + +At minimum, both `type` and `source` labels should be specified on each issue before moving it from the "Sorting" tab to the "Backlog" tab. All issues that are bugs should also be tagged with a `severity` label. -Generally speaking, labels with the same category are considered mutually exclusive but in some cases labels sharing the same category can occur concurrently as they indicate qualifiers as opposed to types. For example, we may have the following types, https://github.com/conda/infra/labels/type%3A%3Abug, https://github.com/conda/infra/labels/type%3A%3Afeature, and https://github.com/conda/infra/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue shouldn’t be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/infra/labels/os%3A%3Alinux, https://github.com/conda/infra/labels/os%3A%3Amacos, and https://github.com/conda/infra/labels/os%3A%3Awindows), an issue could be labeled with one or more depending on the system(s) the issue is occurring on. +The `type` labels are exclusive of each other: each sorted issue should have exactly one `type` label. These labels give high-level information on the issue's classification (_e.g._, bug, feature, tech debt, etc.) -Please note that there are also automation policies in place. For example, if an issue is labeled as https://github.com/conda/infra/labels/pending%3A%3Afeedback and https://github.com/conda/infra/labels/unreproducible, that issue will be auto-closed after a month of inactivity. +The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups. +The `severity` labels are exclusive of each other and, while required for the [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%bug) label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration. -#### How are new labels defined? 
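As an illustrative aside (not part of the documented sorting workflow), labels like these can also be applied from the command line with the GitHub CLI; the exact label names below are hypothetical examples, so check the repository's labels page for the real set:

```bash
# Hypothetical sketch: apply one `type`, one `source`, and one `severity`
# label to issue #1234 using the GitHub CLI (label names are examples only).
gh issue edit 1234 --repo conda/conda-build \
  --add-label "type::bug" \
  --add-label "source::community" \
  --add-label "severity::3"
```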
+Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use. -Labels are defined using a scoped syntax with an optional high-level category (_e.g._, source, tag, type, etc.) and a specific topic, much like the following: +### How are new labels defined? + +Labels are defined using a scoped syntax with an optional high-level category (_e.g._, `source`, `tag`, `type`, etc.) and a specific topic, much like the following: - `[topic]` - `[category::topic]` - `[category::topic-phrase]` -This syntax helps with issue sorting enforcement; at minimum, both `type` and `source` labels should be specified on each issue before moving it from "`Sorting`" to "`Backlog`". - -There are a number of labels that have been defined for the different `conda` projects. In order to create a streamlined sorting process, label terminologies are standardized using similar (if not the same) labels. - +This syntax helps with issue sorting enforcement, as it helps to ensure that sorted issues are, at minimum, categorized by type and source. -#### How are new labels added? +There are a number of labels that have been defined for the different repositories. In order to create a streamlined sorting process, label terminologies are standardized using similar (if not the same) labels. -New **global** labels (_i.e._, generic labels that apply equally to all `conda` repos) can be added to the `conda/infra`'s [`.github/global.yml` file][labels-global]; new **local** labels (_i.e._, labels specific to particular `conda` repos) can be added to each repository's [`.github/labels.yml`][labels-local] file. All new labels should follow the labeling syntax described in the "How are new labels defined?" section of this document. +### How are new labels added? +New **global** labels (_i.e._, labels that apply equally to all repositories within the conda GitHub organization) are added to [`conda/infrastructure`][infrastructure]'s [`.github/global.yml` file][labels-global]; new **local** labels (_i.e._, labels specific to particular repositories) are added to each repository's [`.github/labels.yml` file][labels-local]. All new labels should follow the labeling syntax described in ["How are new labels defined?"](#how-are-new-labels-defined). Global labels are combined with any local labels and these aggregated labels are used by the [`.github/workflows/labels.yml` workflow][workflow-labels] to synchronize the labels available for the repository. -#### Are there any templates to use as responses for commonly-seen issues? +### Are there any templates to use as responses for commonly-seen issues? -Some of the same types of issues appear regularly (_e.g._, issues that are duplicates of others, tickets that should be filed in the Anaconda issue tracker, errors that are due to a user's specific setup/environment, etc.). +Some of the same types of issues appear regularly (_e.g._, issues that are duplicates of others, issues that should be filed in the Anaconda issue tracker, errors that are due to a user's specific setup/environment, etc.). Below are some boilerplate responses for the most commonly-seen issues to be sorted:
Duplicate Issue +
 
 This is a duplicate of [link to primary issue]; please feel free to continue the discussion there.
 
-> **Warning** -> Apply the https://github.com/conda/infra/labels/duplicate label to the issue being closed and https://github.com/conda/infra/labels/duplicate%3A%3Aprimary to the original issue. +> [!WARNING] +> Apply the https://github.com/conda/infrastructure/labels/duplicate label to the issue being closed and https://github.com/conda/infrastructure/labels/duplicate%3A%3Aprimary to the original issue.
Requesting an Uninstall/Reinstall of conda +
 
 Please uninstall your current version of `conda` and reinstall the latest version.
-Feel free to use either the [miniconda](https://docs.conda.io/en/latest/miniconda.html)
+Feel free to use either the [miniconda](https://docs.anaconda.com/free/miniconda/)
 or [anaconda](https://www.anaconda.com/products/individual) installer,
 whichever is more appropriate for your needs.
 
@@ -208,6 +235,7 @@ whichever is more appropriate for your needs.
Redirect to Anaconda Issue Tracker +
 
 Thank you for filing this issue! Unfortunately, this is off-topic for this repo.
@@ -216,14 +244,15 @@ If you are still encountering this issue please reopen in the
 where `conda` installer/package issues are addressed.
 
-> **Warning** -> Apply the https://github.com/conda/infra/labels/off-topic label to these tickets before closing them out. +> [!WARNING] +> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
Redirecting to Nucleus Forums +
 
 Unfortunately, this issue is outside the scope of support we offer via GitHub;
@@ -231,24 +260,75 @@ if you continue to experience the problems described here,
 please post details to the [Nucleus forums](https://community.anaconda.cloud/).
 
-> **Warning** -> Apply the https://github.com/conda/infra/labels/off-topic label to these tickets before closing them out. +> [!WARNING] +> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
-In order to not have to manually type or copy/paste the above repeatedly, please note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. +
+Slow solving of conda environment + + +
+Hi [@username],
 
+Thanks for voicing your concern about the performance of the classic dependency solver. To fix this, our official recommendation is to use the new default "conda-libmamba-solver" instead of the classic solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
 
-### Types of Tickets
+In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
+
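For reference when using the reply above, switching an existing installation over to the libmamba solver typically amounts to the following commands (a sketch; newer conda versions that already default to libmamba need no action):

```bash
# Sketch: install the libmamba solver into the base environment and make it
# the default solver (only needed on older conda versions).
conda update -n base conda
conda install -n base conda-libmamba-solver
conda config --set solver libmamba
```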
-#### Standard Ticket/Issue +
-TODO -#### Epics +In order to not have to manually type or copy/paste the above repeatedly, note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. + +## Commit Signing + +For all conda maintainers, we require commit signing and strongly recommend it for all others wishing to contribute to conda +related projects. More information about how to set this up within GitHub can be found here: + +- [GitHub's signing commits docs][docs-commit-signing] + +## Types of Issues + +### Standard Issue TODO -#### Spikes +### Epics TODO + +### Spikes + +#### What is a spike? + +"Spike" is a term that is borrowed from extreme programming and agile development. They are used when the **outcome of an issue is unknown or even optional**. For example, when first coming across a problem that has not been solved before, a project may choose to either research the problem or create a prototype in order to better understand it. + +Additionally, spikes represent work that **may or may not actually be completed or implemented**. An example of this are prototypes created to explore possible solutions. Not all prototypes are implemented and the purpose of creating a prototype is often to explore the problem space more. For research-oriented tasks, the end result of this research may be that a feature request simply is not viable at the moment and would result in putting a stop to that work. + +Finally, spikes are usually **timeboxed**. However, given the open source/volunteer nature of our contributions, we do not enforce this for our contributors. When a timebox is set, this means that we are limiting how long we want someone to work on said spike. We do this to prevent contributors from falling into a rabbit hole they may never return from. Instead, we set a time limit to perform work on the spike and then have the assignee report back. If the tasks defined in the spike have not yet been completed, a decision is made on whether it makes sense to perform further work on the spike. + +#### When do I create a spike? + +A spike should be created when we do not have enough information to move forward with solving a problem. That simply means that, whenever we are dealing with unknowns or processes the project team has never encountered before, it may be useful for us to create a spike. + +In day-to-day work, this kind of situation may appear when new bug reports or feature requests come in that deal with problems or technologies that the project team is unfamiliar with. All issues that the project team has sufficient knowledge of should instead proceed as regular issues. + +#### When do I not create a spike? + +Below are some common scenarios where creating a spike is not appropriate: + +- Writing a technical specification for a feature we know how to implement +- Design work that would go into drafting how an API is going to look and function +- Any work that must be completed or is not optional + +## Working on Issues + +### How do I assign myself to an issue I am actively reviewing? + +If you do **not** have permissions, please indicate that you are working on an issue by leaving a comment. Someone who has permissions will assign you to the issue. If two weeks have passed without a pull request or an additional comment requesting information, you may be removed from the issue and the issue reassigned. 
+ +If you are assigned to an issue but will not be able to continue work on it, please comment to indicate that you will no longer be working on it and press `unassign me` next to your username in the `Assignees` section of the issue page (top right). + +If you **do** have permissions, please assign yourself to the issue by pressing `assign myself` under the `Assignees` section of the issue page (top right). diff --git a/LICENSE.txt b/LICENSE similarity index 100% rename from LICENSE.txt rename to LICENSE diff --git a/Makefile b/Makefile index f5f85b1a35..db5bd26292 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ SHELL := /bin/bash -o pipefail -o errexit # ENV_NAME=dev TMPDIR=$HOME make test ENV_NAME ?= conda-build DOC_ENV_NAME ?= conda-build-docs -PYTHON_VERSION ?= 3.8 +PYTHON_VERSION ?= 3.11 TMPDIR := $(shell if test -w $(TMPDIR); then echo $(TMPDIR); else echo ./tmp/ ; fi)conda-build-testing # We want to bypass the shell wrapper function and use the binary directly for conda-run specifically @@ -13,34 +13,29 @@ CONDA := $(shell which conda) # Setup env for documents env-docs: - conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) --yes + conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) pip --yes $(CONDA) run --name $(DOC_ENV_NAME) pip install -r ./docs/requirements.txt .PHONY: $(MAKECMDGOALS) .PHONY: setup -setup: ../conda_build_test_recipe +setup: $(CONDA) create --name $(ENV_NAME) --file tests/requirements.txt --channel defaults python=$(PYTHON_VERSION) # Runs all tests .PHONY: test -test: ../conda_build_test_recipe $(TMPDIR) +test: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ --basetemp $(TMPDIR) # Run the serial tests .PHONY: test-serial -test-serial: ../conda_build_test_recipe $(TMPDIR) +test-serial: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ -m "serial" --basetemp $(TMPDIR) # Run the not serial tests AKA parallel tests .PHONY: test-parallel -test-parallel: ../conda_build_test_recipe $(TMPDIR) +test-parallel: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ -m "not serial" --basetemp $(TMPDIR) -# Checkout the required test recipes -# Requires write access to the directory above this -../conda_build_test_recipe: - git clone https://github.com/conda/conda_build_test_recipe ../conda_build_test_recipe - $(TMPDIR): mkdir -p $(TMPDIR) diff --git a/README.md b/README.md index 9c4c9a2f11..cae61abbfd 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,6 @@ $ conda info $ conda install -n base conda-build ``` - ## Building Your Own Packages You can easily build your own packages for `conda`, and upload them to diff --git a/RELEASE.md b/RELEASE.md index 533f4ed52d..d45614facc 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,20 +1,28 @@ -## Release Process - -> **Note:** -> Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. 
- -[epic template]: ../../issues/new?assignees=&labels=epic&template=epic.yml + +[epic template]: https://github.com/conda/conda/issues/new?assignees=&labels=epic&template=epic.yml +[compare]: https://github.com/conda/infrastructure/compare +[new release]: https://github.com/conda/infrastructure/releases/new + +[infrastructure]: https://github.com/conda/infrastructure [rever docs]: https://regro.github.io/rever-docs -[compare]: ../../compare -[new release]: ../../releases/new [release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes +[merge conflicts]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts +[Anaconda Recipes]: https://github.com/AnacondaRecipes/conda-feedstock +[conda-forge]: https://github.com/conda-forge/conda-feedstock + +# Release Process + +> **Note:** +> Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. -### 1. Open the Release Issue. +## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) > **Note:** -> The [epic template][epic template] is perfect for this, just remember to remove the https://github.com/conda/infra/labels/epic label. +> The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers. + +Use the issue template below to create the release issue. After creating the release issue, pin it for easy access.
GitHub Issue Template @@ -22,41 +30,66 @@ ```markdown ### Summary -Placeholder for `conda YY.M.0` release. +Placeholder for `{{ repo.name }} YY.M.x` release. + +| Pilot | | +|---|---| +| Co-pilot | | ### Tasks -[milestone]: https://github.com/conda/conda/milestone/56 -[releases]: https://github.com/conda/conda/releases -[main]: https://github.com/AnacondaRecipes/conda-feedstock -[conda-forge]: https://github.com/conda-forge/conda-feedstock +[milestone]: {{ repo.url }}/milestone/ +[process]: {{ repo.url }}/blob/main/RELEASE.md +[releases]: {{ repo.url }}/releases +[main]: https://github.com/AnacondaRecipes/{{ repo.name }}-feedstock +[conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock +[ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/ + +#### The week before release week +- [ ] Create release branch (named `YY.M.x`) +- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`) - [ ] [Complete outstanding PRs][milestone] -- [ ] Create release PR - - See release process https://github.com/conda/infra/issues/541 -- [ ] [Publish Release][releases] -- [ ] Create/update `YY.M.x` branch +- [ ] Test release candidates + + +#### Release week + +- [ ] Create release PR (see [release process][process]) +- [ ] [Publish release][releases] +- [ ] Activate the `YY.M.x` branch on [ReadTheDocs][ReadTheDocs] - [ ] Feedstocks - - [ ] Bump version [Anaconda's main][main] - - [ ] Bump version [conda-forge][conda-forge] - - Link any other feedstock PRs that are necessary + - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main] + - [ ] Bump version & update dependencies/tests in [conda-forge feedstock][conda-forge] + - [ ] Hand off to the Anaconda packaging team - [ ] Announce release - - [ ] Slack - - [ ] Twitter + - Blog Post (optional) + - [ ] conda.org (link to pull request) + - Long form + - [ ] Create release [announcement draft](https://github.com/conda/communications) + - [ ] [Discourse](https://conda.discourse.group/) + - [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse) + - Summary + - [ ] [Twitter](https://twitter.com/condaproject) ``` -
+> **Note:** +> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. + +## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) + +Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue). -### 2. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. +## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. -These are synced from https://github.com/conda/infra. +These are synced from [`conda/infrastructure`][infrastructure].
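An optional, quick way to spot drift is to diff the local copies against the synced sources; the paths inside `conda/infrastructure` below are assumptions for illustration and may not match the actual repository layout:

```bash
# Sketch: compare local files against their (assumed) template locations in
# conda/infrastructure; no diff output means the file is already in sync.
for f in rever.xsh news/TEMPLATE; do
  curl -fsSL "https://raw.githubusercontent.com/conda/infrastructure/main/templates/$f" | diff -u - "$f"
done
```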
-

3. Run Rever.

+

4. Run rever. (ideally done on the Monday of release week)

-Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (i.e. to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. +Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. 1. Install [`rever`][rever docs] and activate the environment: @@ -69,14 +102,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Clone and `cd` into the repository if you haven't done so already: ```bash - (rever) $ git clone git@github.com:conda/conda.git + (rever) $ git clone git@github.com:{{ repo.user }}/{{ repo.name }}.git (rever) $ cd conda ``` -2. Create a release branch: +2. Fetch the latest changes from the remote and checkout the release branch created a week ago: + + ```bash + (rever) $ git fetch upstream + (rever) $ git checkout YY.M.x + ``` + +2. Create a versioned branch, this is where rever will make its changes: ```bash - (rever) $ git checkout -b release-YY.M.0 + (rever) $ git checkout -b changelog-YY.M.[$patch_number] ``` 2. Run `rever --activities authors`: @@ -104,7 +144,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - Here's a sample run where we undo the commit made by rever in order to commit the changes to `.authors.yml` separately: ```bash - (rever) $ rever --activities authors --force YY.M.0 + (rever) $ rever --activities authors --force YY.M.[$patch_number] # changes were made to .authors.yml as per the prior bullet (rever) $ git diff --name-only HEAD HEAD~1 @@ -123,7 +163,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .authors.yml" + (rever) $ git commit -m "Update .authors.yml" ``` - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running: @@ -148,15 +188,15 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .mailmap" + (rever) $ git commit -m "Update .mailmap" ``` - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. 
After completing this, you will have at most two commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap ``` @@ -168,7 +208,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut > We've also found that we like to include the PR #s inline with the text itself, e.g.: > > ```markdown - > ### Enhancements + > ## Enhancements > > * Add `win-arm64` as a known platform (subdir). (#11778) > ``` @@ -181,16 +221,16 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated news" + (rever) $ git commit -m "Update news" ``` - After completing this, you will have at most three commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 5. Run `rever --activities changelog`: @@ -215,9 +255,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 6. Now that we have successfully run the activities separately, we wish to run both together. This will ensure that the contributor list, a side-effect of the authors activity, is included in the changelog activity. @@ -230,11 +270,11 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] ``` 7. Since rever does not include stats on first-time contributors, we will need to add this manually. @@ -245,34 +285,34 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . 
- (rever) $ git commit -m "Added first contributions" + (rever) $ git commit -m "Add first-time contributions" ``` - After completing this, you will have at most six commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 - + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Added first contributions + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] + + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Add first-time contributions ``` -8. Push this release branch: +8. Push this versioned branch. ```bash - (rever) $ git push -u upstream release-YY.M.0 + (rever) $ git push -u upstream changelog-YY.M.[$patch_number] ``` -9. Open the Release PR. +9. Open the Release PR targing the `YY.M.x` branch.
GitHub PR Template ```markdown - ### Description + ## Description ✂️ snip snip ✂️ the making of a new release. @@ -281,40 +321,89 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-10. Update Release Issue to include a link to the Release PR. +10. Update release issue to include a link to the release PR. 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: > **Note:** - > Only publish the release after the Release PR is merged, until then always **save as draft**. + > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | |---|---| - | Choose a tag | `YY.M.0` | - | Target | `main` | + | Choose a tag | `YY.M.[$patch_number]` | + | Target | `YY.M.x` | | Body | copy/paste blurb from `CHANGELOG.md` |
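Equivalently, the draft can be created from the command line with the GitHub CLI (a sketch, not part of the documented process; `conda/conda-build` and `notes.md` stand in for the target repository and for a file holding the copied `CHANGELOG.md` blurb):

```bash
# Sketch: create the draft release via the GitHub CLI using the values from
# the table above (replace the placeholders with the real version and branch).
gh release create "YY.M.[$patch_number]" \
  --repo conda/conda-build \
  --target "YY.M.x" \
  --title "YY.M.[$patch_number]" \
  --notes-file notes.md \
  --draft
```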
-### 4. Wait for review and approval of Release PR. +## 5. Wait for review and approval of release PR. + +## 6. Manually test canary build(s). + +### Canary Builds for Manual Testing + +Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. + +> **Note:** +> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. + +## 7. Merge release PR and publish release. + +To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. + +> **Note:** +> Release notes can be drafted and saved ahead of time. + +## 8. Merge/cherry pick the release branch over to the `main` branch. + +
+Internal process + +1. From the main "< > Code" page of the repository, select the drop down menu next to the `main` branch button and then select "View all branches" at the very bottom. + +2. Find the applicable `YY.M.x` branch and click the "New pull request" button. + +3. "Base" should point to `main` while "Compare" should point to `YY.M.x`. + +4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". + +> **Note:** +> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches. + +5. Review and merge the pull request the same as any code change pull request. + +> **Note:** +> The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action. + +
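For maintainers doing this locally rather than through the GitHub UI, the equivalent merge (preserving the release branch's individual commits with a true merge commit rather than a squash) might look like the following sketch, assuming the shared remote is named `upstream`:

```bash
# Sketch: merge the release branch into main with an explicit merge commit
# (no squash); assumes the shared remote is named "upstream" and that branch
# protections have been relaxed as described above.
git fetch upstream
git checkout main
git pull upstream main
git merge --no-ff upstream/YY.M.x
git push upstream main
```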
+ +## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`. -### 5. Merge Release PR and Publish Release. +> **Note:** +> Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): +> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax) +> - Add any changes via commits to that new branch +> - Open a new PR and push it against the `main` branch +> +> Make sure to include a comment on the original `autotick-bot` PR that a new pull request has been created, in order to avoid duplicating work! `regro-cf-autotick-bot` will close the auto-created PR once the new PR is merged. +> +> For more information about this process, please read the ["Pushing to regro-cf-autotick-bot branch" section of the conda-forge documentation](https://conda-forge.org/docs/maintainer/updating_pkgs.html#pushing-to-regro-cf-autotick-bot-branch). -### 6. Create a new branch (`YY.M.x`) corresponding with the release. -### 7. Open PRs to bump main and conda-forge feedstocks to use `YY.M.0`. +## 10. Hand off to Anaconda's packaging team. -### 8. Hand off to Anaconda's packaging team. +> **Note:** +> This step should NOT be done past Thursday morning EST; please start the process on a Monday, Tuesday, or Wednesday instead in order to avoid any potential debugging sessions over evenings or weekends.
Internal process -1. Open packaging request in #package_requests, include links to the Release PR and feedstock PRs. +1. Open packaging request in #package_requests Slack channel, include links to the Release PR and feedstock PRs. 2. Message packaging team/PM to let them know that a release has occurred and that you are the release manager.
-### 9. Continue championing and shepherding. +## 11. Continue championing and shepherding. -Remember to continue updating the Release Issue with the latest details as tasks are completed. +Remember to make all relevant announcements and continue to update the release issue with the latest details as tasks are completed. diff --git a/benchmarks/time_render.py b/benchmarks/time_render.py index 5fe76f6f45..41db953c87 100644 --- a/benchmarks/time_render.py +++ b/benchmarks/time_render.py @@ -2,28 +2,37 @@ # SPDX-License-Identifier: BSD-3-Clause import os -from conda_build import api - # god-awful hack to get data from the test recipes import sys + +from conda_build import api + _thisdir = os.path.dirname(__file__) sys.path.append(os.path.dirname(_thisdir)) from tests.utils import metadata_dir # noqa: E402 -variant_dir = os.path.join(metadata_dir, '..', 'variants') + +variant_dir = os.path.join(metadata_dir, "..", "variants") def time_simple_render(): - api.render(os.path.join(metadata_dir, 'python_run'), finalize=False, - bypass_env_check=True) + api.render( + os.path.join(metadata_dir, "python_run"), finalize=False, bypass_env_check=True + ) def time_top_level_variant_render(): - api.render(os.path.join(variant_dir, '02_python_version'), finalize=False, - bypass_env_check=True) + api.render( + os.path.join(variant_dir, "02_python_version"), + finalize=False, + bypass_env_check=True, + ) def time_single_top_level_multi_output(): - api.render(os.path.join(variant_dir, 'test_python_as_subpackage_loop'), - finalize=False, bypass_env_check=True) + api.render( + os.path.join(variant_dir, "test_python_as_subpackage_loop"), + finalize=False, + bypass_env_check=True, + ) diff --git a/bin/conda-build b/bin/conda-build index b203e80e1e..bf14475007 100755 --- a/bin/conda-build +++ b/bin/conda-build @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_build import main sys.exit(main()) diff --git a/bin/conda-convert b/bin/conda-convert index 66cf207213..ca85184ba4 100755 --- a/bin/conda-convert +++ b/bin/conda-convert @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_convert import main sys.exit(main()) diff --git a/bin/conda-develop b/bin/conda-develop index 7fd11d4a1b..657a533493 100755 --- a/bin/conda-develop +++ b/bin/conda-develop @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_develop import main sys.exit(main()) diff --git a/bin/conda-index b/bin/conda-index index 5a83e54d16..b1d0f34958 100755 --- a/bin/conda-index +++ b/bin/conda-index @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_index import main sys.exit(main()) diff --git a/bin/conda-inspect b/bin/conda-inspect index 59f4975780..b8204c2746 100755 --- a/bin/conda-inspect +++ b/bin/conda-inspect @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_inspect import main sys.exit(main()) diff --git a/bin/conda-metapackage b/bin/conda-metapackage index 2e976ef9d3..4a57921b0f 100755 --- a/bin/conda-metapackage +++ b/bin/conda-metapackage @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_metapackage import main sys.exit(main()) diff --git a/bin/conda-render b/bin/conda-render index 4a1ddeed18..3372118d73 100755 --- a/bin/conda-render +++ b/bin/conda-render @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_render import main sys.exit(main()) diff --git a/bin/conda-skeleton b/bin/conda-skeleton index 9a66dbe27c..ffc0aa3127 100755 --- a/bin/conda-skeleton +++ 
b/bin/conda-skeleton @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_skeleton import main sys.exit(main()) diff --git a/ci/github/activate_conda b/ci/github/activate_conda deleted file mode 100644 index 381e7afc0b..0000000000 --- a/ci/github/activate_conda +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -unset CONDA_SHLVL -eval "$(${1} -m conda shell.bash hook)" -conda activate base diff --git a/ci/github/install_conda_build_test_deps b/ci/github/install_conda_build_test_deps deleted file mode 100755 index a7d03305b2..0000000000 --- a/ci/github/install_conda_build_test_deps +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -# Note, this can be used on Windows but is not used by our CI. -function install_conda_build_test_deps_fn() -{ - local MACOS_ARM64=no - local DEF_CHAN=defaults - if [[ $(uname) == Darwin ]] && [[ $(uname -m) == arm64 ]]; then - MACOS_ARM64=yes - DEF_CHAN=conda-forge - fi - # avoids a python 3.7 problem - local -a _PKGS=(cytoolz conda-verify) - _PKGS+=(${DEF_CHAN}::pytest ${DEF_CHAN}::pytest-cov ${DEF_CHAN}::pytest-forked ${DEF_CHAN}::pytest-xdist) - _PKGS+=(${DEF_CHAN}::py-lief ${DEF_CHAN}::pytest-mock) - _PKGS+=(${DEF_CHAN}::pytest-replay ${DEF_CHAN}::pytest-rerunfailures) - _PKGS+=(${DEF_CHAN}::anaconda-client ${DEF_CHAN}::git ${DEF_CHAN}::requests ${DEF_CHAN}::filelock ${DEF_CHAN}::contextlib2 ${DEF_CHAN}::jinja2 ${DEF_CHAN}::pytest-rerunfailures) - _PKGS+=(${DEF_CHAN}::ripgrep ${DEF_CHAN}::pyflakes ${DEF_CHAN}::beautifulsoup4 ${DEF_CHAN}::chardet ${DEF_CHAN}::pycrypto ${DEF_CHAN}::glob2 ${DEF_CHAN}::psutil ${DEF_CHAN}::pytz ${DEF_CHAN}::tqdm) - _PKGS+=(${DEF_CHAN}::conda-package-handling ${DEF_CHAN}::perl ${DEF_CHAN}::python-libarchive-c) - _PKGS+=(${DEF_CHAN}::pip ${DEF_CHAN}::numpy ${DEF_CHAN}::pkginfo) - if [[ $(uname) =~ .*inux.* ]] && [[ ! ${MACOS_ARM64} == yes ]] ; then - _PKGS+=(${DEF_CHAN}::patchelf) - fi - if [[ $(uname) =~ M.* ]]; then - _PKGS+=(${DEF_CHAN}::m2-patch) - _PKGS+=(${DEF_CHAN}::m2-gcc-libs) - else - _PKGS+=(${DEF_CHAN}::patch) - fi - echo -e "Asking conda to install:\n${_PKGS[@]}" - conda install -y --show-channel-urls "${_PKGS[@]}" "$@" - # If we install shellcheck from conda-forge and packages from defaults at the same time (via channel::package) - # then conda-forge used for other packages too. We could force it by forcing transitive deps to also be listed - # with their channel, but, well, yuck. - if [[ ${MACOS_ARM64} == yes ]]; then - echo "Not installing shellcheck as it is unavailable on macOS arm64 at present" - else - if [[ $(uname) =~ .*inux.* ]]; then - conda install -y --show-channel-urls shellcheck - else - conda install -y --show-channel-urls conda-forge::shellcheck - fi - fi - if [[ ! -d ../conda_build_test_recipe ]]; then - pushd .. - git clone "https://github.com/conda/conda_build_test_recipe" - popd - fi -} - -install_conda_build_test_deps_fn "$@" diff --git a/ci/github/run_conda_forge_build_setup_osx b/ci/github/run_conda_forge_build_setup_osx deleted file mode 100644 index a17357c6d6..0000000000 --- a/ci/github/run_conda_forge_build_setup_osx +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -export PYTHONUNBUFFERED=1 - -# deployment target should be set by conda_build_config.yaml (default in conda-forge-pinning). -# The default here will only be used when that is undefined, -# which should only be recipes still using conda-build 2. 
-export MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET:-10.15} -export CPU_COUNT=$(sysctl -n hw.ncpu) -export INSTALL_XCODE=${INSTALL_XCODE:-0} -echo "Intial \$(xcode-select -p) is $(xcode-select -p)" -echo "PATH is $PATH" -echo "INSTALL_XCODE is $INSTALL_XCODE" -if [[ ${INSTALL_XCODE} == 1 ]]; then - sudo xcode-select --switch /Applications/Xcode_12.5.app/Contents/Developer - echo "After selecting Xcode_12.5.app, \$(xcode-select -p) is $(xcode-select -p)" -fi -echo "PATH is $PATH" -export CONDA_BUILD_SYSROOT="$(xcode-select -p)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk" - -if [[ ! -d ${CONDA_BUILD_SYSROOT} || "$OSX_FORCE_SDK_DOWNLOAD" == "1" ]]; then - echo "Downloading ${MACOSX_DEPLOYMENT_TARGET} sdk" - curl -L -O https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk.tar.xz - tar -xf MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk.tar.xz -C "$(dirname "$CONDA_BUILD_SYSROOT")" - # set minimum sdk version to our target - if [[ ${INSTALL_XCODE} == 1 ]]; then - plutil -replace MinimumSDKVersion -string ${MACOSX_DEPLOYMENT_TARGET} $(xcode-select -p)/Platforms/MacOSX.platform/Info.plist - plutil -replace DTSDKName -string macosx${MACOSX_DEPLOYMENT_TARGET}internal $(xcode-select -p)/Platforms/MacOSX.platform/Info.plist - fi -fi - -if [ -d "${CONDA_BUILD_SYSROOT}" ] -then - echo "Found CONDA_BUILD_SYSROOT: ${CONDA_BUILD_SYSROOT}" -else - echo "Missing CONDA_BUILD_SYSROOT: ${CONDA_BUILD_SYSROOT}" - exit 1 -fi - -conda config --set show_channel_urls true -conda config --set auto_update_conda false -conda config --set add_pip_as_python_dependency false - -# CONDA_PREFIX might be unset -export CONDA_PREFIX="${CONDA_PREFIX:-$(conda info --json | jq -r .root_prefix)}" - -mkdir -p "${CONDA_PREFIX}/etc/conda/activate.d" -echo "export CONDA_BUILD_SYSROOT='${CONDA_BUILD_SYSROOT}'" > "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export CPU_COUNT='${CPU_COUNT}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export PYTHONUNBUFFERED='${PYTHONUNBUFFERED}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export MACOSX_DEPLOYMENT_TARGET='${MACOSX_DEPLOYMENT_TARGET}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" - -conda info -conda config --show-sources -conda list --show-channel-urls diff --git a/ci/github/setup_pytest_replay b/ci/github/setup_pytest_replay deleted file mode 100644 index f90955bd58..0000000000 --- a/ci/github/setup_pytest_replay +++ /dev/null @@ -1,5 +0,0 @@ -mkdir $BUILD_ARTIFACTSTAGINGDIRECTORY/pytest-replay -declare -a PYTEST_REPLAY_OPTIONS=() -PYTEST_REPLAY_OPTIONS+=("--replay-record-dir=$BUILD_ARTIFACTSTAGINGDIRECTORY/pytest-replay") -PYTEST_REPLAY_OPTIONS+=("--replay-base-name=Linux-$CONDA_VERSION-Py$PYTHON_VERSION") -echo "##vso[task.setvariable variable=PYTEST_REPLAY_OPTIONS]${PYTEST_REPLAY_OPTIONS[@]}" diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 481c819391..f110bb23b5 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -1,17 +1,46 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from . 
import _version -__version__ = _version.get_versions()['version'] +try: + from ._version import __version__ +except ImportError: + # _version.py is only created after running `pip install` + try: + from setuptools_scm import get_version + + __version__ = get_version(root="..", relative_to=__file__) + except (ImportError, OSError, LookupError): + # ImportError: setuptools_scm isn't installed + # OSError: git isn't installed + # LookupError: setuptools_scm unable to detect version + # Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: + # YY.MM.MICRO.devN+gHASH[.dirty] + __version__ = "0.0.0.dev0+placeholder" + +__all__ = ["__version__"] # Sub commands added by conda-build to the conda command sub_commands = [ - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'metapackage', - 'render' - 'skeleton', + "build", + "convert", + "develop", + "index", + "inspect", + "metapackage", + "render", + "skeleton", ] + +# Skip context logic for doc generation since we don't install all dependencies in the CI doc build environment, +# see .readthedocs.yml file +try: + import os + + from conda.base.context import reset_context + + # Disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. + os.environ["CONDA_ALLOW_SOFTLINKS"] = "false" + reset_context() + +except ImportError: + pass diff --git a/conda_build/_link.py b/conda_build/_link.py index 50c0da3641..e8984fcd37 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -3,14 +3,16 @@ """ This is code that is added to noarch Python packages. See conda_build/noarch_python.py. - """ + +from __future__ import annotations + import os -from os.path import dirname, exists, isdir, join, normpath import re -import sys import shutil - +import sys +from os.path import dirname, exists, isdir, join, normpath +from pathlib import Path # Silence pyflakes. This variable is added when link.py is written by # conda_build.noarch_python. @@ -19,22 +21,24 @@ THIS_DIR = dirname(__file__) PREFIX = normpath(sys.prefix) -if sys.platform == 'win32': - BIN_DIR = join(PREFIX, 'Scripts') - SITE_PACKAGES = 'Lib/site-packages' +if sys.platform == "win32": + BIN_DIR = join(PREFIX, "Scripts") + SITE_PACKAGES = "Lib/site-packages" else: - BIN_DIR = join(PREFIX, 'bin') - SITE_PACKAGES = 'lib/python%s/site-packages' % sys.version[:3] + BIN_DIR = join(PREFIX, "bin") + SITE_PACKAGES = f"lib/python{sys.version[:3]}/site-packages" # the list of these files is going to be store in info/_files FILES = [] # three capture groups: whole_shebang, executable, options -SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string - br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path - br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA - br'(.*)' # the rest of the line can contain option flags - br')$') # end whole_shebang group +SHEBANG_REGEX = ( + rb"^(#!" # pretty much the whole match string + rb"(?:[ ]*)" # allow spaces between #! and beginning of the executable path + rb"(/(?:\\ |[^ \n\r\t])*)" # the executable is the next text block without an escaped space or non-space whitespace character # NOQA + rb"(.*)" # the rest of the line can contain option flags + rb")$" +) # end whole_shebang group def _link(src, dst): @@ -52,12 +56,18 @@ def _unlink(path): pass -def pyc_f(f, version_info=sys.version_info): +def pyc_f( + path: str | os.PathLike, + version_info: tuple[int, ...] 
= sys.version_info, +) -> str: + path = Path(path) if version_info[0] == 2: - return f + 'c' - dn, fn = f.rsplit('/', 1) - return '%s/__pycache__/%s.cpython-%d%d.pyc' % ( - dn, fn[:-3], version_info[0], version_info[1]) + return str(path.with_suffix(".pyc")) + return str( + path.parent + / "__pycache__" + / f"{path.stem}.cpython-{version_info[0]}{version_info[1]}.pyc" + ) def link_files(src_root, dst_root, files): @@ -70,48 +80,50 @@ def link_files(src_root, dst_root, files): if exists(dst): _unlink(dst) _link(src, dst) - f = f'{dst_root}/{f}' + f = f"{dst_root}/{f}" FILES.append(f) - if f.endswith('.py'): + if f.endswith(".py"): FILES.append(pyc_f(f)) # yanked from conda def replace_long_shebang(data): # this function only changes a shebang line if it exists and is greater than 127 characters - if hasattr(data, 'encode'): + if hasattr(data, "encode"): data = data.encode() shebang_match = re.match(SHEBANG_REGEX, data, re.MULTILINE) if shebang_match: whole_shebang, executable, options = shebang_match.groups() if len(whole_shebang) > 127: - executable_name = executable.decode('utf-8').split('/')[-1] - new_shebang = '#!/usr/bin/env {}{}'.format(executable_name, options.decode('utf-8')) - data = data.replace(whole_shebang, new_shebang.encode('utf-8')) - if hasattr(data, 'decode'): + executable_name = executable.decode("utf-8").split("/")[-1] + new_shebang = "#!/usr/bin/env {}{}".format( + executable_name, options.decode("utf-8") + ) + data = data.replace(whole_shebang, new_shebang.encode("utf-8")) + if hasattr(data, "decode"): data = data.decode() return data def create_script(fn): - src = join(THIS_DIR, 'python-scripts', fn) + src = join(THIS_DIR, "python-scripts", fn) dst = join(BIN_DIR, fn) - if sys.platform == 'win32': - shutil.copy2(src, dst + '-script.py') - FILES.append('Scripts/%s-script.py' % fn) - shutil.copy2(join(THIS_DIR, - 'cli-%d.exe' % (8 * tuple.__itemsize__)), - dst + '.exe') - FILES.append('Scripts/%s.exe' % fn) + if sys.platform == "win32": + shutil.copy2(src, dst + "-script.py") + FILES.append(f"Scripts/{fn}-script.py") + shutil.copy2( + join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" + ) + FILES.append(f"Scripts/{fn}.exe") else: with open(src) as fi: data = fi.read() - with open(dst, 'w') as fo: - shebang = replace_long_shebang('#!%s\n' % normpath(sys.executable)) + with open(dst, "w") as fo: + shebang = replace_long_shebang(f"#!{normpath(sys.executable)}\n") fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append('bin/%s' % fn) + FILES.append(f"bin/{fn}") def create_scripts(files): @@ -124,15 +136,14 @@ def create_scripts(files): def main(): - create_scripts(DATA['python-scripts']) - link_files('site-packages', SITE_PACKAGES, DATA['site-packages']) - link_files('Examples', 'Examples', DATA['Examples']) + create_scripts(DATA["python-scripts"]) + link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) + link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, 'conda-meta', - '%s.files' % DATA['dist']), 'w') as fo: + with open(join(PREFIX, "conda-meta", "{}.files".format(DATA["dist"])), "w") as fo: for f in FILES: - fo.write('%s\n' % f) + fo.write(f"{f}\n") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py index fd4bef91f1..9180c404fc 100644 --- a/conda_build/_load_setup_py_data.py +++ b/conda_build/_load_setup_py_data.py @@ -1,18 +1,24 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause 
+import logging import os import sys -import logging -def load_setup_py_data(setup_file, from_recipe_dir=False, recipe_dir=None, work_dir=None, - permit_undefined_jinja=True): +def load_setup_py_data( + setup_file, + from_recipe_dir=False, + recipe_dir=None, + work_dir=None, + permit_undefined_jinja=True, +): _setuptools_data = {} log = logging.getLogger(__name__) - import setuptools import distutils.core + import setuptools + cd_to_work = False path_backup = sys.path @@ -39,8 +45,10 @@ def _change_cwd(target_dir): if not os.path.isabs(setup_file): setup_file = os.path.join(work_dir, setup_file) else: - message = ("Did not find setup.py file in manually specified location, and source " - "not downloaded yet.") + message = ( + "Did not find setup.py file in manually specified location, and source " + "not downloaded yet." + ) if permit_undefined_jinja: log.debug(message) return {} @@ -58,7 +66,7 @@ def _change_cwd(target_dir): except ImportError: pass # setuptools <30.3.0 cannot read metadata / options from 'setup.cfg' else: - setup_cfg = os.path.join(os.path.dirname(setup_file), 'setup.cfg') + setup_cfg = os.path.join(os.path.dirname(setup_file), "setup.cfg") if os.path.isfile(setup_cfg): # read_configuration returns a dict of dicts. Each dict (keys: 'metadata', # 'options'), if present, provides keyword arguments for the setup function. @@ -77,12 +85,15 @@ def setup(**kw): numpy_setup = None versioneer = None - if 'versioneer' in sys.modules: - versioneer = sys.modules['versioneer'] - del sys.modules['versioneer'] + if "versioneer" in sys.modules: + versioneer = sys.modules["versioneer"] + del sys.modules["versioneer"] try: + # numpy.distutils deprecated in Python 3.12+ + # see https://numpy.org/doc/stable/reference/distutils_status_migration.html import numpy.distutils.core + numpy_setup = numpy.distutils.core.setup numpy.distutils.core.setup = setup except ImportError: @@ -90,19 +101,19 @@ def setup(**kw): setuptools.setup = distutils.core.setup = setup ns = { - '__name__': '__main__', - '__doc__': None, - '__file__': setup_file, + "__name__": "__main__", + "__doc__": None, + "__file__": setup_file, } if os.path.isfile(setup_file): with open(setup_file) as f: - code = compile(f.read(), setup_file, 'exec', dont_inherit=1) + code = compile(f.read(), setup_file, "exec", dont_inherit=1) exec(code, ns, ns) else: if not permit_undefined_jinja: - raise TypeError(f'{setup_file} is not a file that can be read') + raise TypeError(f"{setup_file} is not a file that can be read") - sys.modules['versioneer'] = versioneer + sys.modules["versioneer"] = versioneer distutils.core.setup = distutils_setup setuptools.setup = setuptools_setup @@ -116,26 +127,42 @@ def setup(**kw): return _setuptools_data -if __name__ == '__main__': - import json +if __name__ == "__main__": import argparse - parser = argparse.ArgumentParser(description='run setup.py file to obtain metadata') - parser.add_argument('work_dir', help=('path to work dir, where we\'ll write the output data ' - 'json, and potentially also where setup.py should be found')) - parser.add_argument('setup_file', help='path or filename of setup.py file') - parser.add_argument('--from-recipe-dir', help=('look for setup.py file in recipe ' - 'dir (as opposed to work dir)'), - default=False, action="store_true") - parser.add_argument('--recipe-dir', help=('(optional) path to recipe dir, where ' - 'setup.py should be found')) - - parser.add_argument('--permit-undefined-jinja', help=('look for setup.py file in recipe ' - 'dir (as opposed to work dir)'), - 
default=False, action="store_true") + import json + + parser = argparse.ArgumentParser(description="run setup.py file to obtain metadata") + parser.add_argument( + "work_dir", + help=( + "path to work dir, where we'll write the output data " + "json, and potentially also where setup.py should be found" + ), + ) + parser.add_argument("setup_file", help="path or filename of setup.py file") + parser.add_argument( + "--from-recipe-dir", + help="look for setup.py file in recipe dir (as opposed to work dir)", + default=False, + action="store_true", + ) + parser.add_argument( + "--recipe-dir", + help="(optional) path to recipe dir, where setup.py should be found", + ) + + parser.add_argument( + "--permit-undefined-jinja", + help="look for setup.py file in recipe dir (as opposed to work dir)", + default=False, + action="store_true", + ) args = parser.parse_args() # we get back a dict of the setup data data = load_setup_py_data(**args.__dict__) - with open(os.path.join(args.work_dir, 'conda_build_loaded_setup_py.json'), 'w') as f: + with open( + os.path.join(args.work_dir, "conda_build_loaded_setup_py.json"), "w" + ) as f: # this is lossy. Anything that can't be serialized is either forced to None or # removed completely. json.dump(data, f, skipkeys=True, default=lambda x: None) diff --git a/conda_build/_version.py b/conda_build/_version.py deleted file mode 100644 index 06b9cf8f38..0000000000 --- a/conda_build/_version.py +++ /dev/null @@ -1,657 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "conda-build-" - cfg.versionfile_source = "conda_build/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print(f"unable to find command, tried {commands}") - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. 
When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs) as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. 
- branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} diff --git a/conda_build/api.py b/conda_build/api.py index 3b05833245..cc866a865d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -9,173 +9,243 @@ but only use those kwargs in config. Config must change to support new features elsewhere. """ +from __future__ import annotations + # imports are done locally to keep the api clean and limited strictly # to conda-build's functionality. - -import sys as _sys - -# make the Config class available in the api namespace -from conda_build.config import (Config, get_or_merge_config, get_channel_urls, - DEFAULT_PREFIX_LENGTH as _prefix_length) -from conda_build.utils import ensure_list as _ensure_list -from conda_build.utils import expand_globs as _expand_globs -from conda_build.utils import get_logger as _get_logger +import os +import sys from os.path import dirname, expanduser, join +from pathlib import Path +from typing import TYPE_CHECKING, Iterable - -def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, - finalize=True, bypass_env_check=False, **kwargs): +# make the Config class available in the api namespace +from .config import DEFAULT_PREFIX_LENGTH as _prefix_length +from .config import Config, get_channel_urls, get_or_merge_config +from .metadata import MetaData, MetaDataTuple +from .utils import ( + CONDA_PACKAGE_EXTENSIONS, + LoggingContext, + ensure_list, + expand_globs, + find_recipe, + get_skip_message, + on_win, +) + +if TYPE_CHECKING: + from typing import Any, Literal + + StatsDict = dict[str, Any] + + +def render( + recipe_path: str | os.PathLike | Path, + config: Config | None = None, + variants: dict[str, Any] | None = None, + permit_unsatisfiable_variants: bool = True, + finalize: bool = True, + bypass_env_check: bool = False, + **kwargs, +) -> list[MetaDataTuple]: """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2 templates evaluated. 
- Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" - from conda_build.render import render_recipe, finalize_metadata - from conda_build.exceptions import DependencyNeedsBuildingError - from conda_build.conda_interface import NoPackagesFoundError - from collections import OrderedDict + Returns a list of (metadata, need_download, need_reparse in env) tuples""" + + from conda.exceptions import NoPackagesFoundError + + from .exceptions import DependencyNeedsBuildingError + from .render import finalize_metadata, render_recipe + config = get_or_merge_config(config, **kwargs) - metadata_tuples = render_recipe(recipe_path, bypass_env_check=bypass_env_check, - no_download_source=config.no_download_source, - config=config, variants=variants, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) - output_metas = OrderedDict() + metadata_tuples = render_recipe( + recipe_path, + bypass_env_check=bypass_env_check, + no_download_source=config.no_download_source, + config=config, + variants=variants, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) + output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {} for meta, download, render_in_env in metadata_tuples: if not meta.skip() or not config.trim_skip: for od, om in meta.get_output_metadata_set( - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - permit_undefined_jinja=not finalize, - bypass_env_check=bypass_env_check): + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + permit_undefined_jinja=not finalize, + bypass_env_check=bypass_env_check, + ): if not om.skip() or not config.trim_skip: - if 'type' not in od or od['type'] == 'conda': + if "type" not in od or od["type"] == "conda": if finalize and not om.final: try: - om = finalize_metadata(om, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) + om = finalize_metadata( + om, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) except (DependencyNeedsBuildingError, NoPackagesFoundError): if not permit_unsatisfiable_variants: raise # remove outputs section from output objects for simplicity - if not om.path and om.meta.get('outputs'): - om.parent_outputs = om.meta['outputs'] - del om.meta['outputs'] - - output_metas[om.dist(), om.config.variant.get('target_platform'), - tuple((var, om.config.variant[var]) - for var in om.get_used_vars())] = \ - ((om, download, render_in_env)) + if not om.path and (outputs := om.get_section("outputs")): + om.parent_outputs = outputs + del om.meta["outputs"] + + output_metas[ + om.dist(), + om.config.variant.get("target_platform"), + tuple( + (var, om.config.variant[var]) + for var in om.get_used_vars() + ), + ] = MetaDataTuple(om, download, render_in_env) else: - output_metas[f"{om.type}: {om.name()}", om.config.variant.get('target_platform'), - tuple((var, om.config.variant[var]) - for var in om.get_used_vars())] = \ - ((om, download, render_in_env)) + output_metas[ + f"{om.type}: {om.name()}", + om.config.variant.get("target_platform"), + tuple( + (var, om.config.variant[var]) + for var in om.get_used_vars() + ), + ] = MetaDataTuple(om, download, render_in_env) return list(output_metas.values()) -def output_yaml(metadata, file_path=None, suppress_outputs=False): +def output_yaml( + metadata: MetaData, + file_path: str | os.PathLike | Path | None = None, + suppress_outputs: bool = False, +) -> str: """Save a rendered recipe in its final form to the path given by file_path""" - from conda_build.render import output_yaml + from .render 
import output_yaml + return output_yaml(metadata, file_path, suppress_outputs=suppress_outputs) -def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, config=None, - variants=None, **kwargs): +def get_output_file_paths( + recipe_path_or_metadata: str + | os.PathLike + | Path + | MetaData + | Iterable[MetaDataTuple], + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + **kwargs, +) -> list[str]: """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ - from conda_build.render import bldpkg_path - from conda_build.utils import get_skip_message - config = get_or_merge_config(config, **kwargs) + from .render import bldpkg_path - if hasattr(recipe_path_or_metadata, '__iter__') and not isinstance(recipe_path_or_metadata, - str): - list_of_metas = [hasattr(item[0], 'config') for item in recipe_path_or_metadata - if len(item) == 3] + config = get_or_merge_config(config, **kwargs) - if list_of_metas and all(list_of_metas): - metadata = recipe_path_or_metadata - else: - raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}") - elif isinstance(recipe_path_or_metadata, str): + if isinstance(recipe_path_or_metadata, (str, Path)): # first, render the parent recipe (potentially multiple outputs, depending on variants). - metadata = render(recipe_path_or_metadata, no_download_source=no_download_source, - variants=variants, config=config, finalize=True, **kwargs) + metadata_tuples = render( + recipe_path_or_metadata, + no_download_source=no_download_source, + variants=variants, + config=config, + finalize=True, + **kwargs, + ) + + elif isinstance(recipe_path_or_metadata, MetaData): + metadata_tuples = [MetaDataTuple(recipe_path_or_metadata, False, False)] + + elif isinstance(recipe_path_or_metadata, Iterable) and all( + isinstance(recipe, MetaDataTuple) + and isinstance(recipe.metadata, MetaData) + and isinstance(recipe.need_download, bool) + and isinstance(recipe.need_reparse, bool) + for recipe in recipe_path_or_metadata + ): + metadata_tuples = recipe_path_or_metadata + else: - assert hasattr(recipe_path_or_metadata, 'config'), ("Expecting metadata object - got {}" - .format(recipe_path_or_metadata)) - metadata = [(recipe_path_or_metadata, None, None)] - # Next, loop over outputs that each metadata defines + raise ValueError( + f"Unknown input type: {type(recipe_path_or_metadata)}; expecting " + "PathLike object, MetaData object, or a list of tuples containing " + "(MetaData, bool, bool)." + ) + + # Next, loop over outputs that each metadata defines outs = [] - for (m, _, _) in metadata: - if m.skip(): - outs.append(get_skip_message(m)) + for metadata, _, _ in metadata_tuples: + if metadata.skip(): + outs.append(get_skip_message(metadata)) else: - outs.append(bldpkg_path(m)) - return sorted(list(set(outs))) - - -def get_output_file_path(recipe_path_or_metadata, no_download_source=False, config=None, - variants=None, **kwargs): - """Get output file paths for any packages that would be created by a recipe - - Both split packages (recipes with more than one output) and build matrices, - created with variants, contribute to the list of file paths here. - """ - log = _get_logger(__name__) - log.warn("deprecation warning: this function has been renamed to get_output_file_paths, " - "to reflect that potentially multiple paths are returned. 
This function will be " - "removed in the conda-build 4.0 release.") - return get_output_file_paths(recipe_path_or_metadata, - no_download_source=no_download_source, - config=config, variants=variants, **kwargs) + outs.append(bldpkg_path(metadata)) + return sorted(set(outs)) -def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs): +def check( + recipe_path: str | os.PathLike | Path, + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + **kwargs, +) -> bool: """Check validity of input recipe path Verifies that recipe can be completely rendered, and that fields of the rendered recipe are valid fields, with some value checking. """ config = get_or_merge_config(config, **kwargs) - metadata = render(recipe_path, no_download_source=no_download_source, - config=config, variants=variants) + metadata = render( + recipe_path, + no_download_source=no_download_source, + config=config, + variants=variants, + ) return all(m[0].check_fields() for m in metadata) -def build(recipe_paths_or_metadata, post=None, need_source_download=True, - build_only=False, notest=False, config=None, variants=None, stats=None, - **kwargs): +def build( + recipe_paths_or_metadata: str | os.PathLike | Path | MetaData, + post: bool | None = None, + need_source_download: bool = True, + build_only: bool = False, + notest: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + stats: StatsDict | None = None, + **kwargs, +) -> list[str]: """Run the build step. If recipe paths are provided, renders recipe before building. Tests built packages by default. notest=True to skip test.""" - import os - from conda_build.build import build_tree - from conda_build.utils import find_recipe + from .build import build_tree - assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass " - "other arguments (config) by keyword.") + assert post in (None, True, False), ( + "post must be boolean or None. Remember, you must pass " + "other arguments (config) by keyword." 
+ ) - recipes = [] - for recipe in _ensure_list(recipe_paths_or_metadata): - if isinstance(recipe, str): - for recipe in _expand_globs(recipe, os.getcwd()): + recipes: list[str | MetaData] = [] + for recipe in ensure_list(recipe_paths_or_metadata): + if isinstance(recipe, (str, os.PathLike, Path)): + for recipe in expand_globs(recipe, os.getcwd()): try: - recipe = find_recipe(recipe) + recipes.append(find_recipe(recipe)) except OSError: continue - recipes.append(recipe) - elif hasattr(recipe, "config"): + elif isinstance(recipe, MetaData): recipes.append(recipe) else: raise ValueError(f"Recipe passed was unrecognized object: {recipe}") if not recipes: - raise ValueError(f'No valid recipes found for input: {recipe_paths_or_metadata}') + raise ValueError( + f"No valid recipes found for input: {recipe_paths_or_metadata}" + ) return build_tree( recipes, @@ -186,43 +256,53 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=build_only, post=post, notest=notest, - variants=variants + variants=variants, ) -def test(recipedir_or_package_or_metadata, move_broken=True, config=None, stats=None, **kwargs): +def test( + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + move_broken: bool = True, + config: Config | None = None, + stats: StatsDict | None = None, + **kwargs, +) -> bool: """Run tests on either packages (.tar.bz2 or extracted) or recipe folders For a recipe folder, it renders the recipe enough to know what package to download, and obtains it from your currently configuured channels.""" - from conda_build.build import test + from .build import test - if hasattr(recipedir_or_package_or_metadata, 'config'): + if hasattr(recipedir_or_package_or_metadata, "config"): config = recipedir_or_package_or_metadata.config else: config = get_or_merge_config(config, **kwargs) # if people don't pass in an object to capture stats in, they won't get them returned. # We'll still track them, though. - if not stats: - stats = {} + stats = stats or {} with config: # This will create a new local build folder if and only if config # doesn't already have one. What this means is that if we're # running a test immediately after build, we use the one that the # build already provided - test_result = test(recipedir_or_package_or_metadata, config=config, move_broken=move_broken, - stats=stats) - return test_result + return test( + recipedir_or_package_or_metadata, + config=config, + move_broken=move_broken, + stats=stats, + ) -def list_skeletons(): +def list_skeletons() -> list[str]: """List available skeletons for generating conda recipes from external sources. - The returned list is generally the names of supported repositories (pypi, cran, etc.)""" + The returned list is generally the names of supported repositories (pypi, cran, etc.) + """ import pkgutil - modules = pkgutil.iter_modules([join(dirname(__file__), 'skeletons')]) + + modules = pkgutil.iter_modules([join(dirname(__file__), "skeletons")]) files = [] for _, name, _ in modules: if not name.startswith("_"): @@ -230,34 +310,44 @@ def list_skeletons(): return files -def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, - config=None, **kwargs): +def skeletonize( + packages: str | Iterable[str], + repo: Literal["cpan", "cran", "luarocks", "pypi", "rpm"], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + config: Config | None = None, + **kwargs, +) -> None: """Generate a conda recipe from an external repo. 
Translates metadata from external sources into expected conda recipe format.""" version = getattr(config, "version", version) if version: - kwargs.update({'version': version}) + kwargs.update({"version": version}) if recursive: - kwargs.update({'recursive': recursive}) + kwargs.update({"recursive": recursive}) if output_dir != ".": output_dir = expanduser(output_dir) - kwargs.update({'output_dir': output_dir}) + kwargs.update({"output_dir": output_dir}) # here we're dumping all extra kwargs as attributes on the config object. We'll extract # only relevant ones below config = get_or_merge_config(config, **kwargs) - config.compute_build_id('skeleton') - packages = _ensure_list(packages) + config.compute_build_id("skeleton") + packages = ensure_list(packages) # This is a little bit of black magic. The idea is that for any keyword argument that # we inspect from the given module's skeletonize function, we should hoist the argument # off of the config object, and pass it as a keyword argument. This is sort of the # inverse of what we do in the CLI code - there we take CLI arguments and dangle them # all on the config object as attributes. - module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(), - fromlist=[repo]), - repo) + module = getattr( + __import__( + "conda_build.skeletons", globals=globals(), locals=locals(), fromlist=[repo] + ), + repo, + ) func_args = module.skeletonize.__code__.co_varnames kwargs = {name: getattr(config, name) for name in dir(config) if name in func_args} @@ -267,72 +357,126 @@ def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, if arg in kwargs: del kwargs[arg] with config: - skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version, - recursive=recursive, config=config, **kwargs) - return skeleton_return - - -def develop(recipe_dir, prefix=_sys.prefix, no_pth_file=False, - build_ext=False, clean=False, uninstall=False): + module.skeletonize( + packages, + output_dir=output_dir, + version=version, + recursive=recursive, + config=config, + **kwargs, + ) + + +def develop( + recipe_dir: str | Iterable[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: """Install a Python package in 'development mode'. -This works by creating a conda.pth file in site-packages.""" + This works by creating a conda.pth file in site-packages.""" from .develop import execute - recipe_dir = _ensure_list(recipe_dir) - return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) - -def convert(package_file, output_dir=".", show_imports=False, platforms=None, force=False, - dependencies=None, verbose=False, quiet=True, dry_run=False): + recipe_dir = ensure_list(recipe_dir) + execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) + + +def convert( + package_file: str, + output_dir: str = ".", + show_imports: bool = False, + platforms: str | Iterable[str] | None = None, + force: bool = False, + dependencies: str | Iterable[str] | None = None, + verbose: bool = False, + quiet: bool = True, + dry_run: bool = False, +) -> None: """Convert changes a package from one platform to another. 
It applies only to things that are portable, such as pure python, or header-only C/C++ libraries.""" from .convert import conda_convert - platforms = _ensure_list(platforms) - if package_file.endswith('tar.bz2'): - return conda_convert(package_file, output_dir=output_dir, show_imports=show_imports, - platforms=platforms, force=force, verbose=verbose, quiet=quiet, - dry_run=dry_run, dependencies=dependencies) - elif package_file.endswith('.whl'): - raise RuntimeError('Conversion from wheel packages is not ' - 'implemented yet, stay tuned.') + + platforms = ensure_list(platforms) + dependencies = ensure_list(dependencies) + if package_file.endswith("tar.bz2"): + return conda_convert( + package_file, + output_dir=output_dir, + show_imports=show_imports, + platforms=platforms, + force=force, + verbose=verbose, + quiet=quiet, + dry_run=dry_run, + dependencies=dependencies, + ) + elif package_file.endswith(".whl"): + raise RuntimeError( + "Conversion from wheel packages is not implemented yet, stay tuned." + ) else: - raise RuntimeError("cannot convert: %s" % package_file) + raise RuntimeError(f"cannot convert: {package_file}") -def test_installable(channel='defaults'): +def test_installable(channel: str = "defaults") -> bool: """Check to make sure that packages in channel are installable. This is a consistency check for the channel.""" from .inspect_pkg import test_installable + return test_installable(channel) -def inspect_linkages(packages, prefix=_sys.prefix, untracked=False, all_packages=False, - show_files=False, groupby='package', sysroot=''): +def inspect_linkages( + packages: str | Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package", "dependency"] = "package", + sysroot: str = "", +) -> str: from .inspect_pkg import inspect_linkages - packages = _ensure_list(packages) - return inspect_linkages(packages, prefix=prefix, untracked=untracked, all_packages=all_packages, - show_files=show_files, groupby=groupby, sysroot=sysroot) + + packages = ensure_list(packages) + return inspect_linkages( + packages, + prefix=prefix, + untracked=untracked, + all_packages=all_packages, + show_files=show_files, + groupby=groupby, + sysroot=sysroot, + ) -def inspect_objects(packages, prefix=_sys.prefix, groupby='filename'): +def inspect_objects(packages, prefix=sys.prefix, groupby="filename"): from .inspect_pkg import inspect_objects - packages = _ensure_list(packages) + + packages = ensure_list(packages) return inspect_objects(packages, prefix=prefix, groupby=groupby) def inspect_prefix_length(packages, min_prefix_length=_prefix_length): - from conda_build.tarcheck import check_prefix_lengths + from .tarcheck import check_prefix_lengths + config = Config(prefix_length=min_prefix_length) - packages = _ensure_list(packages) + packages = ensure_list(packages) prefix_lengths = check_prefix_lengths(packages, config) if prefix_lengths: - print("Packages with binary prefixes shorter than %d characters:" - % min_prefix_length) + print( + "Packages with binary prefixes shorter than %d characters:" + % min_prefix_length + ) for fn, length in prefix_lengths.items(): print(f"{fn} ({length} chars)") else: - print("No packages found with binary prefixes shorter than %d characters." - % min_prefix_length) + print( + "No packages found with binary prefixes shorter than %d characters." 
+ % min_prefix_length + ) return len(prefix_lengths) == 0 @@ -343,88 +487,111 @@ def inspect_hash_inputs(packages): from the package's info/hash_input.json file """ from .inspect_pkg import get_hash_input + return get_hash_input(packages) -def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, - dependencies=(), home=None, license_name=None, summary=None, - config=None, **kwargs): +def create_metapackage( + name, + version, + entry_points=(), + build_string=None, + build_number=0, + dependencies=(), + home=None, + license_name=None, + summary=None, + config=None, + **kwargs, +): from .metapackage import create_metapackage + config = get_or_merge_config(config, **kwargs) - return create_metapackage(name=name, version=version, entry_points=entry_points, - build_string=build_string, build_number=build_number, - dependencies=dependencies, home=home, - license_name=license_name, summary=summary, config=config) - - -def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False, channel_name=None, - subdir=None, threads=None, patch_generator=None, verbose=False, progress=False, - hotfix_source_repo=None, current_index_versions=None, **kwargs): - import yaml - import os - from conda_build.index import update_index - from conda_build.utils import ensure_list - dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)] - - if isinstance(current_index_versions, str): - with open(current_index_versions) as f: - current_index_versions = yaml.safe_load(f) - - for path in dir_paths: - update_index(path, check_md5=check_md5, channel_name=channel_name, - patch_generator=patch_generator, threads=threads, verbose=verbose, - progress=progress, hotfix_source_repo=hotfix_source_repo, - subdirs=ensure_list(subdir), current_index_versions=current_index_versions, - index_file=kwargs.get('index_file', None)) - - -def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, - output_id=None, config=None, verbose=True, link_source_method='auto', **kwargs): + return create_metapackage( + name=name, + version=version, + entry_points=entry_points, + build_string=build_string, + build_number=build_number, + dependencies=dependencies, + home=home, + license_name=license_name, + summary=summary, + config=config, + ) + + +def debug( + recipe_or_package_path_or_metadata_tuples, + path=None, + test=False, + output_id=None, + config=None, + verbose: bool = True, + link_source_method="auto", + **kwargs, +): """Set up either build/host or test environments, leaving you with a quick tool to debug your package's build or test phase. 
""" - from fnmatch import fnmatch import logging - import os import time - from conda_build.build import test as run_test, build as run_build - from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, on_win, LoggingContext + from fnmatch import fnmatch + + from .build import build as run_build + from .build import test as run_test + from .metadata import MetaData + is_package = False default_config = get_or_merge_config(config, **kwargs) args = {"set_build_id": False} path_is_build_dir = False - workdirs = [os.path.join(recipe_or_package_path_or_metadata_tuples, d) - for d in (os.listdir(recipe_or_package_path_or_metadata_tuples) if - os.path.isdir(recipe_or_package_path_or_metadata_tuples) else []) - if (d.startswith('work') and - os.path.isdir(os.path.join(recipe_or_package_path_or_metadata_tuples, d)))] - metadatas_conda_debug = [os.path.join(f, "metadata_conda_debug.yaml") for f in workdirs - if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml"))] + workdirs = [ + os.path.join(recipe_or_package_path_or_metadata_tuples, d) + for d in ( + os.listdir(recipe_or_package_path_or_metadata_tuples) + if os.path.isdir(recipe_or_package_path_or_metadata_tuples) + else [] + ) + if ( + d.startswith("work") + and os.path.isdir( + os.path.join(recipe_or_package_path_or_metadata_tuples, d) + ) + ) + ] + metadatas_conda_debug = [ + os.path.join(f, "metadata_conda_debug.yaml") + for f in workdirs + if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml")) + ] metadatas_conda_debug = sorted(metadatas_conda_debug) if len(metadatas_conda_debug): path_is_build_dir = True path = recipe_or_package_path_or_metadata_tuples if not path: path = os.path.join(default_config.croot, f"debug_{int(time.time() * 1000)}") - config = get_or_merge_config(config=default_config, croot=path, verbose=verbose, _prefix_length=10, - **args) + config = get_or_merge_config( + config=default_config, croot=path, verbose=verbose, _prefix_length=10, **args + ) config.channel_urls = get_channel_urls(kwargs) - metadata_tuples = [] + metadata_tuples: list[MetaDataTuple] = [] - best_link_source_method = 'skip' + best_link_source_method = "skip" if isinstance(recipe_or_package_path_or_metadata_tuples, str): if path_is_build_dir: for metadata_conda_debug in metadatas_conda_debug: - best_link_source_method = 'symlink' - from conda_build.metadata import MetaData + best_link_source_method = "symlink" metadata = MetaData(metadata_conda_debug, config, {}) - metadata_tuples.append((metadata, False, True)) + metadata_tuples.append(MetaDataTuple(metadata, False, True)) else: ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] if not ext or not any(ext in _ for _ in CONDA_PACKAGE_EXTENSIONS): - metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, config=config, **kwargs) + metadata_tuples = render( + recipe_or_package_path_or_metadata_tuples, config=config, **kwargs + ) else: # this is a package, we only support testing test = True @@ -436,15 +603,24 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, outputs = get_output_file_paths(metadata_tuples) matched_outputs = outputs if output_id: - matched_outputs = [_ for _ in outputs if fnmatch(os.path.basename(_), output_id)] + matched_outputs = [ + _ for _ in outputs if fnmatch(os.path.basename(_), output_id) + ] if len(matched_outputs) > 1: - raise ValueError("Specified --output-id matches more than one output ({}). 
Please refine your output id so that only " - "a single output is found.".format(matched_outputs)) + raise ValueError( + f"Specified --output-id matches more than one output ({matched_outputs}). " + "Please refine your output id so that only a single output is found." + ) elif not matched_outputs: - raise ValueError(f"Specified --output-id did not match any outputs. Available outputs are: {outputs} Please check it and try again") + raise ValueError( + f"Specified --output-id did not match any outputs. Available outputs are: {outputs} " + "Please check it and try again" + ) if len(matched_outputs) > 1 and not path_is_build_dir: - raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down " - "to a single output.".format(outputs)) + raise ValueError( + f"More than one output found for this recipe ({outputs}). " + "Please use the --output-id argument to filter down to a single output." + ) else: matched_outputs = outputs @@ -452,11 +628,15 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, # make sure that none of the _placehold stuff gets added to env paths target_metadata.config.prefix_length = 10 - if best_link_source_method == 'symlink': + if best_link_source_method == "symlink": for metadata, _, _ in metadata_tuples: - debug_source_loc = os.path.join(os.sep + 'usr', 'local', 'src', 'conda', - '{}-{}'.format(metadata.get_value('package/name'), - metadata.get_value('package/version'))) + debug_source_loc = os.path.join( + os.sep + "usr", + "local", + "src", + "conda", + f"{metadata.name()}-{metadata.version()}", + ) link_target = os.path.dirname(metadata.meta_path) try: dn = os.path.dirname(debug_source_loc) @@ -468,14 +648,18 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, os.unlink(debug_source_loc) except: pass - print(f"Making debug info source symlink: {debug_source_loc} => {link_target}") + print( + f"Making debug info source symlink: {debug_source_loc} => {link_target}" + ) os.symlink(link_target, debug_source_loc) except PermissionError as e: - raise Exception("You do not have the necessary permissions to create symlinks in {}\nerror: {}" - .format(dn, str(e))) + raise Exception( + f"You do not have the necessary permissions to create symlinks in {dn}\nerror: {str(e)}" + ) except Exception as e: - raise Exception("Unknown error creating symlinks in {}\nerror: {}" - .format(dn, str(e))) + raise Exception( + f"Unknown error creating symlinks in {dn}\nerror: {str(e)}" + ) ext = ".bat" if on_win else ".sh" if verbose: @@ -488,7 +672,10 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=target_metadata.config.work_dir, source="call" if on_win else "source", - activation_file=os.path.join(target_metadata.config.work_dir, activation_file)) + activation_file=os.path.join( + target_metadata.config.work_dir, activation_file + ), + ) elif not test: with log_context: run_build(target_metadata, stats={}, provision_only=True) @@ -496,11 +683,16 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=target_metadata.config.work_dir, source="call" if on_win else "source", - activation_file=os.path.join(target_metadata.config.work_dir, activation_file)) + activation_file=os.path.join( + target_metadata.config.work_dir, activation_file + ), + ) else: if not 
is_package: - raise ValueError("Debugging for test mode is only supported for package files that already exist. " - "Please build your package first, then use it to create the debugging environment.") + raise ValueError( + "Debugging for test mode is only supported for package files that already exist. " + "Please build your package first, then use it to create the debugging environment." + ) else: test_input = recipe_or_package_path_or_metadata_tuples # use the package to create an env and extract the test files. Stop short of running the tests. @@ -511,5 +703,6 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=config.test_dir, source="call" if on_win else "source", - activation_file=os.path.join(config.test_dir, activation_file)) + activation_file=os.path.join(config.test_dir, activation_file), + ) return activation_string diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py deleted file mode 100644 index d3b901e3a0..0000000000 --- a/conda_build/bdist_conda.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -bdist_conda - -""" - -import sys -import time - -from collections import defaultdict - -from distutils.command.install import install -from distutils.errors import DistutilsOptionError, DistutilsGetoptError -from distutils.dist import Distribution - -from conda_build.conda_interface import StringIO, configparser -from conda_build.conda_interface import spec_from_line -from conda_build.metadata import MetaData -from conda_build import api -from conda_build.skeletons import pypi -from conda_build.build import handle_anaconda_upload -from conda_build.config import Config - - -# TODO: Add support for all the options that conda build has - - -class CondaDistribution(Distribution): - """ - Distribution subclass that supports bdist_conda options - - This class is required if you want to pass any bdist_conda specific - options to setup(). To use, set distclass=CondaDistribution in setup(). - - **NOTE**: If you use setuptools, you must import setuptools before - importing distutils.commands.bdist_conda. - - Options that can be passed to setup() (must include - distclass=CondaDistribution): - - - conda_buildnum: The build number. Defaults to 0. Can be overridden on - the command line with the --buildnum flag. - - - conda_buildstr: The build string. Default is generated automatically - from the Python version, NumPy version if relevant, and the build - number, like py34_0. - - - conda_import_tests: Whether to automatically run import tests. The - default is True, which runs import tests for the all the modules in - "packages". Also allowed are False, which runs no tests, or a list of - module names to be tested on import. - - - conda_command_tests: Command line tests to run. Default is True, which - runs ``command --help`` for each ``command`` in the console_scripts and - gui_scripts entry_points. Also allowed are False, which doesn't run any - command tests, or a list of command tests to run. - - - conda_binary_relocation: Whether binary files should be made relocatable - (using install_name_tool on OS X or patchelf on Linux). The default is - True. See the "making packages relocatable" section in the conda build - documentation for more information on this. - - - conda_preserve_egg_dir: Whether to preserve the egg directory as - installed by setuptools. 
The default is True if the package depends on - setuptools or has a setuptools entry_points other than console_scripts - and gui_scripts. - - Command line options: - - --buildnum: Set the build number. Defaults to the conda_buildnum passed to - setup(), or 0. Overrides any conda_buildnum passed to setup(). - - """ - # Unfortunately, there's no way to warn the users that they need to use - # distclass=CondaDistribution when they try to use a conda option to - # setup(). Distribution.__init__ will just print a warning when it sees an - # attr it doesn't recognize, and then it is discarded. - - # attr: default - conda_attrs = { - 'conda_buildnum': 0, - 'conda_buildstr': None, - 'conda_import_tests': True, - 'conda_command_tests': True, - 'conda_binary_relocation': True, - 'conda_preserve_egg_dir': None, - 'conda_features': None, - 'conda_track_features': None, - } - - def __init__(self, attrs=None): - given_attrs = {} - # We need to remove the attrs so that Distribution.__init__ doesn't - # warn about them. - if attrs: - for attr in self.conda_attrs: - if attr in attrs: - given_attrs[attr] = attrs.pop(attr) - - super().__init__(attrs) - - for attr in self.conda_attrs: - setattr(self.metadata, attr, given_attrs.get(attr, self.conda_attrs[attr])) - - -class bdist_conda(install): - description = "create a conda package" - config = Config(build_id="bdist_conda" + "_" + str(int(time.time() * 1000)), - build_is_host=True) - - def initialize_options(self): - super().initialize_options() - self.buildnum = None - self.anaconda_upload = False - - def finalize_options(self): - opt_dict = self.distribution.get_option_dict('install') - if self.prefix: - raise DistutilsOptionError("--prefix is not allowed") - opt_dict['prefix'] = ("bdist_conda", self.config.host_prefix) - super().finalize_options() - - def run(self): - # Make sure the metadata has the conda attributes, even if the - # distclass isn't CondaDistribution. We primarily do this to simplify - # the code below. - - metadata = self.distribution.metadata - - for attr in CondaDistribution.conda_attrs: - if not hasattr(metadata, attr): - setattr(metadata, attr, - CondaDistribution.conda_attrs[attr]) - - # The command line takes precedence - if self.buildnum is not None: - metadata.conda_buildnum = self.buildnum - - d = defaultdict(dict) - # PyPI allows uppercase letters but conda does not, so we fix the - # name here. - d['package']['name'] = metadata.name.lower() - d['package']['version'] = metadata.version - d['build']['number'] = metadata.conda_buildnum - - # MetaData does the auto stuff if the build string is None - d['build']['string'] = metadata.conda_buildstr - - d['build']['binary_relocation'] = metadata.conda_binary_relocation - d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir - d['build']['features'] = metadata.conda_features - d['build']['track_features'] = metadata.conda_track_features - - # XXX: I'm not really sure if it is correct to combine requires - # and install_requires - d['requirements']['run'] = d['requirements']['build'] = \ - [spec_from_line(i) for i in - (metadata.requires or []) + - (getattr(self.distribution, 'install_requires', []) or - [])] + ['python'] - if hasattr(self.distribution, 'tests_require'): - # A lot of packages use extras_require['test'], but - # tests_require is the one that is officially supported by - # setuptools. - d['test']['requires'] = [spec_from_line(i) for i in - self.distribution.tests_require or []] - - d['about']['home'] = metadata.url - # Don't worry about classifiers. 
This isn't skeleton pypi. We - # don't need to make this work with random stuff in the wild. If - # someone writes their setup.py wrong and this doesn't work, it's - # their fault. - d['about']['license'] = metadata.license - d['about']['summary'] = metadata.description - - # This is similar logic from conda skeleton pypi - entry_points = getattr(self.distribution, 'entry_points', []) - if entry_points: - if isinstance(entry_points, str): - # makes sure it is left-shifted - newstr = "\n".join(x.strip() for x in - entry_points.splitlines()) - c = configparser.ConfigParser() - entry_points = {} - try: - c.read_file(StringIO(newstr)) - except Exception as err: - # This seems to be the best error here - raise DistutilsGetoptError("ERROR: entry-points not understood: " + - str(err) + "\nThe string was" + newstr) - else: - for section in c.sections(): - if section in ['console_scripts', 'gui_scripts']: - value = [f'{option}={c.get(section, option)}' - for option in c.options(section)] - entry_points[section] = value - else: - # Make sure setuptools is added as a dependency below - entry_points[section] = None - - if not isinstance(entry_points, dict): - raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" + - entry_points) - else: - rs = entry_points.get('scripts', []) - cs = entry_points.get('console_scripts', []) - gs = entry_points.get('gui_scripts', []) - # We have *other* kinds of entry-points so we need - # setuptools at run-time - if not rs and not cs and not gs and len(entry_points) > 1: - d['requirements']['run'].append('setuptools') - d['requirements']['build'].append('setuptools') - entry_list = rs + cs + gs - if gs and self.config.platform == 'osx': - d['build']['osx_is_app'] = True - if len(cs + gs) != 0: - d['build']['entry_points'] = entry_list - if metadata.conda_command_tests is True: - d['test']['commands'] = list(map(str, - pypi.make_entry_tests(entry_list))) - - if 'setuptools' in d['requirements']['run']: - d['build']['preserve_egg_dir'] = True - - if metadata.conda_import_tests: - if metadata.conda_import_tests is True: - d['test']['imports'] = ((self.distribution.packages or []) + - (self.distribution.py_modules or [])) - else: - d['test']['imports'] = metadata.conda_import_tests - - if (metadata.conda_command_tests and not - isinstance(metadata.conda_command_tests, - bool)): - d['test']['commands'] = list(map(str, metadata.conda_command_tests)) - - d = dict(d) - self.config.keep_old_work = True - m = MetaData.fromdict(d, config=self.config) - # Shouldn't fail, but do you really trust the code above? - m.check_fields() - m.config.set_build_id = False - m.config.variant['python'] = ".".join((str(sys.version_info.major), - str(sys.version_info.minor))) - api.build(m, build_only=True, notest=True) - self.config = m.config - # prevent changes in the build ID from here, so that we're working in the same prefix - # Do the install - super().run() - output = api.build(m, post=True, notest=True)[0] - api.test(output, config=m.config) - m.config.clean() - if self.anaconda_upload: - class args: - anaconda_upload = self.anaconda_upload - handle_anaconda_upload(output, args) - else: - no_upload_message = """\ -# If you want to upload this package to anaconda.org later, type: -# -# $ anaconda upload %s -""" % output - print(no_upload_message) - - -# Distutils looks for user_options on the class (not instance). It also -# requires that it is an instance of list. 
So we do this here because we want -# to keep the options from the superclass (and because I don't feel like -# making a metaclass just to make this work). - -bdist_conda.user_options.extend([ - ('buildnum=', None, '''The build number of - the conda package. Defaults to 0, or the conda_buildnum specified in the - setup() function. The command line flag overrides the option to - setup().'''), - ('anaconda-upload', None, ("""Upload the finished package to anaconda.org""")), -]) - -bdist_conda.boolean_options.extend(['anaconda-upload']) diff --git a/conda_build/build.py b/conda_build/build.py index c171896686..6dd2b49256 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1,16 +1,14 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module that does most of the heavy lifting for the ``conda build`` command. -''' +""" + +from __future__ import annotations -from collections import deque, OrderedDict import fnmatch -import glob2 import json import os -import warnings -from os.path import isdir, isfile, islink, join, dirname import random import re import shutil @@ -19,82 +17,100 @@ import subprocess import sys import time - -# this is to compensate for a requests idna encoding error. Conda is a better place to fix, -# eventually -# exception is raises: "LookupError: unknown encoding: idna" -# http://stackoverflow.com/a/13057751/1170370 -import encodings.idna # NOQA - -from bs4 import UnicodeDammit -import yaml +import warnings +from collections import OrderedDict, deque +from os.path import dirname, isdir, isfile, islink, join +from pathlib import Path +from typing import TYPE_CHECKING import conda_package_handling.api - -# used to get version -from .conda_interface import env_path_backup_var_exists, conda_45, conda_46 -from .conda_interface import prefix_placeholder -from .conda_interface import TemporaryDirectory -from .conda_interface import VersionOrder -from .conda_interface import CrossPlatformStLink -from .conda_interface import PathType, FileMode -from .conda_interface import EntityEncoder -from .conda_interface import get_rc_urls -from .conda_interface import url_path -from .conda_interface import root_dir -from .conda_interface import MatchSpec -from .conda_interface import reset_context -from .conda_interface import context -from .conda_interface import UnsatisfiableError -from .conda_interface import NoPackagesFoundError -from .conda_interface import CondaError -from .conda_interface import pkgs_dirs -from .conda_interface import get_conda_channel -from .utils import (CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, env_var, glob, - shutil_move_more_retrying, tmp_chdir) -from conda_build import environ, source, tarcheck, utils -from conda_build.config import Config -from conda_build.index import get_build_index, update_index -from conda_build.render import (output_yaml, bldpkg_path, render_recipe, reparse, distribute_variants, - expand_outputs, try_download, execute_download_actions, - add_upstream_pins) -import conda_build.os_utils.external as external -from conda_build.metadata import FIELDS, MetaData -from conda_build.post import (post_process, post_build, - fix_permissions, get_build_metadata) - -from conda_build.exceptions import DependencyNeedsBuildingError, CondaBuildException -from conda_build.variants import (set_language_env_vars, dict_of_lists_to_list_of_dicts, - get_package_variants) -from conda_build.create_test import create_all_test_files - -import conda_build.noarch_python as 
noarch_python - +import yaml +from bs4 import UnicodeDammit from conda import __version__ as conda_version -from conda_build import __version__ as conda_build_version - -if sys.platform == 'win32': - import conda_build.windows as windows - -if 'bsd' in sys.platform: - shell_path = '/bin/sh' +from conda.auxlib.entity import EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER +from conda.base.context import context, reset_context +from conda.core.prefix_data import PrefixData +from conda.exceptions import CondaError, NoPackagesFoundError, UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.channel import Channel +from conda.models.enums import FileMode, PathType +from conda.models.match_spec import MatchSpec +from conda.utils import url_path + +from . import __version__ as conda_build_version +from . import environ, noarch_python, source, tarcheck, utils +from .config import Config +from .create_test import create_all_test_files +from .exceptions import CondaBuildException, DependencyNeedsBuildingError +from .index import _delegated_update_index, get_build_index +from .metadata import FIELDS, MetaData +from .os_utils import external +from .post import ( + fix_permissions, + get_build_metadata, + post_build, + post_process, +) +from .render import ( + add_upstream_pins, + bldpkg_path, + distribute_variants, + execute_download_actions, + expand_outputs, + output_yaml, + render_recipe, + reparse, + try_download, +) +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + CONDA_PACKAGE_EXTENSIONS, + env_var, + glob, + on_linux, + on_mac, + on_win, + shutil_move_more_retrying, + tmp_chdir, + write_bat_activation_text, +) +from .variants import ( + dict_of_lists_to_list_of_dicts, + get_package_variants, + set_language_env_vars, +) + +if on_win: + from . import windows + +if TYPE_CHECKING: + from typing import Any, Iterable + +if "bsd" in sys.platform: + shell_path = "/bin/sh" elif utils.on_win: - shell_path = 'bash' + shell_path = "bash" else: - shell_path = '/bin/bash' + shell_path = "/bin/bash" def stats_key(metadata, desc): # get the build string from whatever conda-build makes of the configuration used_loop_vars = metadata.get_used_loop_vars() - build_vars = '-'.join([k + '_' + str(metadata.config.variant[k]) for k in used_loop_vars - if k != 'target_platform']) + build_vars = "-".join( + [ + k + "_" + str(metadata.config.variant[k]) + for k in used_loop_vars + if k != "target_platform" + ] + ) # kind of a special case. Target platform determines a lot of output behavior, but may not be # explicitly listed in the recipe. 
- tp = metadata.config.variant.get('target_platform') - if tp and tp != metadata.config.subdir and 'target_platform' not in build_vars: - build_vars += '-target_' + tp + tp = metadata.config.variant.get("target_platform") + if tp and tp != metadata.config.subdir and "target_platform" not in build_vars: + build_vars += "-target_" + tp key = [metadata.name(), metadata.version()] if build_vars: key.append(build_vars) @@ -114,37 +130,42 @@ def log_stats(stats_dict, descriptor): " Time elapsed: {elapsed}\n" "\n".format( descriptor=descriptor, - processes=stats_dict.get('processes', 1), - cpu_sys=utils.seconds2human(stats_dict["cpu_sys"]) if stats_dict.get("cpu_sys") else "-", - cpu_user=utils.seconds2human(stats_dict["cpu_user"]) if stats_dict.get("cpu_user") else "-", - memory=utils.bytes2human(stats_dict["rss"]) if stats_dict.get("rss") else "-", + processes=stats_dict.get("processes", 1), + cpu_sys=utils.seconds2human(stats_dict["cpu_sys"]) + if stats_dict.get("cpu_sys") + else "-", + cpu_user=utils.seconds2human(stats_dict["cpu_user"]) + if stats_dict.get("cpu_user") + else "-", + memory=utils.bytes2human(stats_dict["rss"]) + if stats_dict.get("rss") + else "-", disk=utils.bytes2human(stats_dict["disk"]), elapsed=utils.seconds2human(stats_dict["elapsed"]), ) ) -def create_post_scripts(m): - ''' +def create_post_scripts(m: MetaData): + """ Create scripts to run after build step - ''' - ext = '.bat' if utils.on_win else '.sh' - for tp in 'pre-link', 'post-link', 'pre-unlink': + """ + ext = ".bat" if utils.on_win else ".sh" + for tp in "pre-link", "post-link", "pre-unlink": # To have per-output link scripts they must be prefixed by the output name or be explicitly # specified in the build section - is_output = 'package:' not in m.get_recipe_text() + is_output = "package:" not in m.get_recipe_text() scriptname = tp if is_output: - if m.meta.get('build', {}).get(tp, ''): - scriptname = m.meta['build'][tp] - else: - scriptname = m.name() + '-' + tp + scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}") scriptname += ext - dst_name = '.' + m.name() + '-' + tp + ext + dst_name = f".{m.name()}-{tp}{ext}" src = join(m.path, scriptname) if isfile(src): - dst_dir = join(m.config.host_prefix, - 'Scripts' if m.config.host_subdir.startswith('win-') else 'bin') + dst_dir = join( + m.config.host_prefix, + "Scripts" if m.config.host_subdir.startswith("win-") else "bin", + ) if not isdir(dst_dir): os.makedirs(dst_dir, 0o775) dst = join(dst_dir, dst_name) @@ -153,115 +174,15 @@ def create_post_scripts(m): def prefix_replacement_excluded(path): - if path.endswith(('.pyc', '.pyo')) or not isfile(path): + if path.endswith((".pyc", ".pyo")) or not isfile(path): return True - if sys.platform != 'darwin' and islink(path): + if not on_mac and islink(path): # OSX does not allow hard-linking symbolic links, so we cannot # skip symbolic links (as we can on Linux) return True return False -def have_prefix_files(files, prefix): - ''' - Yields files that contain the current prefix in them, and modifies them - to replace the prefix with a placeholder. 
- - :param files: Filenames to check for instances of prefix - :type files: list of tuples containing strings (prefix, mode, filename) - ''' - - prefix_bytes = prefix.encode(utils.codec) - prefix_placeholder_bytes = prefix_placeholder.encode(utils.codec) - searches = {prefix: prefix_bytes} - if utils.on_win: - # some windows libraries use unix-style path separators - forward_slash_prefix = prefix.replace('\\', '/') - forward_slash_prefix_bytes = forward_slash_prefix.encode(utils.codec) - searches[forward_slash_prefix] = forward_slash_prefix_bytes - # some windows libraries have double backslashes as escaping - double_backslash_prefix = prefix.replace('\\', '\\\\') - double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) - searches[double_backslash_prefix] = double_backslash_prefix_bytes - searches[prefix_placeholder] = prefix_placeholder_bytes - min_prefix = min(len(k) for k, _ in searches.items()) - - # mm.find is incredibly slow, so ripgrep is used to pre-filter the list. - # Really, ripgrep could be used on its own with a bit more work though. - rg_matches = [] - prefix_len = len(prefix) + 1 - rg = external.find_executable('rg') - if rg: - for rep_prefix, _ in searches.items(): - try: - args = [rg, - '--unrestricted', - '--no-heading', - '--with-filename', - '--files-with-matches', - '--fixed-strings', - '--text', - rep_prefix, - prefix] - matches = subprocess.check_output(args) - rg_matches.extend(matches.decode('utf-8').replace('\r\n', '\n').splitlines()) - except subprocess.CalledProcessError: - continue - # HACK: this is basically os.path.relpath, just simpler and faster - # NOTE: path normalization needs to be in sync with create_info_files - if utils.on_win: - rg_matches = [rg_match.replace('\\', '/')[prefix_len:] for rg_match in rg_matches] - else: - rg_matches = [rg_match[prefix_len:] for rg_match in rg_matches] - else: - print("WARNING: Detecting which files contain PREFIX is slow, installing ripgrep makes it faster." - " 'conda install ripgrep'") - - for f in files: - if os.path.isabs(f): - f = f[prefix_len:] - if rg_matches and f not in rg_matches: - continue - path = os.path.join(prefix, f) - if prefix_replacement_excluded(path): - continue - - # dont try to mmap an empty file, and no point checking files that are smaller - # than the smallest prefix. - if os.stat(path).st_size < min_prefix: - continue - - try: - fi = open(path, 'rb+') - except OSError: - log = utils.get_logger(__name__) - log.warn("failed to open %s for detecting prefix. Skipping it." % f) - continue - try: - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) - except OSError: - mm = fi.read() - - mode = 'binary' if mm.find(b'\x00') != -1 else 'text' - if mode == 'text': - # TODO :: Ask why we do not do this on Windows too?! - if not utils.on_win and mm.find(prefix_bytes) != -1: - # Use the placeholder for maximal backwards compatibility, and - # to minimize the occurrences of usernames appearing in built - # packages. - data = mm[:] - mm.close() - fi.close() - rewrite_file_with_new_prefix(path, data, prefix_bytes, prefix_placeholder_bytes) - fi = open(path, 'rb+') - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) - for rep_prefix, rep_prefix_bytes in searches.items(): - if mm.find(rep_prefix_bytes) != -1: - yield (rep_prefix, mode, f) - mm.close() - fi.close() - - # It may be that when using the list form of passing args to subprocess # what matters is the number of arguments rather than the accumulated # string length. 
In that case, len(l[i]) should become 1, and we should @@ -275,36 +196,51 @@ def chunks(line, n): # + 3 incase a shell is used: 1 space and 2 quotes. size = size + len(line[i]) + 3 if i == len(line) - 1: - yield line[start:i + 1] + yield line[start : i + 1] elif size > n: - yield line[start:i + 1] + yield line[start : i + 1] start = i size = 0 def get_bytes_or_text_as_bytes(parent): - if 'bytes' in parent: - return parent['bytes'] - return parent['text'].encode('utf-8') - - -def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, - also_binaries=False, debug_this=False, match_records=OrderedDict()): + if "bytes" in parent: + return parent["bytes"] + return parent["text"].encode("utf-8") + + +def regex_files_rg( + files, + prefix, + tag, + rg, + regex_rg, + replacement_re, + also_binaries=False, + debug_this=False, + match_records=OrderedDict(), +): # If we run out of space for args (could happen) we'll need to either: # 1. Batching the calls. # 2. Call for all (text?) files by passing just 'prefix' then filter out ones we don't care about (slow). # 3. Use a shell prefixed with `cd prefix && ` (could still hit size limits, just later). # I have gone for batching! - args_base = [rg.encode('utf-8'), - b'--unrestricted', - b'--no-heading', - b'--with-filename', - b'--json', - regex_rg] - pu = prefix.encode('utf-8') - prefix_files = [os.path.join(pu, f.replace('/', os.sep).encode('utf-8')) for f in files] - args_len = len(b' '.join(args_base)) - file_lists = list(chunks(prefix_files, (32760 if utils.on_win else 131071) - args_len)) + args_base = [ + rg.encode("utf-8"), + b"--unrestricted", + b"--no-heading", + b"--with-filename", + b"--json", + regex_rg, + ] + pu = prefix.encode("utf-8") + prefix_files = [ + os.path.join(pu, f.replace("/", os.sep).encode("utf-8")) for f in files + ] + args_len = len(b" ".join(args_base)) + file_lists = list( + chunks(prefix_files, (32760 if utils.on_win else 131071) - args_len) + ) for file_list in file_lists: args = args_base[:] + file_list # This will not work now our args are binary strings: @@ -312,9 +248,11 @@ def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, # print(quote_for_shell(args)) try: if utils.on_win: - args = [a.decode('utf-8') for a in args] - matches = subprocess.check_output(args, shell=False).rstrip(b'\n').split(b'\n') - matches = b'[' + b','.join(matches) + b']\n' + args = [a.decode("utf-8") for a in args] + matches = ( + subprocess.check_output(args, shell=False).rstrip(b"\n").split(b"\n") + ) + matches = b"[" + b",".join(matches) + b"]\n" matches = json.loads(matches) except subprocess.CalledProcessError as _: # noqa # Just means rg returned 1 as no matches were found. @@ -322,61 +260,85 @@ def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, except Exception as e: raise e if matches: - stage = 'pre-begin' + stage = "pre-begin" for match in matches: - new_stage = match['type'] - if new_stage == 'begin': + new_stage = match["type"] + if new_stage == "begin": stage = new_stage - match_filename_begin = match['data']['path']['text'][len(prefix) + 1:].replace(os.sep, '/') - match_filename_type = 'unknown' + match_filename_begin = match["data"]["path"]["text"][ + len(prefix) + 1 : + ].replace(os.sep, "/") + match_filename_type = "unknown" # TODO :: Speed this up, and generalise it, the python version does similar. 
- with open(os.path.join(prefix, match_filename_begin), 'rb') as fh: + with open(os.path.join(prefix, match_filename_begin), "rb") as fh: data = mmap_or_read(fh) - match_filename_type = 'binary' if data.find(b'\x00') != -1 else 'text' - assert match_filename_type != 'unknown' - elif new_stage == 'match': + match_filename_type = ( + "binary" if data.find(b"\x00") != -1 else "text" + ) + assert match_filename_type != "unknown" + elif new_stage == "match": old_stage = stage - assert stage == 'begin' or stage == 'match' or stage == 'end' + assert stage == "begin" or stage == "match" or stage == "end" stage = new_stage - match_filename = match['data']['path']['text'][len(prefix) + 1:].replace(os.sep, '/') + match_filename = match["data"]["path"]["text"][ + len(prefix) + 1 : + ].replace(os.sep, "/") # Get stuff from the 'line' (to be consistent with the python version we ignore this). # match_line = get_bytes_or_text_as_bytes(match['data']['lines']) # match_line_number = match['data']['line_number'] # match_absolute_offset = match['data']['absolute_offset'] - if old_stage == 'begin': - assert match_filename_begin == match_filename, '{} != \n {}'\ - .format(match_filename_begin, match_filename) + if old_stage == "begin": + assert ( + match_filename_begin == match_filename + ), f"{match_filename_begin} != \n {match_filename}" if match_filename not in match_records: if debug_this: # We could add: #'line': match_line, 'line_number': match_line_number but it would # break our ability to compare against the python code. - match_records[match_filename] = {'type': match_filename_type, - 'submatches': []} + match_records[match_filename] = { + "type": match_filename_type, + "submatches": [], + } else: - match_records[match_filename] = {'type': match_filename_type, - 'submatches': []} - for submatch in match['data']['submatches']: - submatch_match_text = get_bytes_or_text_as_bytes(submatch['match']) - submatch_start = submatch['start'] + match['data']['absolute_offset'] - submatch_end = submatch['end'] + match['data']['absolute_offset'] + match_records[match_filename] = { + "type": match_filename_type, + "submatches": [], + } + for submatch in match["data"]["submatches"]: + submatch_match_text = get_bytes_or_text_as_bytes( + submatch["match"] + ) + submatch_start = ( + submatch["start"] + match["data"]["absolute_offset"] + ) + submatch_end = ( + submatch["end"] + match["data"]["absolute_offset"] + ) # print("{}({}) :: {}..{} = {}".format( # match_filename, match_line_number, # submatch_start, submatch_end, submatch_match_text)) - submatch_record = {'tag': tag, - 'text': submatch_match_text, - 'start': submatch_start, - 'end': submatch_end, - 'regex_re': regex_rg, - 'replacement_re': replacement_re} - if submatch_record not in match_records[match_filename]['submatches']: - match_records[match_filename]['submatches'].append(submatch_record) - elif new_stage == 'end': - assert stage == 'match' + submatch_record = { + "tag": tag, + "text": submatch_match_text, + "start": submatch_start, + "end": submatch_end, + "regex_re": regex_rg, + "replacement_re": replacement_re, + } + if ( + submatch_record + not in match_records[match_filename]["submatches"] + ): + match_records[match_filename]["submatches"].append( + submatch_record + ) + elif new_stage == "end": + assert stage == "match" stage = new_stage - elif new_stage == 'elpased_total': - assert stage == 'end' + elif new_stage == "elpased_total": + assert stage == "end" stage = new_stage - print('ELAPSED TOTAL') + print("ELAPSED TOTAL") return 
sort_matches(match_records) @@ -388,17 +350,25 @@ def mmap_or_read(fh): return mm -def regex_files_py(files, prefix, tag, regex_re, replacement_re, - also_binaries=False, match_records=OrderedDict()): +def regex_files_py( + files, + prefix, + tag, + regex_re, + replacement_re, + also_binaries=False, + match_records=OrderedDict(), +): import re + re_re = re.compile(regex_re) for file in files: - with open(join(prefix, file), 'rb+') as f: + with open(join(prefix, file), "rb+") as f: if os.fstat(f.fileno()).st_size == 0: continue data = mmap_or_read(f) - type = 'binary' if data.find(b'\x00') != -1 else 'text' - if not also_binaries and type == 'binary': + type = "binary" if data.find(b"\x00") != -1 else "text" + if not also_binaries and type == "binary": continue # data2 = f.read() for match in re.finditer(re_re, data): @@ -406,11 +376,15 @@ def regex_files_py(files, prefix, tag, regex_re, replacement_re, # absolute_offset = match.pos if file not in match_records: # Could add 'absolute_offset': absolute_offset, - match_records[file] = {'type': type, - 'submatches': []} + match_records[file] = {"type": type, "submatches": []} # else: # if match_records[file]['absolute_offset'] != absolute_offset: - # print("Dropping match.pos() of {}, neq {}".format(absolute_offset, match_records[file]['absolute_offset'])) + # print( + # "Dropping match.pos() of {}, neq {}".format( + # absolute_offset, + # match_records[file]['absolute_offset'], + # ) + # ) g_index = len(match.groups()) if g_index == 0: # Complete match. @@ -422,12 +396,16 @@ def regex_files_py(files, prefix, tag, regex_re, replacement_re, submatch_start = match.start(g_index) submatch_end = match.end(g_index) # print("found {} ({}..{})".format(submatch_match_text, submatch_start, submatch_end)) - match_records[file]['submatches'].append({'tag': tag, - 'text': submatch_match_text, - 'start': submatch_start, - 'end': submatch_end, - 'regex_re': regex_re, - 'replacement_re': replacement_re}) + match_records[file]["submatches"].append( + { + "tag": tag, + "text": submatch_match_text, + "start": submatch_start, + "end": submatch_end, + "regex_re": regex_re, + "replacement_re": replacement_re, + } + ) # assert data2[match.start(g_index):match.end(g_index)] == match_text # print(data2[match.start(g_index):match.end(g_index)]) return sort_matches(match_records) @@ -437,37 +415,48 @@ def regex_matches_tighten_re(match_records, regex_re, tag=None): # Do we need to shrink the matches? if match_records: import re + re_re = re.compile(regex_re) for filename, match in match_records.items(): - for submatch in match['submatches']: - if tag and submatch['tag'] != tag: + for submatch in match["submatches"]: + if tag and submatch["tag"] != tag: continue - match_re = re.match(re_re, submatch['text']) + match_re = re.match(re_re, submatch["text"]) if match_re: groups = match_re.groups() if groups: match_tigher = match_re.group(len(groups)) else: match_tigher = str(match_re) - if match_tigher != submatch['text']: + if match_tigher != submatch["text"]: # Assert we can find submatches correctly at their start and end in the line. 
- if 'line' in match: - assert (match['line'][submatch['start'] - - match['absolute_offset']:submatch['end'] - - match['absolute_offset']] == submatch['text']) - index = submatch['text'].find(match_tigher) + if "line" in match: + assert ( + match["line"][ + submatch["start"] + - match["absolute_offset"] : submatch["end"] + - match["absolute_offset"] + ] + == submatch["text"] + ) + index = submatch["text"].find(match_tigher) assert index != -1 - submatch['start'] += index - submatch['end'] = submatch['start'] + len(match_tigher) + submatch["start"] += index + submatch["end"] = submatch["start"] + len(match_tigher) # print("from {} to {} (index={})".format(submatch['text'], match_tigher, index)) - submatch['text'] = match_tigher + submatch["text"] = match_tigher # Assert we can still find submatches correctly at their start and end in the line. - if 'line' in match: - assert (match['line'][submatch['start'] - - match['absolute_offset']:submatch['end'] - - match['absolute_offset']] == submatch['text']) + if "line" in match: + assert ( + match["line"][ + submatch["start"] + - match["absolute_offset"] : submatch["end"] + - match["absolute_offset"] + ] + == submatch["text"] + ) # Even if the match was not tighter we overwrite the regex. - submatch['regex_re'] = regex_re + submatch["regex_re"] = regex_re else: print("ERROR :: Tighter regex_re does not match") return sort_matches(match_records) @@ -477,7 +466,7 @@ def regex_matches_tighten_re(match_records, regex_re, tag=None): def sort_matches(match_records): match_records_o = OrderedDict(sorted(match_records.items())) for file, match in match_records_o.items(): - match['submatches'] = sorted(match['submatches'], key=lambda x: x['start']) + match["submatches"] = sorted(match["submatches"], key=lambda x: x["start"]) return match_records_o @@ -485,19 +474,36 @@ def check_matches(prefix, match_records): print("::CHECKING MATCHES::") for file, match in match_records.items(): data = None - with open(join(prefix, file), 'rb+') as f: + with open(join(prefix, file), "rb+") as f: data = f.read() if data: - for submatch in match['submatches']: - file_content = data[submatch['start']:submatch['end']] - if file_content != submatch['text']: - print("ERROR :: file_content {} != submatch {}".format(file_content, submatch['text'])) - print("{} :: ({}..{}) = {}".format(file, submatch['start'], submatch['end'], submatch['text'])) + for submatch in match["submatches"]: + file_content = data[submatch["start"] : submatch["end"]] + if file_content != submatch["text"]: + print( + "ERROR :: file_content {} != submatch {}".format( + file_content, submatch["text"] + ) + ) + print( + "{} :: ({}..{}) = {}".format( + file, submatch["start"], submatch["end"], submatch["text"] + ) + ) -def have_regex_files(files, prefix, tag, regex_re, replacement_re, - also_binaries=False, match_records={}, regex_rg=None, debug=False): - ''' +def have_regex_files( + files, + prefix, + tag, + regex_re, + replacement_re, + also_binaries=False, + match_records={}, + regex_rg=None, + debug=False, +): + """ :param files: Filenames to check for instances of regex_re :param prefix: Prefix in which to search for these files :param regex_re: The regex to use @@ -511,44 +517,62 @@ def have_regex_files(files, prefix, tag, regex_re, replacement_re, decision. 
:param match_records: A dictionary of previous results should you wish to augment it :return: input match_records augmented with matches - ''' + """ if not len(files): return match_records import copy - match_records_rg, match_records_re = copy.deepcopy(match_records), copy.deepcopy(match_records) + + match_records_rg, match_records_re = ( + copy.deepcopy(match_records), + copy.deepcopy(match_records), + ) if not isinstance(regex_re, (bytes, bytearray)): - regex_re = regex_re.encode('utf-8') + regex_re = regex_re.encode("utf-8") if regex_rg and not isinstance(regex_rg, (bytes, bytearray)): - regex_rg = regex_rg.encode('utf-8') - rg = external.find_executable('rg') + regex_rg = regex_rg.encode("utf-8") + rg = external.find_executable("rg") if rg: - match_records_rg = regex_files_rg(files, prefix, tag, - rg, - regex_rg if regex_rg else regex_re, - replacement_re, - also_binaries=also_binaries, - debug_this=debug, - match_records=match_records_rg) + match_records_rg = regex_files_rg( + files, + prefix, + tag, + rg, + regex_rg if regex_rg else regex_re, + replacement_re, + also_binaries=also_binaries, + debug_this=debug, + match_records=match_records_rg, + ) if regex_rg and regex_re: match_records_rg = regex_matches_tighten_re(match_records_rg, regex_re, tag) if not rg or debug: - match_records_re = regex_files_py(files, prefix, tag, - regex_re if regex_re else regex_rg, - replacement_re, - also_binaries=also_binaries, - match_records=match_records_re) + match_records_re = regex_files_py( + files, + prefix, + tag, + regex_re if regex_re else regex_rg, + replacement_re, + also_binaries=also_binaries, + match_records=match_records_re, + ) if debug: check_matches(prefix, match_records_rg) check_matches(prefix, match_records_re) if match_records_rg != match_records_re: - for (k, v), (k2, v2) in zip(match_records_rg.items(), match_records_re.items()): + for (k, v), (k2, v2) in zip( + match_records_rg.items(), match_records_re.items() + ): if k != k2: print(f"File Mismatch:\n{k}\n{k2}") elif v != v2: print(f"Match Mismatch ({v}):\n{v2}\n{k}") - for submatch, submatch2 in zip(v['submatches'], v2['submatches']): + for submatch, submatch2 in zip( + v["submatches"], v2["submatches"] + ): if submatch != submatch2: - print(f"Submatch Mismatch ({submatch}):\n{submatch2}\n{k}") + print( + f"Submatch Mismatch ({submatch}):\n{submatch2}\n{k}" + ) return match_records_rg if rg else match_records_re @@ -558,7 +582,7 @@ def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix): st = os.stat(path) data = data.replace(old_prefix, new_prefix) # Save as - with open(path, 'wb') as fo: + with open(path, "wb") as fo: fo.write(data) os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w return data @@ -567,61 +591,71 @@ def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix): def perform_replacements(matches, prefix, verbose=False, diff=None): for file, match in matches.items(): filename = os.path.join(prefix, file) - filename_tmp = filename + '.cbpatch.tmp' + filename_tmp = filename + ".cbpatch.tmp" if os.path.exists(filename_tmp): os.unlink() shutil.copy2(filename, filename_tmp) - filename_short = filename.replace(prefix + os.sep, '') - print("Patching '{}' in {} {}".format(filename_short, - len(match['submatches']), - 'places' if len(match['submatches']) > 1 else 'place')) - with open(filename_tmp, 'wb+') as file_tmp: + filename_short = filename.replace(prefix + os.sep, "") + print( + "Patching '{}' in {} {}".format( + filename_short, + len(match["submatches"]), + "places" if 
len(match["submatches"]) > 1 else "place", + ) + ) + with open(filename_tmp, "wb+") as file_tmp: file_tmp.truncate() - with open(filename, 'rb') as file: + with open(filename, "rb") as file: last_index = 0 - for submatch in match['submatches']: - length = submatch['start'] - last_index + for submatch in match["submatches"]: + length = submatch["start"] - last_index data = file.read(length) assert len(data) == length file_tmp.write(data) - original = submatch['text'] + original = submatch["text"] # Ideally you wouldn't pass to this function any submatches with replacement_re of None, # Still, it's easily handled. - if submatch['replacement_re']: - replacement_re = submatch['replacement_re'] + if submatch["replacement_re"]: + replacement_re = submatch["replacement_re"] if not isinstance(replacement_re, (bytes, bytearray)): - replacement_re = replacement_re.encode('utf-8') - new_string = re.sub(submatch['regex_re'], replacement_re, original) + replacement_re = replacement_re.encode("utf-8") + new_string = re.sub( + submatch["regex_re"], replacement_re, original + ) else: new_string = original - if match['type'] == 'binary': + if match["type"] == "binary": if len(original) < len(new_string): - print("ERROR :: Cannot replace {} with {} in binary file {}".format(original, - new_string, - filename)) - new_string = new_string.ljust(len(original), b'\0') + print( + f"ERROR :: Cannot replace {original} with {new_string} in binary file {filename}" + ) + new_string = new_string.ljust(len(original), b"\0") assert len(new_string) == len(original) file_tmp.write(new_string) # discarded (but also verified) actual_original = file.read(len(original)) - if match['type'] == 'binary': + if match["type"] == "binary": assert actual_original == original last_index += length + len(original) - if submatch == match['submatches'][len(match['submatches']) - 1]: + if submatch == match["submatches"][len(match["submatches"]) - 1]: # Write the remainder. data = file.read() file_tmp.write(data) # Could assert the lengths of binaries are the same here for extra safety. if os.path.exists(filename_tmp): - if diff and match['type'] == 'text': + if diff and match["type"] == "text": diffo = f"Diff returned no difference after patching {filename_short}" # Always expect an exception. try: - diffo = subprocess.check_output([diff, '-urN', filename, filename_tmp], stderr=subprocess.PIPE) - print(f'WARNING :: Non-deferred patching of "{filename}" did not change it') + diffo = subprocess.check_output( + [diff, "-urN", filename, filename_tmp], stderr=subprocess.PIPE + ) + print( + f'WARNING :: Non-deferred patching of "{filename}" did not change it' + ) except subprocess.CalledProcessError as e: diffo = e.output - print(diffo.decode('utf-8')) + print(diffo.decode("utf-8")) if os.path.exists(filename): os.unlink(filename) shutil.move(filename_tmp, filename) @@ -629,7 +663,7 @@ def perform_replacements(matches, prefix, verbose=False, diff=None): def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None): files = utils.rec_glob(path, "*") - file_paths = sorted(f.replace(path + os.sep, '') for f in files) + file_paths = sorted(f.replace(path + os.sep, "") for f in files) # when this actually has a value, we're copying the top-level recipe into a subdirectory, # so that we have record of what parent recipe produced subpackages. 
@@ -637,36 +671,47 @@ def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None): dest_dir = join(dest_dir, destination_subdir) else: # exclude meta.yaml because the json dictionary captures its content - file_paths = [f for f in file_paths if not (f == 'meta.yaml' or - f == 'conda_build_config.yaml')] + file_paths = [ + f + for f in file_paths + if not (f == "meta.yaml" or f == "conda_build_config.yaml") + ] file_paths = utils.filter_files(file_paths, path) for f in file_paths: - utils.copy_into(join(path, f), join(dest_dir, f), - timeout=config.timeout, - locking=config.locking, clobber=True) + utils.copy_into( + join(path, f), + join(dest_dir, f), + timeout=config.timeout, + locking=config.locking, + clobber=True, + ) def _copy_output_recipe(m, dest_dir): - _copy_top_level_recipe(m.path, m.config, dest_dir, 'parent') + _copy_top_level_recipe(m.path, m.config, dest_dir, "parent") this_output = m.get_rendered_output(m.name()) or {} - install_script = this_output.get('script') + install_script = this_output.get("script") build_inputs = [] inputs = [install_script] + build_inputs file_paths = [script for script in inputs if script] file_paths = utils.filter_files(file_paths, m.path) for f in file_paths: - utils.copy_into(join(m.path, f), join(dest_dir, f), - timeout=m.config.timeout, - locking=m.config.locking, clobber=True) + utils.copy_into( + join(m.path, f), + join(dest_dir, f), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) def copy_recipe(m): if m.config.include_recipe and m.include_recipe(): # store the rendered meta.yaml file, plus information about where it came from # and what version of conda-build created it - recipe_dir = join(m.config.info_dir, 'recipe') + recipe_dir = join(m.config.info_dir, "recipe") try: os.makedirs(recipe_dir) except: @@ -682,27 +727,28 @@ def copy_recipe(m): output_metadata = m.copy() # hard code the build string, so that tests don't get it mixed up - build = output_metadata.meta.get('build', {}) - build['string'] = output_metadata.build_id() - output_metadata.meta['build'] = build + build = output_metadata.meta.get("build", {}) + build["string"] = output_metadata.build_id() + output_metadata.meta["build"] = build # just for lack of confusion, don't show outputs in final rendered recipes - if 'outputs' in output_metadata.meta: - del output_metadata.meta['outputs'] - if 'parent_recipe' in output_metadata.meta.get('extra', {}): - del output_metadata.meta['extra']['parent_recipe'] + if "outputs" in output_metadata.meta: + del output_metadata.meta["outputs"] + if "parent_recipe" in output_metadata.meta.get("extra", {}): + del output_metadata.meta["extra"]["parent_recipe"] - utils.sort_list_in_nested_structure(output_metadata.meta, - ('build/script', 'test/commands')) + utils.sort_list_in_nested_structure( + output_metadata.meta, ("build/script", "test/commands") + ) rendered = output_yaml(output_metadata) if original_recipe: - with open(original_recipe, 'rb') as f: + with open(original_recipe, "rb") as f: original_recipe_text = UnicodeDammit(f.read()).unicode_markup if not original_recipe or not original_recipe_text == rendered: - with open(join(recipe_dir, "meta.yaml"), 'w') as f: + with open(join(recipe_dir, "meta.yaml"), "w") as f: f.write(f"# This file created by conda-build {conda_build_version}\n") if original_recipe: f.write("# meta.yaml template originally from:\n") @@ -710,25 +756,33 @@ def copy_recipe(m): f.write("# ------------------------------------------------\n\n") f.write(rendered) if 
original_recipe: - utils.copy_into(original_recipe, os.path.join(recipe_dir, 'meta.yaml.template'), - timeout=m.config.timeout, locking=m.config.locking, clobber=True) + utils.copy_into( + original_recipe, + os.path.join(recipe_dir, "meta.yaml.template"), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) # dump the full variant in use for this package to the recipe folder - with open(os.path.join(recipe_dir, 'conda_build_config.yaml'), 'w') as f: + with open(os.path.join(recipe_dir, "conda_build_config.yaml"), "w") as f: yaml.dump(m.config.variant, f) def copy_readme(m): - readme = m.get_value('about/readme') + readme = m.get_value("about/readme") if readme: src = join(m.config.work_dir, readme) if not isfile(src): - sys.exit("Error: no readme file: %s" % readme) + sys.exit(f"Error: no readme file: {readme}") dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: - print("WARNING: anaconda.org only recognizes about/readme " - "as README.md and README.rst", file=sys.stderr) + print( + "WARNING: anaconda.org only recognizes about/readme " + "as README.md and README.rst", + file=sys.stderr, + ) def jsonify_info_yamls(m): @@ -741,17 +795,22 @@ def jsonify_info_yamls(m): for file in files: file = join(root, file) bn, ext = os.path.splitext(os.path.basename(file)) - if ext == '.yaml': - dst = join(m.config.info_dir, ijd, bn + '.json') + if ext == ".yaml": + dst = join(m.config.info_dir, ijd, bn + ".json") try: os.makedirs(os.path.dirname(dst)) except: pass - with open(file) as i, open(dst, 'w') as o: + with open(file) as i, open(dst, "w") as o: import yaml + yaml = yaml.full_load(i) - json.dump(yaml, o, sort_keys=True, indent=2, separators=(',', ': ')) - res.append(join(os.path.basename(m.config.info_dir), ijd, bn + '.json')) + json.dump( + yaml, o, sort_keys=True, indent=2, separators=(",", ": ") + ) + res.append( + join(os.path.basename(m.config.info_dir), ijd, bn + ".json") + ) return res @@ -764,7 +823,7 @@ def copy_license(m): def generic_copy(m, name, field): - all_files = utils.ensure_list(m.get_value(f'about/{field}', [])) + all_files = utils.ensure_list(m.get_value(f"about/{field}", [])) if not all_files: return count = 0 @@ -794,8 +853,9 @@ def generic_copy(m, name, field): filename = single_file utils.copy_into( src_file, - join(m.config.info_dir, f'{name}s', filename), m.config.timeout, - locking=m.config.locking + join(m.config.info_dir, f"{name}s", filename), + m.config.timeout, + locking=m.config.locking, ) else: raise ValueError( @@ -809,34 +869,39 @@ def generic_copy(m, name, field): def copy_recipe_log(m): # the purpose of this file is to capture some change history metadata that may tell people # why a given build was changed the way that it was - log_file = m.get_value('about/recipe_log_file') or "recipe_log.json" + log_file = m.get_value("about/recipe_log_file") or "recipe_log.json" # look in recipe folder first src_file = os.path.join(m.path, log_file) if not os.path.isfile(src_file): src_file = join(m.config.work_dir, log_file) if os.path.isfile(src_file): - utils.copy_into(src_file, - join(m.config.info_dir, 'recipe_log.json'), m.config.timeout, - locking=m.config.locking) + utils.copy_into( + src_file, + join(m.config.info_dir, "recipe_log.json"), + m.config.timeout, + locking=m.config.locking, + ) def copy_test_source_files(m, destination): - src_dir = '' + src_dir = "" if os.listdir(m.config.work_dir): src_dir = 
m.config.work_dir - elif hasattr(m.config, 'recipe_dir') and m.config.recipe_dir: - src_dir = os.path.join(m.config.recipe_dir, 'info', 'test') + elif hasattr(m.config, "recipe_dir") and m.config.recipe_dir: + src_dir = os.path.join(m.config.recipe_dir, "info", "test") src_dirs = [src_dir] - if os.path.isdir(os.path.join(src_dir, 'parent')): - src_dirs.append(os.path.join(src_dir, 'parent')) + if os.path.isdir(os.path.join(src_dir, "parent")): + src_dirs.append(os.path.join(src_dir, "parent")) for src_dir in src_dirs: if src_dir and os.path.isdir(src_dir) and src_dir != destination: - for pattern in utils.ensure_list(m.get_value('test/source_files', [])): - if utils.on_win and '\\' in pattern: - raise RuntimeError("test/source_files paths must use / " - "as the path delimiter on Windows") + for pattern in utils.ensure_list(m.get_value("test/source_files", [])): + if utils.on_win and "\\" in pattern: + raise RuntimeError( + "test/source_files paths must use / " + "as the path delimiter on Windows" + ) files = glob(join(src_dir, pattern)) if not files: msg = "Did not find any source_files for test with pattern {0}" @@ -845,17 +910,23 @@ def copy_test_source_files(m, destination): try: # disable locking to avoid locking a temporary directory (the extracted # test folder) - utils.copy_into(f, f.replace(src_dir, destination), m.config.timeout, - locking=False, clobber=True) + utils.copy_into( + f, + f.replace(src_dir, destination), + m.config.timeout, + locking=False, + clobber=True, + ) except OSError as e: log = utils.get_logger(__name__) - log.warn("Failed to copy {} into test files. Error was: {}".format(f, - str(e))) - for ext in '.pyc', '.pyo': + log.warn( + f"Failed to copy {f} into test files. Error was: {str(e)}" + ) + for ext in ".pyc", ".pyo": for f in utils.get_ext_files(destination, ext): os.remove(f) - recipe_test_files = m.get_value('test/files') + recipe_test_files = m.get_value("test/files") if recipe_test_files: orig_recipe_dir = m.path for pattern in recipe_test_files: @@ -863,17 +934,21 @@ def copy_test_source_files(m, destination): for f in files: basedir = orig_recipe_dir if not os.path.isfile(f): - basedir = os.path.join(orig_recipe_dir, 'parent') + basedir = os.path.join(orig_recipe_dir, "parent") dest = f.replace(basedir, destination) if f != dest: - utils.copy_into(f, f.replace(basedir, destination), - timeout=m.config.timeout, locking=m.config.locking, - clobber=True) + utils.copy_into( + f, + f.replace(basedir, destination), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) def write_hash_input(m): recipe_input = m.get_hash_contents() - with open(os.path.join(m.config.info_dir, 'hash_input.json'), 'w') as f: + with open(os.path.join(m.config.info_dir, "hash_input.json"), "w") as f: json.dump(recipe_input, f, indent=2) @@ -891,24 +966,33 @@ def get_all_replacements(variant): if isinstance(variant, Config): variant = variant.variant - if not variant or 'replacements' not in variant: + if not variant or "replacements" not in variant: # short circuit if no variant or no replacements keyword return [] - repl = variant['replacements'] - assert isinstance(repl, dict), f"Found 'replacements' ({repl}), but it is not a dict" - assert 'all_replacements' in repl, f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'" - - repl = repl['all_replacements'] - assert isinstance(repl, list), f"Found 'all_replacements' ({repl}), but it is not a list" + repl = variant["replacements"] + assert isinstance( + repl, dict + ), f"Found 
'replacements' ({repl}), but it is not a dict" + assert ( + "all_replacements" in repl + ), f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'" + + repl = repl["all_replacements"] + assert isinstance( + repl, list + ), f"Found 'all_replacements' ({repl}), but it is not a list" if repl: - assert isinstance(repl[0], dict), f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict" + assert isinstance( + repl[0], dict + ), f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict" return repl def get_files_with_prefix(m, replacements, files_in, prefix): import time + start = time.time() # It is nonsensical to replace anything in a symlink. files = sorted(f for f in files_in if not os.path.islink(os.path.join(prefix, f))) @@ -918,111 +1002,124 @@ def get_files_with_prefix(m, replacements, files_in, prefix): if ignore_files is True: ignore_types.update((FileMode.text.name, FileMode.binary.name)) ignore_files = [] - if (not m.get_value('build/detect_binary_files_with_prefix', True if not utils.on_win else False) and - not m.get_value('build/binary_has_prefix_files', None)): + if not m.get_value( + "build/detect_binary_files_with_prefix", True if not utils.on_win else False + ) and not m.get_value("build/binary_has_prefix_files", None): ignore_types.update((FileMode.binary.name,)) - files_with_prefix = [(None, FileMode.binary.name if - open(os.path.join(prefix, f), 'rb+').read().find(b'\x00') != -1 else - FileMode.text.name, f) for f in files] + files_with_prefix = [ + ( + None, + FileMode.binary.name + if open(os.path.join(prefix, f), "rb+").read().find(b"\x00") != -1 + else FileMode.text.name, + f, + ) + for f in files + ] ignore_files.extend( - f[2] for f in files_with_prefix if (f[1] in ignore_types and - f[2] not in ignore_files) or prefix_replacement_excluded(os.path.join(prefix, f[2]))) + f[2] + for f in files_with_prefix + if (f[1] in ignore_types and f[2] not in ignore_files) + or prefix_replacement_excluded(os.path.join(prefix, f[2])) + ) files_with_prefix = [f for f in files_with_prefix if f[2] not in ignore_files] - prefix_u = prefix.replace('\\', '/') if utils.on_win else prefix + prefix_u = prefix.replace("\\", "/") if utils.on_win else prefix # If we've cross compiled on Windows to unix, chances are many files will refer to Windows # paths. - if utils.on_win or m.config.subdir.startswith('win'): + if utils.on_win or m.config.subdir.startswith("win"): # TODO :: Should we also handle MSYS2 paths (/c/blah) here? Probably! 
- pfx_variants = [prefix[0].upper() + prefix[1:], - prefix[0].lower() + prefix[1:], - prefix_u, - prefix_placeholder.replace('\\', '\''), - prefix_placeholder.replace('/', '\\')] + pfx_variants = [ + prefix[0].upper() + prefix[1:], + prefix[0].lower() + prefix[1:], + prefix_u, + PREFIX_PLACEHOLDER.replace("\\", "'"), + PREFIX_PLACEHOLDER.replace("/", "\\"), + ] # some python/json files store an escaped version of prefix - pfx_variants.extend([pfx.replace('\\', '\\\\') for pfx in pfx_variants]) + pfx_variants.extend([pfx.replace("\\", "\\\\") for pfx in pfx_variants]) else: - pfx_variants = (prefix, prefix_placeholder) + pfx_variants = (prefix, PREFIX_PLACEHOLDER) # replacing \ with \\ here is for regex escaping - re_test = b'(' + b'|'.join(v.encode('utf-8').replace(b'\\', b'\\\\') for v in pfx_variants) + b')' - pfx_matches = have_regex_files([f[2] for f in files_with_prefix], prefix=prefix, - tag='prefix', - regex_re=re_test, - # We definitely do not want this as a replacement_re as it'd replace - # /opt/anaconda1anaconda2anaconda3 with the prefix. As it happens we - # do not do any replacement at all here. - # replacement_re=prefix.encode('utf-8').replace(b'\\', b'\\\\'), - replacement_re=None, - also_binaries=True, - match_records={}, - debug=m.config.debug) + re_test = ( + b"(" + + b"|".join(v.encode("utf-8").replace(b"\\", b"\\\\") for v in pfx_variants) + + b")" + ) + pfx_matches = have_regex_files( + [f[2] for f in files_with_prefix], + prefix=prefix, + tag="prefix", + regex_re=re_test, + # We definitely do not want this as a replacement_re as it'd replace + # /opt/anaconda1anaconda2anaconda3 with the prefix. As it happens we + # do not do any replacement at all here. + # replacement_re=prefix.encode('utf-8').replace(b'\\', b'\\\\'), + replacement_re=None, + also_binaries=True, + match_records={}, + debug=m.config.debug, + ) prefixes_for_file = {} # This is for Windows mainly, though we may want to allow multiple searches at once in a file on # all OSes some-day. It is harmless to do this on all systems anyway. 
for filename, match in pfx_matches.items(): - prefixes_for_file[filename] = {sm['text'] for sm in match['submatches']} + prefixes_for_file[filename] = {sm["text"] for sm in match["submatches"]} files_with_prefix_new = [] - for (_, mode, filename) in files_with_prefix: + for _, mode, filename in files_with_prefix: np = filename if np in prefixes_for_file and np in pfx_matches: for pfx in prefixes_for_file[np]: - files_with_prefix_new.append((pfx.decode('utf-8'), mode, filename)) + files_with_prefix_new.append((pfx.decode("utf-8"), mode, filename)) files_with_prefix = files_with_prefix_new all_matches = {} # variant = m.config.variant if 'replacements' in m.config.variant else m.config.variants - replacement_tags = '' + replacement_tags = "" if len(replacements): last = len(replacements) - 1 for index, replacement in enumerate(replacements): - all_matches = have_regex_files(files=[f for f in files if any( - glob2.fnmatch.fnmatch(f, r) for r in replacement['glob_patterns'])], - prefix=prefix, - tag=replacement['tag'], - regex_re=replacement['regex_re'], - replacement_re=replacement['replacement_re'], - match_records=all_matches, - regex_rg=replacement['regex_rg'] if 'regex_rg' in replacement else None, - debug=m.config.debug) - replacement_tags = replacement_tags + '"' + replacement['tag'] + ('"' if - index == last else '", ') + all_matches = have_regex_files( + files=[ + file + for file in files + if any( + fnmatch.fnmatch(file, pattern) + for pattern in replacement["glob_patterns"] + ) + ], + prefix=prefix, + tag=replacement["tag"], + regex_re=replacement["regex_re"], + replacement_re=replacement["replacement_re"], + match_records=all_matches, + regex_rg=replacement["regex_rg"] if "regex_rg" in replacement else None, + debug=m.config.debug, + ) + replacement_tags = ( + replacement_tags + + '"' + + replacement["tag"] + + ('"' if index == last else '", ') + ) perform_replacements(all_matches, prefix) end = time.time() - total_replacements = sum(map(lambda i: len(all_matches[i]['submatches']), all_matches)) - print("INFO :: Time taken to mark (prefix){}\n" - " {} replacements in {} files was {:.2f} seconds".format( - f" and mark+peform ({replacement_tags})" if replacement_tags else '', - total_replacements, len(all_matches), end - start)) - ''' - # Keeping this around just for a while. 
- files_with_prefix2 = sorted(have_prefix_files(files_in, prefix)) - end = time.time() - print("INFO :: Time taken to do replacements (prefix only) was: {}".format(end - start)) - - ignore_files = m.ignore_prefix_files() - ignore_types = set() - if not hasattr(ignore_files, "__iter__"): - if ignore_files is True: - ignore_types.update((FileMode.text.name, FileMode.binary.name)) - ignore_files = [] - if (not m.get_value('build/detect_binary_files_with_prefix', True) and - not m.get_value('build/binary_has_prefix_files', None)): - ignore_types.update((FileMode.binary.name,)) - # files_with_prefix is a list of tuples containing (prefix_placeholder, file_type, file_path) - ignore_files.extend( - f[2] for f in files_with_prefix2 if f[1] in ignore_types and f[2] not in ignore_files) - files_with_prefix2 = [f for f in files_with_prefix2 if f[2] not in ignore_files] - end2 = time.time() - print("INFO :: Time taken to do replacements (prefix only) was: {}".format(end2 - start2)) - files1 = set([f for _, _, f in files_with_prefix]) - files2 = set([f for _, _, f in files_with_prefix2]) - assert not (files2 - files1), "New ripgrep prefix search missed the following files:\n{}\n".format(files2 - files1) - ''' + total_replacements = sum( + map(lambda i: len(all_matches[i]["submatches"]), all_matches) + ) + print( + "INFO :: Time taken to mark (prefix){}\n" + " {} replacements in {} files was {:.2f} seconds".format( + f" and mark+peform ({replacement_tags})" if replacement_tags else "", + total_replacements, + len(all_matches), + end - start, + ) + ) return sorted(files_with_prefix) def record_prefix_files(m, files_with_prefix): - filtered = [] if not files_with_prefix: return filtered @@ -1045,42 +1142,61 @@ def record_prefix_files(m, files_with_prefix): # Don't do it everywhere because paths on Unix can contain quotes, # and we don't have a good method of escaping, and because older # versions of conda don't support quotes in has_prefix - fmt_str = '%s %s %s\n' + fmt_str = "%s %s %s\n" print("Files containing CONDA_PREFIX") print("-----------------------------") - detect_binary_files_with_prefix = m.get_value('build/detect_binary_files_with_prefix', - not len_binary_has_prefix_files and not utils.on_win) - with open(join(m.config.info_dir, 'has_prefix'), 'w') as fo: + detect_binary_files_with_prefix = m.get_value( + "build/detect_binary_files_with_prefix", + not len_binary_has_prefix_files and not utils.on_win, + ) + with open(join(m.config.info_dir, "has_prefix"), "w") as fo: for pfix, mode, fn in files_with_prefix: ignored_because = None - if (fn in binary_has_prefix_files or ((not len_binary_has_prefix_files or - detect_binary_files_with_prefix) and mode == 'binary')): + if fn in binary_has_prefix_files or ( + (not len_binary_has_prefix_files or detect_binary_files_with_prefix) + and mode == "binary" + ): if fn in binary_has_prefix_files: - if mode != 'binary': - mode = 'binary' - elif fn in binary_has_prefix_files and detect_binary_files_with_prefix: - print("File {} force-identified as 'binary', " - "But it is 'binary' anyway, suggest removing it from " - "`build/binary_has_prefix_files`".format(fn)) + if mode != "binary": + mode = "binary" + elif ( + fn in binary_has_prefix_files + and detect_binary_files_with_prefix + ): + print( + f"File {fn} force-identified as 'binary', " + "But it is 'binary' anyway, suggest removing it from " + "`build/binary_has_prefix_files`" + ) if fn in binary_has_prefix_files: binary_has_prefix_files.remove(fn) - elif (fn in text_has_prefix_files or (not 
len_text_has_prefix_files and mode == 'text') or - os.path.dirname(fn) == 'python-scripts'): - if mode != 'text': - mode = 'text' + elif ( + fn in text_has_prefix_files + or (not len_text_has_prefix_files and mode == "text") + or os.path.dirname(fn) == "python-scripts" + ): + if mode != "text": + mode = "text" elif fn in text_has_prefix_files and not len_text_has_prefix_files: - print("File {} force-identified as 'text', " - "But it is 'text' anyway, suggest removing it from " - "`build/has_prefix_files`".format(fn)) + print( + f"File {fn} force-identified as 'text', " + "But it is 'text' anyway, suggest removing it from " + "`build/has_prefix_files`" + ) if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: - ignored_because = " (not in build/%s_has_prefix_files)" % (mode) - - print("{fn} ({mode}): {action}{reason}".format(fn=fn, mode=mode, - action="Ignoring" if ignored_because else "Patching", - reason=ignored_because if ignored_because else "")) + ignored_because = f" (not in build/{mode}_has_prefix_files)" + + print( + "{fn} ({mode}): {action}{reason}".format( + fn=fn, + mode=mode, + action="Ignoring" if ignored_because else "Patching", + reason=ignored_because if ignored_because else "", + ) + ) if ignored_because is None: fo.write(fmt_str % (pfix, mode, fn)) filtered.append((pfix, mode, fn)) @@ -1088,9 +1204,11 @@ def record_prefix_files(m, files_with_prefix): # make sure we found all of the files expected errstr = "" for f in text_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f + errstr += f"Did not detect hard-coded path in {f} from has_prefix_files\n" for f in binary_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + errstr += ( + f"Did not detect hard-coded path in {f} from binary_has_prefix_files\n" + ) if errstr: raise RuntimeError(errstr) @@ -1098,41 +1216,41 @@ def record_prefix_files(m, files_with_prefix): def sanitize_channel(channel): - return get_conda_channel(channel).urls(with_credentials=False, subdirs=[''])[0] + return Channel.from_value(channel).urls(with_credentials=False, subdirs=[""])[0] def write_info_files_file(m, files): - entry_point_scripts = m.get_value('build/entry_points') + entry_point_scripts = m.get_value("build/entry_points") entry_point_script_names = get_entry_point_script_names(entry_point_scripts) - mode_dict = {'mode': 'w', 'encoding': 'utf-8'} - with open(join(m.config.info_dir, 'files'), **mode_dict) as fo: - if m.noarch == 'python': + mode_dict = {"mode": "w", "encoding": "utf-8"} + with open(join(m.config.info_dir, "files"), **mode_dict) as fo: + if m.noarch == "python": for f in sorted(files): if f.find("site-packages") >= 0: - fo.write(f[f.find("site-packages"):] + '\n') + fo.write(f[f.find("site-packages") :] + "\n") elif f.startswith("bin") and (f not in entry_point_script_names): - fo.write(f.replace("bin", "python-scripts") + '\n') + fo.write(f.replace("bin", "python-scripts") + "\n") elif f.startswith("Scripts") and (f not in entry_point_script_names): - fo.write(f.replace("Scripts", "python-scripts") + '\n') + fo.write(f.replace("Scripts", "python-scripts") + "\n") else: - fo.write(f + '\n') + fo.write(f + "\n") else: for f in sorted(files): - fo.write(f + '\n') + fo.write(f + "\n") def write_link_json(m): package_metadata = OrderedDict() - noarch_type = m.get_value('build/noarch') + noarch_type = m.get_value("build/noarch") if noarch_type: noarch_type_str = str(noarch_type) noarch_dict = 
OrderedDict(type=noarch_type_str) if noarch_type_str.lower() == "python": - entry_points = m.get_value('build/entry_points') + entry_points = m.get_value("build/entry_points") if entry_points: - noarch_dict['entry_points'] = entry_points - package_metadata['noarch'] = noarch_dict + noarch_dict["entry_points"] = entry_points + package_metadata["noarch"] = noarch_dict preferred_env = m.get_value("build/preferred_env") if preferred_env: @@ -1146,102 +1264,110 @@ def write_link_json(m): # now changed it to info/link.json. Still, we must indefinitely keep the key name # package_metadata_version, or we break conda. package_metadata["package_metadata_version"] = 1 - with open(os.path.join(m.config.info_dir, "link.json"), 'w') as fh: - fh.write(json.dumps(package_metadata, sort_keys=True, indent=2, separators=(',', ': '))) + with open(os.path.join(m.config.info_dir, "link.json"), "w") as fh: + fh.write( + json.dumps( + package_metadata, sort_keys=True, indent=2, separators=(",", ": ") + ) + ) def write_about_json(m): - with open(join(m.config.info_dir, 'about.json'), 'w') as fo: + with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value('about/%s' % key) + value = m.get_value(f"about/{key}") if value: d[key] = value if default is list: d[key] = utils.ensure_list(value) # for sake of reproducibility, record some conda info - d['conda_version'] = conda_version - d['conda_build_version'] = conda_build_version + d["conda_version"] = conda_version + d["conda_build_version"] = conda_build_version # conda env will be in most, but not necessarily all installations. # Don't die if we don't see it. stripped_channels = [] - for channel in get_rc_urls() + list(m.config.channel_urls): + for channel in (*context.channels, *m.config.channel_urls): stripped_channels.append(sanitize_channel(channel)) - d['channels'] = stripped_channels - evars = ['CIO_TEST'] + d["channels"] = stripped_channels + evars = ["CIO_TEST"] - d['env_vars'] = {ev: os.getenv(ev, '') for ev in evars} + d["env_vars"] = {ev: os.getenv(ev, "") for ev in evars} # Adding this to extra since its arbitrary info - extra = m.get_section('extra') + extra = m.get_section("extra") # Add burn-in information to extra if m.config.extra_meta: + log = utils.get_logger(__name__) + log.info( + "Adding the following extra-meta data to about.json: %s", + m.config.extra_meta, + ) extra.update(m.config.extra_meta) - env = environ.Environment(root_dir) - d['root_pkgs'] = env.package_specs() + d["root_pkgs"] = [ + f"{prec.name} {prec.version} {prec.build}" + for prec in PrefixData(context.root_prefix).iter_records() + ] # Include the extra section of the metadata in the about.json - d['extra'] = extra + d["extra"] = extra json.dump(d, fo, indent=2, sort_keys=True) -def write_info_json(m): +def write_info_json(m: MetaData): info_index = m.info_index() if m.pin_depends: # Wtih 'strict' depends, we will have pinned run deps during rendering - if m.pin_depends == 'strict': - runtime_deps = m.meta.get('requirements', {}).get('run', []) - info_index['depends'] = runtime_deps + if m.pin_depends == "strict": + runtime_deps = m.get_value("requirements/run", []) + info_index["depends"] = runtime_deps else: - runtime_deps = environ.get_pinned_deps(m, 'run') - with open(join(m.config.info_dir, 'requires'), 'w') as fo: + runtime_deps = environ.get_pinned_deps(m, "run") + with open(join(m.config.info_dir, "requires"), "w") as fo: fo.write( "# This file as created when building:\n" "#\n" - "# {}.tar.bz2 
(on '{}')\n" + f"# {m.dist()}.tar.bz2 (on '{m.config.build_subdir}')\n" "#\n" "# It can be used to create the runtime environment of this package using:\n" - "# $ conda create --name --file ".format( - m.dist(), - m.config.build_subdir, - ) + "# $ conda create --name --file " ) - for dist in sorted(runtime_deps + [' '.join(m.dist().rsplit('-', 2))]): - fo.write('%s\n' % '='.join(dist.split())) + for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): + fo.write("{}\n".format("=".join(dist.split()))) - mode_dict = {'mode': 'w', 'encoding': 'utf-8'} - with open(join(m.config.info_dir, 'index.json'), **mode_dict) as fo: + mode_dict = {"mode": "w", "encoding": "utf-8"} + with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: json.dump(info_index, fo, indent=2, sort_keys=True) def write_no_link(m, files): - no_link = m.get_value('build/no_link') + no_link = m.get_value("build/no_link") if no_link: if not isinstance(no_link, list): no_link = [no_link] - with open(join(m.config.info_dir, 'no_link'), 'w') as fo: + with open(join(m.config.info_dir, "no_link"), "w") as fo: for f in files: if any(fnmatch.fnmatch(f, p) for p in no_link): - fo.write(f + '\n') + fo.write(f + "\n") def get_entry_point_script_names(entry_point_scripts): scripts = [] for entry_point in entry_point_scripts: - cmd = entry_point[:entry_point.find("=")].strip() + cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: - scripts.append("Scripts\\%s-script.py" % cmd) - scripts.append("Scripts\\%s.exe" % cmd) + scripts.append(f"Scripts\\{cmd}-script.py") + scripts.append(f"Scripts\\{cmd}.exe") else: - scripts.append("bin/%s" % cmd) + scripts.append(f"bin/{cmd}") return scripts -def write_run_exports(m): - run_exports = m.meta.get('build', {}).get('run_exports', {}) +def write_run_exports(m: MetaData): + run_exports = m.get_value("build/run_exports", {}) if run_exports: - with open(os.path.join(m.config.info_dir, 'run_exports.json'), 'w') as f: - if not hasattr(run_exports, 'keys'): - run_exports = {'weak': run_exports} + with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f: + if not hasattr(run_exports, "keys"): + run_exports = {"weak": run_exports} for k in utils.RUN_EXPORTS_TYPES: if k in run_exports: run_exports[k] = utils.ensure_list(run_exports[k]) @@ -1249,17 +1375,17 @@ def write_run_exports(m): def create_info_files(m, replacements, files, prefix): - ''' + """ Creates the metadata files that will be stored in the built package. 
:param m: Package metadata :type m: Metadata :param files: Paths to files to include in package :type files: list of str - ''' + """ if utils.on_win: # make sure we use '/' path separators in metadata - files = [_f.replace('\\', '/') for _f in files] + files = [_f.replace("\\", "/") for _f in files] if m.config.filename_hashing: write_hash_input(m) @@ -1275,47 +1401,59 @@ def create_info_files(m, replacements, files, prefix): copy_recipe_log(m) files.extend(jsonify_info_yamls(m)) - create_all_test_files(m, test_dir=join(m.config.info_dir, 'test')) + create_all_test_files(m, test_dir=join(m.config.info_dir, "test")) if m.config.copy_test_source_files: - copy_test_source_files(m, join(m.config.info_dir, 'test')) + copy_test_source_files(m, join(m.config.info_dir, "test")) write_info_files_file(m, files) files_with_prefix = get_files_with_prefix(m, replacements, files, prefix) files_with_prefix = record_prefix_files(m, files_with_prefix) - checksums = create_info_files_json_v1(m, m.config.info_dir, prefix, files, files_with_prefix) + checksums = create_info_files_json_v1( + m, m.config.info_dir, prefix, files, files_with_prefix + ) write_no_link(m, files) - sources = m.get_section('source') - if hasattr(sources, 'keys'): - sources = [sources] - - with open(join(m.config.info_dir, 'git'), 'w', encoding='utf-8') as fo: - for src in sources: - if src.get('git_url'): - source.git_info(os.path.join(m.config.work_dir, src.get('folder', '')), - m.config.build_prefix, git=None, verbose=m.config.verbose, fo=fo) + with open(join(m.config.info_dir, "git"), "w", encoding="utf-8") as fo: + for source_dict in m.get_section("source"): + if source_dict.get("git_url"): + source.git_info( + os.path.join(m.config.work_dir, source_dict.get("folder", "")), + m.config.build_prefix, + git=None, + verbose=m.config.verbose, + fo=fo, + ) - if m.get_value('app/icon'): - utils.copy_into(join(m.path, m.get_value('app/icon')), - join(m.config.info_dir, 'icon.png'), - m.config.timeout, locking=m.config.locking) + if m.get_value("app/icon"): + utils.copy_into( + join(m.path, m.get_value("app/icon")), + join(m.config.info_dir, "icon.png"), + m.config.timeout, + locking=m.config.locking, + ) return checksums def get_short_path(m, target_file): - if m.noarch == 'python': - entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points')) + if m.noarch == "python": + entry_point_script_names = get_entry_point_script_names( + m.get_value("build/entry_points") + ) if target_file.find("site-packages") >= 0: - return target_file[target_file.find("site-packages"):] - elif target_file.startswith("bin") and (target_file not in entry_point_script_names): + return target_file[target_file.find("site-packages") :] + elif target_file.startswith("bin") and ( + target_file not in entry_point_script_names + ): return target_file.replace("bin", "python-scripts") - elif target_file.startswith("Scripts") and (target_file not in entry_point_script_names): + elif target_file.startswith("Scripts") and ( + target_file not in entry_point_script_names + ): return target_file.replace("Scripts", "python-scripts") else: return target_file - elif m.get_value('build/noarch_python', None): + elif m.get_value("build/noarch_python", None): return None else: return target_file @@ -1341,8 +1479,11 @@ def get_inode(file): def get_inode_paths(files, target_short_path, prefix): utils.ensure_list(files) target_short_path_inode = get_inode(join(prefix, target_short_path)) - hardlinked_files = [sp for sp in files - if os.lstat(join(prefix, 
sp)).st_ino == target_short_path_inode] + hardlinked_files = [ + sp + for sp in files + if os.lstat(join(prefix, sp)).st_ino == target_short_path_inode + ] return sorted(hardlinked_files) @@ -1379,14 +1520,14 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn('file %s is a symlink with no target' % path, UserWarning) + warnings.warn(f"file {path} is a symlink with no target", UserWarning) return 0 return 0 def build_info_files_json_v1(m, prefix, files, files_with_prefix): - no_link_files = m.get_value('build/no_link') + no_link_files = m.get_value("build/no_link") files_json = [] files_inodes = get_inodes(files, prefix) for fi in sorted(files): @@ -1394,7 +1535,7 @@ def build_info_files_json_v1(m, prefix, files, files_with_prefix): path = os.path.join(prefix, fi) short_path = get_short_path(m, fi) if short_path: - short_path = short_path.replace('\\', '/').replace('\\\\', '/') + short_path = short_path.replace("\\", "/").replace("\\\\", "/") file_info = { "_path": short_path, "sha256": utils.sha256_checksum(path), @@ -1412,10 +1553,16 @@ def build_info_files_json_v1(m, prefix, files, files_with_prefix): if prefix_placeholder and file_mode: file_info["prefix_placeholder"] = prefix_placeholder file_info["file_mode"] = file_mode - if file_info.get("path_type") == PathType.hardlink and CrossPlatformStLink.st_nlink( - path) > 1: + if ( + file_info.get("path_type") == PathType.hardlink + and os.stat(path).st_nlink > 1 + ): target_short_path_inode = get_inode(path) - inode_paths = [files[index] for index, ino in enumerate(files_inodes) if ino == target_short_path_inode] + inode_paths = [ + files[index] + for index, ino in enumerate(files_inodes) + if ino == target_short_path_inode + ] file_info["inode_paths"] = inode_paths files_json.append(file_info) return files_json @@ -1432,20 +1579,26 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix): # don't create info/paths.json file if this is an old noarch package if not m.noarch_python: - with open(join(info_dir, 'paths.json'), "w") as files_json: - json.dump(files_json_info, files_json, sort_keys=True, indent=2, separators=(',', ': '), - cls=EntityEncoder) + with open(join(info_dir, "paths.json"), "w") as files_json: + json.dump( + files_json_info, + files_json, + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) # Return a dict of file: sha1sum. We could (but currently do not) # use this to detect overlap and mutated overlap. checksums = dict() for file in files_json_files: - checksums[file['_path']] = file['sha256'] + checksums[file["_path"]] = file["sha256"] return checksums -def post_process_files(m, initial_prefix_files): - package_name = m.get_value('package/name') +def post_process_files(m: MetaData, initial_prefix_files): + package_name = m.name() host_prefix = m.config.host_prefix missing = [] for f in initial_prefix_files: @@ -1453,68 +1606,79 @@ def post_process_files(m, initial_prefix_files): missing.append(f) if len(missing): log = utils.get_logger(__name__) - log.warning("The install/build script(s) for {} deleted the following " - "files (from dependencies) from the prefix:\n{}\n" - "This will cause the post-link checks to mis-report. 
Please " - "try not to delete and files (DSOs in particular) from the " - "prefix".format(package_name, missing)) + log.warning( + f"The install/build script(s) for {package_name} deleted the following " + f"files (from dependencies) from the prefix:\n{missing}\n" + "This will cause the post-link checks to mis-report. Please " + "try not to delete and files (DSOs in particular) from the " + "prefix" + ) get_build_metadata(m) create_post_scripts(m) # this is new-style noarch, with a value of 'python' - if m.noarch != 'python': - utils.create_entry_points(m.get_value('build/entry_points'), config=m.config) + if m.noarch != "python": + utils.create_entry_points(m.get_value("build/entry_points"), config=m.config) current_prefix_files = utils.prefix_files(prefix=host_prefix) - python = (m.config.build_python if os.path.isfile(m.config.build_python) else - m.config.host_python) - post_process(package_name, m.get_value('package/version'), - sorted(current_prefix_files - initial_prefix_files), - prefix=host_prefix, - config=m.config, - preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')), - noarch=m.get_value('build/noarch'), - skip_compile_pyc=m.get_value('build/skip_compile_pyc')) + python = ( + m.config.build_python + if os.path.isfile(m.config.build_python) + else m.config.host_python + ) + post_process( + package_name, + m.version(), + sorted(current_prefix_files - initial_prefix_files), + prefix=host_prefix, + config=m.config, + preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")), + noarch=m.get_value("build/noarch"), + skip_compile_pyc=m.get_value("build/skip_compile_pyc"), + ) # The post processing may have deleted some files (like easy-install.pth) current_prefix_files = utils.prefix_files(prefix=host_prefix) new_files = sorted(current_prefix_files - initial_prefix_files) - ''' + """ if m.noarch == 'python' and m.config.subdir == 'win-32': # Delete any PIP-created .exe launchers and fix entry_points.txt # .. but we need to provide scripts instead here. - from conda_build.post import caseless_sepless_fnmatch + from .post import caseless_sepless_fnmatch exes = caseless_sepless_fnmatch(new_files, 'Scripts/*.exe') for ff in exes: os.unlink(os.path.join(m.config.host_prefix, ff)) new_files.remove(ff) - ''' + """ new_files = utils.filter_files(new_files, prefix=host_prefix) meta_dir = m.config.meta_dir if any(meta_dir in join(host_prefix, f) for f in new_files): - meta_files = (tuple(f for f in new_files if m.config.meta_dir in - join(host_prefix, f)),) + meta_files = ( + tuple(f for f in new_files if m.config.meta_dir in join(host_prefix, f)), + ) sys.exit( - "Error: Untracked file(s) {} found in conda-meta directory. This error usually comes " + f"Error: Untracked file(s) {meta_files} found in conda-meta directory. This error usually comes " "from using conda in the build script. Avoid doing this, as it can lead to packages " - "that include their dependencies.".format( - meta_files, - ) + "that include their dependencies." 
) post_build(m, new_files, build_python=python) - entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points')) - if m.noarch == 'python': + entry_point_script_names = get_entry_point_script_names( + m.get_value("build/entry_points") + ) + if m.noarch == "python": pkg_files = [fi for fi in new_files if fi not in entry_point_script_names] else: pkg_files = new_files # the legacy noarch - if m.get_value('build/noarch_python'): + if m.get_value("build/noarch_python"): noarch_python.transform(m, new_files, host_prefix) # new way: build/noarch: python - elif m.noarch == 'python': - noarch_python.populate_files(m, pkg_files, host_prefix, entry_point_script_names) + elif m.noarch == "python": + noarch_python.populate_files( + m, pkg_files, host_prefix, entry_point_script_names + ) current_prefix_files = utils.prefix_files(prefix=host_prefix) new_files = current_prefix_files - initial_prefix_files @@ -1523,11 +1687,11 @@ def post_process_files(m, initial_prefix_files): return new_files -def bundle_conda(output, metadata, env, stats, **kw): +def bundle_conda(output, metadata: MetaData, env, stats, **kw): log = utils.get_logger(__name__) - log.info('Packaging %s', metadata.dist()) + log.info("Packaging %s", metadata.dist()) get_all_replacements(metadata.config) - files = output.get('files', []) + files = output.get("files", []) # this is because without any requirements at all, we still need to have the host prefix exist try: @@ -1536,114 +1700,142 @@ def bundle_conda(output, metadata, env, stats, **kw): pass # Use script from recipe? - script = utils.ensure_list(metadata.get_value('build/script', None)) + script = utils.ensure_list(metadata.get_value("build/script", None)) # need to treat top-level stuff specially. build/script in top-level stuff should not be # re-run for an output with a similar name to the top-level recipe - is_output = 'package:' not in metadata.get_recipe_text() + is_output = "package:" not in metadata.get_recipe_text() # metadata.get_top_level_recipe_without_outputs is destructive to replacements. 
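Illustrative aside (not part of the patch itself): the output-script handling in the next hunk guesses an interpreter from the script name, resolves it inside the build prefix, and then runs the interpreter arguments followed by the script path. A simplified sketch of that flow, with a hypothetical guess_interpreter_sketch standing in for conda-build's guess_interpreter/find_executable pair:

import os
import subprocess

def guess_interpreter_sketch(script_path: str) -> list[str]:
    # Hypothetical mapping from suffix to interpreter command line.
    ext = os.path.splitext(script_path)[1].lower()
    return {".sh": ["bash"], ".bat": ["cmd.exe", "/c"], ".py": ["python"]}.get(ext, ["bash"])

def run_output_script(script_path: str, cwd: str, env: dict) -> None:
    args = guess_interpreter_sketch(script_path)
    # Same shape as the patch: interpreter (and its flags) first, then the script.
    subprocess.check_call([*args, script_path], cwd=cwd, env=env)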
replacements = get_all_replacements(metadata.config) - top_build = metadata.get_top_level_recipe_without_outputs().get('build', {}) or {} + top_build = metadata.get_top_level_recipe_without_outputs().get("build", {}) or {} activate_script = metadata.activate_build_script - if (script and not output.get('script')) and (is_output or not top_build.get('script')): + if (script and not output.get("script")) and ( + is_output or not top_build.get("script") + ): # do add in activation, but only if it's not disabled activate_script = metadata.config.activate - script = '\n'.join(script) + script = "\n".join(script) suffix = "bat" if utils.on_win else "sh" - script_fn = output.get('script') or f'output_script.{suffix}' - with open(os.path.join(metadata.config.work_dir, script_fn), 'w') as f: - f.write('\n') + script_fn = output.get("script") or f"output_script.{suffix}" + with open(os.path.join(metadata.config.work_dir, script_fn), "w") as f: + f.write("\n") f.write(script) - f.write('\n') - output['script'] = script_fn + f.write("\n") + output["script"] = script_fn - if output.get('script'): + if output.get("script"): env = environ.get_dict(m=metadata) - interpreter = output.get('script_interpreter') + interpreter = output.get("script_interpreter") if not interpreter: - interpreter_and_args = guess_interpreter(output['script']) - interpreter_and_args[0] = external.find_executable(interpreter_and_args[0], - metadata.config.build_prefix) - if not interpreter_and_args[0]: - log.error("Did not find an interpreter to run {}, looked for {}".format( - output['script'], interpreter_and_args[0])) - if 'system32' in interpreter_and_args[0] and 'bash' in interpreter_and_args[0]: - print("ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" - " use MSYS2 packages. Add `m2-base` and more (depending on what your" - " script needs) to `requirements/build` instead.") + args = list(guess_interpreter(output["script"])) + args[0] = external.find_executable(args[0], metadata.config.build_prefix) + if not args[0]: + log.error( + "Did not find an interpreter to run %s, looked for %s", + output["script"], + args[0], + ) + if "system32" in args[0] and "bash" in args[0]: + print( + "ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" + " use MSYS2 packages. Add `m2-base` and more (depending on what your" + " script needs) to `requirements/build` instead." 
+ ) sys.exit(1) else: - interpreter_and_args = interpreter.split(' ') + args = interpreter.split(" ") initial_files = utils.prefix_files(metadata.config.host_prefix) env_output = env.copy() - env_output['TOP_PKG_NAME'] = env['PKG_NAME'] - env_output['TOP_PKG_VERSION'] = env['PKG_VERSION'] - env_output['PKG_VERSION'] = metadata.version() - env_output['PKG_NAME'] = metadata.get_value('package/name') - env_output['RECIPE_DIR'] = metadata.path - env_output['MSYS2_PATH_TYPE'] = 'inherit' - env_output['CHERE_INVOKING'] = '1' - for var in utils.ensure_list(metadata.get_value('build/script_env')): - if '=' in var: - val = var.split('=', 1)[1] - var = var.split('=', 1)[0] + env_output["TOP_PKG_NAME"] = env["PKG_NAME"] + env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"] + env_output["PKG_VERSION"] = metadata.version() + env_output["PKG_NAME"] = metadata.name() + env_output["RECIPE_DIR"] = metadata.path + env_output["MSYS2_PATH_TYPE"] = "inherit" + env_output["CHERE_INVOKING"] = "1" + for var in utils.ensure_list(metadata.get_value("build/script_env")): + if "=" in var: + val = var.split("=", 1)[1] + var = var.split("=", 1)[0] elif var not in os.environ: - raise ValueError("env var '{}' specified in script_env, but is not set." - .format(var)) + warnings.warn( + f"The environment variable '{var}' specified in script_env is undefined.", + UserWarning, + ) + val = "" else: val = os.environ[var] env_output[var] = val - dest_file = os.path.join(metadata.config.work_dir, output['script']) - utils.copy_into(os.path.join(metadata.path, output['script']), dest_file) + dest_file = os.path.join(metadata.config.work_dir, output["script"]) + utils.copy_into(os.path.join(metadata.path, output["script"]), dest_file) from os import stat + st = stat(dest_file) os.chmod(dest_file, st.st_mode | 0o200) if activate_script: _write_activation_text(dest_file, metadata) bundle_stats = {} - utils.check_call_env(interpreter_and_args + [dest_file], - cwd=metadata.config.work_dir, env=env_output, stats=bundle_stats) + utils.check_call_env( + [*args, dest_file], + cwd=metadata.config.work_dir, + env=env_output, + stats=bundle_stats, + ) log_stats(bundle_stats, f"bundling {metadata.name()}") if stats is not None: - stats[stats_key(metadata, f'bundle_{metadata.name()}')] = bundle_stats + stats[stats_key(metadata, f"bundle_{metadata.name()}")] = bundle_stats if files: # Files is specified by the output # we exclude the list of files that we want to keep, so post-process picks them up as "new" - keep_files = {os.path.normpath(pth) - for pth in utils.expand_globs(files, metadata.config.host_prefix)} + keep_files = { + os.path.normpath(pth) + for pth in utils.expand_globs(files, metadata.config.host_prefix) + } pfx_files = set(utils.prefix_files(metadata.config.host_prefix)) - initial_files = {item for item in (pfx_files - keep_files) - if not any(keep_file.startswith(item + os.path.sep) - for keep_file in keep_files)} - elif not output.get('script'): + initial_files = { + item + for item in (pfx_files - keep_files) + if not any( + keep_file.startswith(item + os.path.sep) for keep_file in keep_files + ) + } + elif not output.get("script"): if not metadata.always_include_files(): - log.warn("No files or script found for output {}".format(output.get('name'))) - build_deps = metadata.get_value('requirements/build') - host_deps = metadata.get_value('requirements/host') + log.warn( + "No files or script found for output {}".format(output.get("name")) + ) + build_deps = metadata.get_value("requirements/build") + host_deps = 
metadata.get_value("requirements/host") build_pkgs = [pkg.split()[0] for pkg in build_deps] host_pkgs = [pkg.split()[0] for pkg in host_deps] - dangerous_double_deps = {'python': 'PYTHON', 'r-base': 'R'} + dangerous_double_deps = {"python": "PYTHON", "r-base": "R"} for dep, env_var_name in dangerous_double_deps.items(): if all(dep in pkgs_list for pkgs_list in (build_pkgs, host_pkgs)): - raise CondaBuildException("Empty package; {0} present in build and host deps. " - "You probably picked up the build environment's {0} " - " executable. You need to alter your recipe to " - " use the {1} env var in your recipe to " - "run that executable.".format(dep, env_var_name)) - elif (dep in build_pkgs and metadata.uses_new_style_compiler_activation): - link = ("https://conda.io/docs/user-guide/tasks/build-packages/" - "define-metadata.html#host") - raise CondaBuildException("Empty package; {0} dep present in build but not " - "host requirements. You need to move your {0} dep " - "to the host requirements section. See {1} for more " - "info." .format(dep, link)) + raise CondaBuildException( + f"Empty package; {dep} present in build and host deps. " + f"You probably picked up the build environment's {dep} " + " executable. You need to alter your recipe to " + f" use the {env_var_name} env var in your recipe to " + "run that executable." + ) + elif dep in build_pkgs and metadata.uses_new_style_compiler_activation: + link = ( + "https://conda.io/docs/user-guide/tasks/build-packages/" + "define-metadata.html#host" + ) + raise CondaBuildException( + f"Empty package; {dep} dep present in build but not " + f"host requirements. You need to move your {dep} dep " + f"to the host requirements section. See {link} for more " + "info." + ) initial_files = set(utils.prefix_files(metadata.config.host_prefix)) for pat in metadata.always_include_files(): @@ -1657,36 +1849,48 @@ def bundle_conda(output, metadata, env, stats, **kw): log.warn("Glob %s from always_include_files does not match any files", pat) files = post_process_files(metadata, initial_files) - if output.get('name') and output.get('name') != 'conda': - assert 'bin/conda' not in files and 'Scripts/conda.exe' not in files, ("Bug in conda-build " + if output.get("name") and output.get("name") != "conda": + assert "bin/conda" not in files and "Scripts/conda.exe" not in files, ( + "Bug in conda-build " "has included conda binary in package. Please report this on the conda-build issue " - "tracker.") + "tracker." 
+ ) # first filter is so that info_files does not pick up ignored files files = utils.filter_files(files, prefix=metadata.config.host_prefix) # this is also copying things like run_test.sh into info/recipe - utils.rm_rf(os.path.join(metadata.config.info_dir, 'test')) + utils.rm_rf(os.path.join(metadata.config.info_dir, "test")) with tmp_chdir(metadata.config.host_prefix): - output['checksums'] = create_info_files(metadata, replacements, files, prefix=metadata.config.host_prefix) + output["checksums"] = create_info_files( + metadata, replacements, files, prefix=metadata.config.host_prefix + ) # here we add the info files into the prefix, so we want to re-collect the files list prefix_files = set(utils.prefix_files(metadata.config.host_prefix)) - files = utils.filter_files(prefix_files - initial_files, prefix=metadata.config.host_prefix) + files = utils.filter_files( + prefix_files - initial_files, prefix=metadata.config.host_prefix + ) - basename = '-'.join([output['name'], metadata.version(), metadata.build_id()]) + basename = "-".join([output["name"], metadata.version(), metadata.build_id()]) tmp_archives = [] final_outputs = [] cph_kwargs = {} ext = CONDA_PACKAGE_EXTENSION_V1 - if (output.get('type') == 'conda_v2' or metadata.config.conda_pkg_format == "2"): + if output.get("type") == "conda_v2" or metadata.config.conda_pkg_format == "2": ext = CONDA_PACKAGE_EXTENSION_V2 cph_kwargs["compression_tuple"] = ( - '.tar.zst', 'zstd', f'zstd:compression-level={metadata.config.zstd_compression_level}' + ".tar.zst", + "zstd", + f"zstd:compression-level={metadata.config.zstd_compression_level}", ) with TemporaryDirectory() as tmp: conda_package_handling.api.create( - metadata.config.host_prefix, files, basename + ext, out_folder=tmp, **cph_kwargs + metadata.config.host_prefix, + files, + basename + ext, + out_folder=tmp, + **cph_kwargs, ) tmp_archives = [os.path.join(tmp, basename + ext)] @@ -1701,38 +1905,55 @@ def bundle_conda(output, metadata, env, stats, **kw): from conda_verify.verify import Verify except ImportError: Verify = None - log.warn("Importing conda-verify failed. Please be sure to test your packages. " - "conda install conda-verify to make this message go away.") + log.warn( + "Importing conda-verify failed. Please be sure to test your packages. " + "conda install conda-verify to make this message go away." + ) if getattr(metadata.config, "verify", False) and Verify: verifier = Verify() - checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) + - metadata.ignore_verify_codes()) + checks_to_ignore = ( + utils.ensure_list(metadata.config.ignore_verify_codes) + + metadata.ignore_verify_codes() + ) try: - verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore, - exit_on_error=metadata.config.exit_on_verify_error) + verifier.verify_package( + path_to_package=tmp_path, + checks_to_ignore=checks_to_ignore, + exit_on_error=metadata.config.exit_on_verify_error, + ) except KeyError as e: - log.warn("Package doesn't have necessary files. It might be too old to inspect." - "Legacy noarch packages are known to fail. Full message was {}".format(e)) + log.warn( + "Package doesn't have necessary files. It might be too old to inspect." + f"Legacy noarch packages are known to fail. 
Full message was {e}" + ) try: crossed_subdir = metadata.config.target_subdir except AttributeError: crossed_subdir = metadata.config.host_subdir - subdir = ('noarch' if (metadata.noarch or metadata.noarch_python) - else crossed_subdir) + subdir = ( + "noarch" + if (metadata.noarch or metadata.noarch_python) + else crossed_subdir + ) if metadata.config.output_folder: output_folder = os.path.join(metadata.config.output_folder, subdir) else: - output_folder = os.path.join(os.path.dirname(metadata.config.bldpkgs_dir), subdir) + output_folder = os.path.join( + os.path.dirname(metadata.config.bldpkgs_dir), subdir + ) final_output = os.path.join(output_folder, output_filename) if os.path.isfile(final_output): utils.rm_rf(final_output) # disable locking here. It's just a temp folder getting locked. Removing it proved to be # a major bottleneck. - utils.copy_into(tmp_path, final_output, metadata.config.timeout, - locking=False) + utils.copy_into( + tmp_path, final_output, metadata.config.timeout, locking=False + ) final_outputs.append(final_output) - update_index(os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1) + _delegated_update_index( + os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1 + ) # clean out host prefix so that this output's files don't interfere with other outputs # We have a backup of how things were before any output scripts ran. That's @@ -1740,9 +1961,10 @@ def bundle_conda(output, metadata, env, stats, **kw): if metadata.config.keep_old_work: prefix = metadata.config.host_prefix - dest = os.path.join(os.path.dirname(prefix), - '_'.join(('_h_env_moved', metadata.dist(), - metadata.config.host_subdir))) + dest = os.path.join( + os.path.dirname(prefix), + "_".join(("_h_env_moved", metadata.dist(), metadata.config.host_subdir)), + ) shutil_move_more_retrying(prefix, dest, "host env") else: utils.rm_rf(metadata.config.host_prefix) @@ -1750,38 +1972,46 @@ def bundle_conda(output, metadata, env, stats, **kw): return final_outputs -def bundle_wheel(output, metadata, env, stats): +def bundle_wheel(output, metadata: MetaData, env, stats): ext = ".bat" if utils.on_win else ".sh" with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir): - dest_file = os.path.join(metadata.config.work_dir, 'wheel_output' + ext) - with open(dest_file, 'w') as f: - f.write('\n') - f.write(f'pip wheel --wheel-dir {tmpdir} --no-deps .') - f.write('\n') + dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext) + with open(dest_file, "w") as f: + f.write("\n") + f.write(f"pip wheel --wheel-dir {tmpdir} --no-deps .") + f.write("\n") if metadata.config.activate: _write_activation_text(dest_file, metadata) # run the appropriate script env = environ.get_dict(m=metadata).copy() - env['TOP_PKG_NAME'] = env['PKG_NAME'] - env['TOP_PKG_VERSION'] = env['PKG_VERSION'] - env['PKG_VERSION'] = metadata.version() - env['PKG_NAME'] = metadata.get_value('package/name') - interpreter_and_args = guess_interpreter(dest_file) + env["TOP_PKG_NAME"] = env["PKG_NAME"] + env["TOP_PKG_VERSION"] = env["PKG_VERSION"] + env["PKG_VERSION"] = metadata.version() + env["PKG_NAME"] = metadata.name() + args = guess_interpreter(dest_file) bundle_stats = {} - utils.check_call_env(interpreter_and_args + [dest_file], - cwd=metadata.config.work_dir, env=env, stats=bundle_stats) + utils.check_call_env( + [*args, dest_file], + cwd=metadata.config.work_dir, + env=env, + stats=bundle_stats, + ) log_stats(bundle_stats, f"bundling wheel {metadata.name()}") if stats is not 
None: - stats[stats_key(metadata, f'bundle_wheel_{metadata.name()}')] = bundle_stats + stats[stats_key(metadata, f"bundle_wheel_{metadata.name()}")] = bundle_stats wheel_files = glob(os.path.join(tmpdir, "*.whl")) if not wheel_files: - raise RuntimeError("Wheel creation failed. Please see output above to debug.") + raise RuntimeError( + "Wheel creation failed. Please see output above to debug." + ) wheel_file = wheel_files[0] if metadata.config.output_folder: - output_folder = os.path.join(metadata.config.output_folder, metadata.config.subdir) + output_folder = os.path.join( + metadata.config.output_folder, metadata.config.subdir + ) else: output_folder = metadata.config.bldpkgs_dir utils.copy_into(wheel_file, output_folder, locking=metadata.config.locking) @@ -1789,37 +2019,33 @@ def bundle_wheel(output, metadata, env, stats): def scan_metadata(path): - ''' + """ Scan all json files in 'path' and return a dictionary with their contents. Files are assumed to be in 'index.json' format. - ''' + """ installed = dict() - for filename in glob(os.path.join(path, '*.json')): + for filename in glob(os.path.join(path, "*.json")): with open(filename) as file: data = json.load(file) - installed[data['name']] = data + installed[data["name"]] = data return installed bundlers = { - 'conda': bundle_conda, - 'conda_v2': bundle_conda, - 'wheel': bundle_wheel, + "conda": bundle_conda, + "conda_v2": bundle_conda, + "wheel": bundle_wheel, } def _write_sh_activation_text(file_handle, m): cygpath_prefix = "$(cygpath -u " if utils.on_win else "" cygpath_suffix = " )" if utils.on_win else "" - activate_path = ''.join((cygpath_prefix, - os.path.join(utils.root_script_dir, 'activate').replace('\\', '\\\\'), - cygpath_suffix)) - - if conda_46: - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' - file_handle.write( - f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" - ) + + py_flags = "-I -m" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "-m" + file_handle.write( + f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + ) if m.is_cross: # HACK: we need both build and host envs "active" - i.e. 
on PATH, @@ -1838,179 +2064,214 @@ def _write_sh_activation_text(file_handle, m): # exists to identify a valid conda environment # conda 4.6 changes this one final time, by adding a '--stack' flag to the 'activate' # command, and 'activate' does not stack environments by default without that flag - history_file = join(m.config.host_prefix, 'conda-meta', 'history') + history_file = join(m.config.host_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() - host_prefix_path = ''.join((cygpath_prefix, - m.config.host_prefix.replace('\\', '\\\\'), - cygpath_suffix)) - if conda_46: - file_handle.write(f"conda activate \"{host_prefix_path}\"\n") - else: - file_handle.write('source "{}" "{}"\n' .format(activate_path, host_prefix_path)) - file_handle.write('unset CONDA_PATH_BACKUP\n') - file_handle.write('export CONDA_MAX_SHLVL=2\n') + open(history_file, "a").close() + host_prefix_path = "".join( + (cygpath_prefix, m.config.host_prefix.replace("\\", "\\\\"), cygpath_suffix) + ) + file_handle.write(f'conda activate "{host_prefix_path}"\n') # Write build prefix activation AFTER host prefix, so that its executables come first - build_prefix_path = ''.join((cygpath_prefix, - m.config.build_prefix.replace('\\', '\\\\'), - cygpath_suffix)) - - if conda_46: - # Do not stack against base env when not cross. - stack = '--stack' if m.is_cross else '' - file_handle.write(f"conda activate {stack} \"{build_prefix_path}\"\n") - else: - file_handle.write(f'source "{activate_path}" "{build_prefix_path}"\n') + build_prefix_path = "".join( + (cygpath_prefix, m.config.build_prefix.replace("\\", "\\\\"), cygpath_suffix) + ) + + # Do not stack against base env when not cross. + stack = "--stack" if m.is_cross else "" + file_handle.write(f'conda activate {stack} "{build_prefix_path}"\n') - from conda_build.os_utils.external import find_executable - ccache = find_executable('ccache', m.config.build_prefix, False) + from .os_utils.external import find_executable + + ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: if isinstance(ccache, list): ccache = ccache[0] ccache_methods = {} - ccache_methods['env_vars'] = False - ccache_methods['symlinks'] = False - ccache_methods['native'] = False - if hasattr(m.config, 'ccache_method'): + ccache_methods["env_vars"] = False + ccache_methods["symlinks"] = False + ccache_methods["native"] = False + if hasattr(m.config, "ccache_method"): ccache_methods[m.config.ccache_method] = True done_necessary_env = False for method, value in ccache_methods.items(): if value: if not done_necessary_env: # file_handle.write( - # 'export CCACHE_SLOPPINESS="pch_defines,time_macros${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n') + # 'export CCACHE_SLOPPINESS="pch_defines,time_macros' + # '${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n' + # ) # file_handle.write('export CCACHE_CPP2=true\n') done_necessary_env = True - if method == 'symlinks': - dirname_ccache_ln_bin = join(m.config.build_prefix, 'ccache-ln-bin') - file_handle.write(f'mkdir {dirname_ccache_ln_bin}\n') - file_handle.write(f'pushd {dirname_ccache_ln_bin}\n') + if method == "symlinks": + dirname_ccache_ln_bin = join(m.config.build_prefix, "ccache-ln-bin") + file_handle.write(f"mkdir {dirname_ccache_ln_bin}\n") + file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") file_handle.write('if [ -n "$CC" ]; then\n') - file_handle.write(' [ -f {ccache} ] && [ ! 
-f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n'.format(ccache=ccache)) - file_handle.write('fi\n') + file_handle.write( + f" [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n" + ) + file_handle.write("fi\n") file_handle.write('if [ -n "$CXX" ]; then\n') - file_handle.write(' [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n'.format(ccache=ccache)) - file_handle.write('fi\n') - file_handle.write('popd\n') + file_handle.write( + f" [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n" + ) + file_handle.write("fi\n") + file_handle.write("popd\n") # We really don't want to be doing this. file_handle.write(f'export "PATH={dirname_ccache_ln_bin}:$PATH"\n') - elif method == 'env_vars': + elif method == "env_vars": file_handle.write(f'export CC="{ccache} $CC"\n') file_handle.write(f'export CXX="{ccache} $CXX"\n') file_handle.write(f'export LD="{ccache} $LD"\n') - elif method == 'native': + elif method == "native": pass else: print("ccache method {} not implemented") # conda 4.4 requires a conda-meta/history file for a valid conda prefix - history_file = join(m.config.build_prefix, 'conda-meta', 'history') + history_file = join(m.config.build_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() + open(history_file, "a").close() def _write_activation_text(script_path, m): - with open(script_path, 'r+') as fh: + with open(script_path, "r+") as fh: data = fh.read() fh.seek(0) if os.path.splitext(script_path)[1].lower() == ".bat": - if m.config.build_subdir.startswith('win'): - from conda_build.utils import write_bat_activation_text write_bat_activation_text(fh, m) elif os.path.splitext(script_path)[1].lower() == ".sh": _write_sh_activation_text(fh, m) else: log = utils.get_logger(__name__) - log.warn("not adding activation to {} - I don't know how to do so for " - "this file type".format(script_path)) + log.warn( + f"not adding activation to {script_path} - I don't know how to do so for " + "this file type" + ) fh.write(data) -def create_build_envs(m, notest): - build_ms_deps = m.ms_depends('build') +def create_build_envs(m: MetaData, notest): + build_ms_deps = m.ms_depends("build") build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps] - host_ms_deps = m.ms_depends('host') + host_ms_deps = m.ms_depends("host") host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps] m.config._merge_build_host = m.build_is_host if m.is_cross and not m.build_is_host: - if VersionOrder(conda_version) < VersionOrder('4.3.2'): - raise RuntimeError("Non-native subdir support only in conda >= 4.3.2") - - host_actions = environ.get_install_actions(m.config.host_prefix, - tuple(host_ms_deps), 'host', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config, - subdir=m.config.host_subdir, is_cross=m.is_cross, - is_conda=m.name() == 'conda') + host_precs = environ.get_package_records( + m.config.host_prefix, + tuple(host_ms_deps), + "host", + subdir=m.config.host_subdir, + 
debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.host_prefix, + host_precs, + env="host", + config=m.config, + subdir=m.config.host_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) if m.build_is_host: build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions(m.config.build_prefix, - tuple(build_ms_deps), 'build', - subdir=m.config.build_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + build_precs = environ.get_package_records( + m.config.build_prefix, + tuple(build_ms_deps), + "build", + subdir=m.config.build_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) try: if not notest: - utils.insert_variant_versions(m.meta.get('requirements', {}), - m.config.variant, 'run') - test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \ - utils.ensure_list(m.get_value('requirements/run', [])) + utils.insert_variant_versions( + m.get_section("requirements"), m.config.variant, "run" + ) + test_run_ms_deps = [ + *utils.ensure_list(m.get_value("test/requires", [])), + *utils.ensure_list(m.get_value("requirements/run", [])), + ] # make sure test deps are available before taking time to create build env - environ.get_install_actions(m.config.test_prefix, - tuple(test_run_ms_deps), 'test', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + environ.get_package_records( + m.config.test_prefix, + tuple(test_run_ms_deps), + "test", + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) except DependencyNeedsBuildingError as e: # subpackages are not actually missing. We just haven't built them yet. 
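Illustrative aside (not part of the patch itself): the handler just below distinguishes genuinely missing dependencies from sibling outputs of the same recipe that simply have not been built yet, by subtracting the other outputs' names from the solver's unresolved specs. A tiny sketch of that filtering with hypothetical data (the real code uses MatchSpec objects and output metadata):

unresolved = {"libfoo >=1.2", "mypkg-cli 2.0"}   # specs the solver could not find
sibling_outputs = {"mypkg-cli"}                   # outputs this recipe will build itself

missing = {spec.split()[0] for spec in unresolved} - sibling_outputs
print(missing)  # {'libfoo'} is the only dependency that is really missing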
- from .conda_interface import MatchSpec - - other_outputs = (m.other_outputs.values() if hasattr(m, 'other_outputs') else - m.get_output_metadata_set(permit_undefined_jinja=True)) - missing_deps = {MatchSpec(pkg).name for pkg in e.packages} - {out.name() for _, out in other_outputs} + other_outputs = ( + m.other_outputs.values() + if hasattr(m, "other_outputs") + else m.get_output_metadata_set(permit_undefined_jinja=True) + ) + missing_deps = {MatchSpec(pkg).name for pkg in e.packages} - { + out.name() for _, out in other_outputs + } if missing_deps: e.packages = missing_deps raise e - if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or not os.listdir(m.config.build_prefix)): - environ.create_env(m.config.build_prefix, build_actions, env='build', - config=m.config, subdir=m.config.build_subdir, - is_cross=m.is_cross, is_conda=m.name() == 'conda') + if ( + not m.config.dirty + or not os.path.isdir(m.config.build_prefix) + or not os.listdir(m.config.build_prefix) + ): + environ.create_env( + m.config.build_prefix, + build_precs, + env="build", + config=m.config, + subdir=m.config.build_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) -def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=False, - built_packages=None, notest=False, provision_only=False): - ''' +def build( + m: MetaData, + stats, + post=None, + need_source_download=True, + need_reparse_in_env=False, + built_packages=None, + notest=False, + provision_only=False, +): + """ Build the package with the specified metadata. :param m: Package metadata @@ -2019,7 +2280,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa post only. False means stop just before the post. :type need_source_download: bool: if rendering failed to download source (due to missing tools), retry here after build env is populated - ''' + """ default_return = {} if not built_packages: built_packages = {} @@ -2029,15 +2290,13 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa return default_return log = utils.get_logger(__name__) - host_actions = [] - build_actions = [] + host_precs = [] + build_precs = [] output_metas = [] with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] # this should be a no-op if source is already here if m.needs_source_for_render: @@ -2056,7 +2315,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa top_level_pkg = m top_level_needs_finalizing = True for _, om in output_metas: - if om.skip() or (m.config.skip_existing and is_package_built(om, 'host')): + if om.skip() or (m.config.skip_existing and is_package_built(om, "host")): skipped.append(bldpkg_path(om)) else: package_locations.append(bldpkg_path(om)) @@ -2064,63 +2323,87 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa top_level_pkg = om top_level_needs_finalizing = False if not package_locations: - print("Packages for ", m.path or m.name(), "with variant {} " - "are already built and available from your configured channels " - "(including local) or are otherwise specified to be skipped." 
- .format(m.get_hash_contents())) + print( + "Packages for ", + m.path or m.name(), + f"with variant {m.get_hash_contents()} " + "are already built and available from your configured channels " + "(including local) or are otherwise specified to be skipped.", + ) return default_return if not provision_only: printed_fns = [] for pkg in package_locations: - if (os.path.splitext(pkg)[1] and any( - os.path.splitext(pkg)[1] in ext for ext in CONDA_PACKAGE_EXTENSIONS)): + if os.path.splitext(pkg)[1] and any( + os.path.splitext(pkg)[1] in ext for ext in CONDA_PACKAGE_EXTENSIONS + ): printed_fns.append(os.path.basename(pkg)) else: printed_fns.append(pkg) print("BUILD START:", printed_fns) - environ.remove_existing_packages([m.config.bldpkgs_dir], - [pkg for pkg in package_locations if pkg not in built_packages], m.config) + environ.remove_existing_packages( + [m.config.bldpkgs_dir], + [pkg for pkg in package_locations if pkg not in built_packages], + m.config, + ) - specs = [ms.spec for ms in m.ms_depends('build')] - if any(out.get('type') == 'wheel' for out in m.meta.get('outputs', [])): - specs.extend(['pip', 'wheel']) + specs = [ms.spec for ms in m.ms_depends("build")] + if any(out.get("type") == "wheel" for out in m.get_section("outputs")): + specs.extend(["pip", "wheel"]) # TODO :: This is broken. It does not respect build/script for example and also if you need git # you should add it as s build dep manually. vcs_source = m.uses_vcs_in_build if vcs_source and vcs_source not in specs: vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source - has_vcs_available = os.path.isfile(external.find_executable(vcs_executable, - m.config.build_prefix) or "") + has_vcs_available = os.path.isfile( + external.find_executable(vcs_executable, m.config.build_prefix) or "" + ) if not has_vcs_available: - if (vcs_source != "mercurial" or not any(spec.startswith('python') and "3." in spec for spec in specs)): + if vcs_source != "mercurial" or not any( + spec.startswith("python") and "3." in spec for spec in specs + ): specs.append(vcs_source) - log.warn("Your recipe depends on %s at build time (for templates), " - "but you have not listed it as a build dependency. Doing " - "so for this build.", vcs_source) + log.warn( + "Your recipe depends on %s at build time (for templates), " + "but you have not listed it as a build dependency. Doing " + "so for this build.", + vcs_source, + ) else: - raise ValueError("Your recipe uses mercurial in build, but mercurial" - " does not yet support Python 3. Please handle all of " - "your mercurial actions outside of your build script.") + raise ValueError( + "Your recipe uses mercurial in build, but mercurial" + " does not yet support Python 3. Please handle all of " + "your mercurial actions outside of your build script." 
+ ) if top_level_needs_finalizing: utils.insert_variant_versions( - top_level_pkg.meta.get('requirements', {}), top_level_pkg.config.variant, 'build') + top_level_pkg.meta.get("requirements", {}), + top_level_pkg.config.variant, + "build", + ) utils.insert_variant_versions( - top_level_pkg.meta.get('requirements', {}), top_level_pkg.config.variant, 'host') + top_level_pkg.meta.get("requirements", {}), + top_level_pkg.config.variant, + "host", + ) exclude_pattern = None - excludes = set(top_level_pkg.config.variant.get('ignore_version', [])) + excludes = set(top_level_pkg.config.variant.get("ignore_version", [])) if excludes: - for key in top_level_pkg.config.variant.get('pin_run_as_build', {}).keys(): + for key in top_level_pkg.config.variant.get( + "pin_run_as_build", {} + ).keys(): if key in excludes: excludes.remove(key) if excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in excludes)) + exclude_pattern = re.compile( + r"|".join(rf"(?:^{exc}(?:\s|$|\Z))" for exc in excludes) + ) add_upstream_pins(m, False, exclude_pattern) create_build_envs(top_level_pkg, notest) @@ -2141,7 +2424,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # Write out metadata for `conda debug`, making it obvious that this is what it is, must be done # after try_download() - output_yaml(m, os.path.join(m.config.work_dir, 'metadata_conda_debug.yaml')) + output_yaml(m, os.path.join(m.config.work_dir, "metadata_conda_debug.yaml")) # get_dir here might be just work, or it might be one level deeper, # dependening on the source. @@ -2156,65 +2439,84 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa utils.rm_rf(m.config.info_dir) files1 = utils.prefix_files(prefix=m.config.host_prefix) - with open(join(m.config.build_folder, 'prefix_files.txt'), 'w') as f: - f.write('\n'.join(sorted(list(files1)))) - f.write('\n') + os.makedirs(m.config.build_folder, exist_ok=True) + with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f: + f.write("\n".join(sorted(list(files1)))) + f.write("\n") # Use script from recipe? - script = utils.ensure_list(m.get_value('build/script', None)) + script = utils.ensure_list(m.get_value("build/script", None)) if script: - script = '\n'.join(script) + script = "\n".join(script) if isdir(src_dir): build_stats = {} if utils.on_win: - build_file = join(m.path, 'bld.bat') + build_file = join(m.path, "bld.bat") if script: - build_file = join(src_dir, 'bld.bat') + build_file = join(src_dir, "bld.bat") import codecs - with codecs.getwriter('utf-8')(open(build_file, 'wb')) as bf: + + with codecs.getwriter("utf-8")(open(build_file, "wb")) as bf: bf.write(script) - windows.build(m, build_file, stats=build_stats, provision_only=provision_only) + windows.build( + m, build_file, stats=build_stats, provision_only=provision_only + ) else: - build_file = join(m.path, 'build.sh') + build_file = join(m.path, "build.sh") if isfile(build_file) and script: - raise CondaBuildException("Found a build.sh script and a build/script section " - "inside meta.yaml. Either remove the build.sh script " - "or remove the build/script section in meta.yaml.") + raise CondaBuildException( + "Found a build.sh script and a build/script section " + "inside meta.yaml. Either remove the build.sh script " + "or remove the build/script section in meta.yaml." + ) # There is no sense in trying to run an empty build script. 
if isfile(build_file) or script: work_file, _ = write_build_scripts(m, script, build_file) if not provision_only: - cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-o', 'errexit', work_file] + cmd = ( + [shell_path] + + (["-x"] if m.config.debug else []) + + ["-o", "errexit", work_file] + ) # rewrite long paths in stdout back to their env variables if m.config.debug or m.config.no_rewrite_stdout_env: rewrite_env = None else: - rewrite_vars = ['PREFIX', 'SRC_DIR'] + rewrite_vars = ["PREFIX", "SRC_DIR"] if not m.build_is_host: - rewrite_vars.insert(1, 'BUILD_PREFIX') - rewrite_env = { - k: env[k] - for k in rewrite_vars if k in env - } + rewrite_vars.insert(1, "BUILD_PREFIX") + rewrite_env = {k: env[k] for k in rewrite_vars if k in env} for k, v in rewrite_env.items(): - print('{} {}={}' - .format('set' if build_file.endswith('.bat') else 'export', k, v)) + print( + "{} {}={}".format( + "set" + if build_file.endswith(".bat") + else "export", + k, + v, + ) + ) # clear this, so that the activate script will get run as necessary - del env['CONDA_BUILD'] + del env["CONDA_BUILD"] # this should raise if any problems occur while building - utils.check_call_env(cmd, env=env, rewrite_stdout_env=rewrite_env, - cwd=src_dir, stats=build_stats) + utils.check_call_env( + cmd, + env=env, + rewrite_stdout_env=rewrite_env, + cwd=src_dir, + stats=build_stats, + ) utils.remove_pycache_from_scripts(m.config.host_prefix) if build_stats and not provision_only: log_stats(build_stats, f"building {m.name()}") if stats is not None: - stats[stats_key(m, 'build')] = build_stats + stats[stats_key(m, "build")] = build_stats - prefix_file_list = join(m.config.build_folder, 'prefix_files.txt') + prefix_file_list = join(m.config.build_folder, "prefix_files.txt") initial_files = set() if os.path.isfile(prefix_file_list): with open(prefix_file_list) as f: @@ -2223,13 +2525,15 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa new_pkgs = default_return if not provision_only and post in [True, None]: - outputs = output_metas or m.get_output_metadata_set(permit_unsatisfiable_variants=False) + outputs = output_metas or m.get_output_metadata_set( + permit_unsatisfiable_variants=False + ) get_all_replacements(outputs[0][1].config) top_level_meta = m # this is the old, default behavior: conda package, with difference between start # set of files and end set of files - prefix_file_list = join(m.config.build_folder, 'prefix_files.txt') + prefix_file_list = join(m.config.build_folder, "prefix_files.txt") if os.path.isfile(prefix_file_list): with open(prefix_file_list) as f: initial_files = set(f.read().splitlines()) @@ -2237,22 +2541,27 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa initial_files = set() # subdir needs to always be some real platform - so ignore noarch. 
- subdir = (m.config.host_subdir if m.config.host_subdir != 'noarch' else - m.config.subdir) + subdir = ( + m.config.host_subdir + if m.config.host_subdir != "noarch" + else m.config.subdir + ) with TemporaryDirectory() as prefix_files_backup: # back up new prefix files, because we wipe the prefix before each output build for f in new_prefix_files: - utils.copy_into(os.path.join(m.config.host_prefix, f), - os.path.join(prefix_files_backup, f), - symlinks=True) + utils.copy_into( + os.path.join(m.config.host_prefix, f), + os.path.join(prefix_files_backup, f), + symlinks=True, + ) # this is the inner loop, where we loop over any vars used only by # outputs (not those used by the top-level recipe). The metadata # objects here are created by the m.get_output_metadata_set, which # is distributing the matrix of used variables. - for (output_d, m) in outputs: + for output_d, m in outputs: get_all_replacements(m.config.variants) get_all_replacements(m.config.variant) if m.skip(): @@ -2261,44 +2570,58 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # TODO: should we check both host and build envs? These are the same, except when # cross compiling - if m.config.skip_existing and is_package_built(m, 'host'): + if m.config.skip_existing and is_package_built(m, "host"): print(utils.get_skip_message(m)) new_pkgs[bldpkg_path(m)] = output_d, m continue - if (top_level_meta.name() == output_d.get('name') and not (output_d.get('files') or - output_d.get('script'))): - output_d['files'] = (utils.prefix_files(prefix=m.config.host_prefix) - - initial_files) + if top_level_meta.name() == output_d.get("name") and not ( + output_d.get("files") or output_d.get("script") + ): + output_d["files"] = ( + utils.prefix_files(prefix=m.config.host_prefix) - initial_files + ) # ensure that packaging scripts are copied over into the workdir - if 'script' in output_d: - utils.copy_into(os.path.join(m.path, output_d['script']), m.config.work_dir) + if "script" in output_d: + utils.copy_into( + os.path.join(m.path, output_d["script"]), m.config.work_dir + ) # same thing, for test scripts - test_script = output_d.get('test', {}).get('script') + test_script = output_d.get("test", {}).get("script") if test_script: if not os.path.isfile(os.path.join(m.path, test_script)): - raise ValueError("test script specified as {} does not exist. Please " - "check for typos or create the file and try again." - .format(test_script)) - utils.copy_into(os.path.join(m.path, test_script), - os.path.join(m.config.work_dir, test_script)) - - assert output_d.get('type') != 'conda' or m.final, ( - f"output metadata for {m.dist()} is not finalized") + raise ValueError( + f"test script specified as {test_script} does not exist. Please " + "check for typos or create the file and try again." + ) + utils.copy_into( + os.path.join(m.path, test_script), + os.path.join(m.config.work_dir, test_script), + ) + + assert ( + output_d.get("type") != "conda" or m.final + ), f"output metadata for {m.dist()} is not finalized" pkg_path = bldpkg_path(m) if pkg_path not in built_packages and pkg_path not in new_pkgs: log.info(f"Packaging {m.name()}") # for more than one output, we clear and rebuild the environment before each # package. We also do this for single outputs that present their own # build reqs. 
- if not (m.is_output or - (os.path.isdir(m.config.host_prefix) and - len(os.listdir(m.config.host_prefix)) <= 1)): + if not ( + m.is_output + or ( + os.path.isdir(m.config.host_prefix) + and len(os.listdir(m.config.host_prefix)) <= 1 + ) + ): # This log message contradicts both the not (m.is_output or ..) check above # and also the comment "For more than one output, ..." - log.debug('Not creating new env for output - already exists from top-level') + log.debug( + "Not creating new env for output - already exists from top-level" + ) else: m.config._merge_build_host = m.build_is_host @@ -2306,95 +2629,138 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa utils.rm_rf(m.config.build_prefix) utils.rm_rf(m.config.test_prefix) - host_ms_deps = m.ms_depends('host') - sub_build_ms_deps = m.ms_depends('build') + host_ms_deps = m.ms_depends("host") + sub_build_ms_deps = m.ms_depends("build") if m.is_cross and not m.build_is_host: - host_actions = environ.get_install_actions(m.config.host_prefix, - tuple(host_ms_deps), 'host', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.host_prefix, host_actions, env='host', - config=m.config, subdir=subdir, is_cross=m.is_cross, - is_conda=m.name() == 'conda') + host_precs = environ.get_package_records( + m.config.host_prefix, + tuple(host_ms_deps), + "host", + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.host_prefix, + host_precs, + env="host", + config=m.config, + subdir=subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) else: # When not cross-compiling, the build deps aggregate 'build' and 'host'. 
sub_build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions(m.config.build_prefix, - tuple(sub_build_ms_deps), 'build', - subdir=m.config.build_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.build_prefix, build_actions, env='build', - config=m.config, subdir=m.config.build_subdir, - is_cross=m.is_cross, - is_conda=m.name() == 'conda') + build_precs = environ.get_package_records( + m.config.build_prefix, + tuple(sub_build_ms_deps), + "build", + subdir=m.config.build_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.build_prefix, + build_precs, + env="build", + config=m.config, + subdir=m.config.build_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) to_remove = set() - for f in output_d.get('files', []): - if f.startswith('conda-meta'): + for f in output_d.get("files", []): + if f.startswith("conda-meta"): to_remove.add(f) # This is wrong, files has not been expanded at this time and could contain # wildcards. Also well, I just do not understand this, because when this # does contain wildcards, the files in to_remove will slip back in. - if 'files' in output_d: - output_d['files'] = set(output_d['files']) - to_remove + if "files" in output_d: + output_d["files"] = set(output_d["files"]) - to_remove # copies the backed-up new prefix files into the newly created host env for f in new_prefix_files: - utils.copy_into(os.path.join(prefix_files_backup, f), - os.path.join(m.config.host_prefix, f), - symlinks=True) + utils.copy_into( + os.path.join(prefix_files_backup, f), + os.path.join(m.config.host_prefix, f), + symlinks=True, + ) # we must refresh the environment variables because our env for each package # can be different from the env for the top level build. with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) - pkg_type = 'conda' if not hasattr(m, 'type') else m.type + pkg_type = "conda" if not hasattr(m, "type") else m.type newly_built_packages = bundlers[pkg_type](output_d, m, env, stats) # warn about overlapping files. 
- if 'checksums' in output_d: - for file, csum in output_d['checksums'].items(): + if "checksums" in output_d: + for file, csum in output_d["checksums"].items(): for _, prev_om in new_pkgs.items(): prev_output_d, _ = prev_om - if file in prev_output_d.get('checksums', {}): - prev_csum = prev_output_d['checksums'][file] - nature = 'Exact' if csum == prev_csum else 'Inexact' - log.warning("{} overlap between {} in packages {} and {}" - .format(nature, file, output_d['name'], - prev_output_d['name'])) + if file in prev_output_d.get("checksums", {}): + prev_csum = prev_output_d["checksums"][file] + nature = "Exact" if csum == prev_csum else "Inexact" + log.warning( + "{} overlap between {} in packages {} and {}".format( + nature, + file, + output_d["name"], + prev_output_d["name"], + ) + ) for built_package in newly_built_packages: new_pkgs[built_package] = (output_d, m) # must rebuild index because conda has no way to incrementally add our last # package to the index. - subdir = ('noarch' if (m.noarch or m.noarch_python) - else m.config.host_subdir) + index_subdir = ( + "noarch" + if (m.noarch or m.noarch_python) + else m.config.host_subdir + ) if m.is_cross: - get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout, clear_cache=True) - get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout, clear_cache=True) + get_build_index( + subdir=index_subdir, + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + channel_urls=m.config.channel_urls, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + clear_cache=True, + omit_defaults=False, + ) + get_build_index( + subdir=index_subdir, + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + channel_urls=m.config.channel_urls, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + clear_cache=True, + omit_defaults=False, + ) else: if not provision_only: print("STOPPING BUILD BEFORE POST:", m.dist()) @@ -2403,53 +2769,68 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa return new_pkgs -def guess_interpreter(script_filename): - # -l is needed for MSYS2 as the login scripts set some env. vars (TMP, TEMP) - # Since the MSYS2 installation is probably a set of conda packages we do not - # need to worry about system environmental pollution here. For that reason I - # do not pass -l on other OSes. - extensions_to_run_commands = {'.sh': ['bash.exe', '-el'] if utils.on_win else ['bash', '-e'], - '.bat': [os.environ.get('COMSPEC', 'cmd.exe'), '/d', '/c'], - '.ps1': ['powershell', '-executionpolicy', 'bypass', '-File'], - '.py': ['python']} - file_ext = os.path.splitext(script_filename)[1] - for ext, command in extensions_to_run_commands.items(): - if file_ext.lower().startswith(ext): - interpreter_command = command - break - else: - raise NotImplementedError("Don't know how to run {} file. Please specify " - "script_interpreter for {} output".format(file_ext, - script_filename)) - return interpreter_command +# -l is needed for MSYS2 as the login scripts set some env. 
vars (TMP, TEMP) +# Since the MSYS2 installation is probably a set of conda packages we do not +# need to worry about system environmental pollution here. For that reason I +# do not pass -l on other OSes. +INTERPRETER_BASH = ("bash.exe", "-el") if on_win else ("bash", "-e") +INTERPRETER_BAT = (os.getenv("COMSPEC", "cmd.exe"), "/d", "/c") +INTERPRETER_POWERSHELL = ("powershell", "-ExecutionPolicy", "ByPass", "-File") +INTERPRETER_PYTHON = ("python",) + + +def guess_interpreter(script_filename: str | os.PathLike | Path) -> tuple[str, ...]: + suffix = Path(script_filename).suffix + try: + return { + ".sh": INTERPRETER_BASH, + ".bat": INTERPRETER_BAT, + ".ps1": INTERPRETER_POWERSHELL, + ".py": INTERPRETER_PYTHON, + }[suffix] + except KeyError: + # KeyError: unknown suffix + raise NotImplementedError( + f"Don't know how to run {suffix} file. Please specify " + f"script_interpreter for {script_filename} output" + ) def warn_on_use_of_SRC_DIR(metadata): - test_files = glob(os.path.join(metadata.path, 'run_test*')) + test_files = glob(os.path.join(metadata.path, "run_test*")) for f in test_files: with open(f) as _f: contents = _f.read() - if ("SRC_DIR" in contents and 'source_files' not in metadata.get_section('test') and - metadata.config.remove_work_dir): - raise ValueError("In conda-build 2.1+, the work dir is removed by default before the " - "test scripts run. You are using the SRC_DIR variable in your test " - "script, but these files have been deleted. Please see the " - " documentation regarding the test/source_files meta.yaml section, " - "or pass the --no-remove-work-dir flag.") + if ( + "SRC_DIR" in contents + and "source_files" not in metadata.get_section("test") + and metadata.config.remove_work_dir + ): + raise ValueError( + "In conda-build 2.1+, the work dir is removed by default before the " + "test scripts run. You are using the SRC_DIR variable in your test " + "script, but these files have been deleted. Please see the " + " documentation regarding the test/source_files meta.yaml section, " + "or pass the --no-remove-work-dir flag." + ) def _construct_metadata_for_test_from_recipe(recipe_dir, config): config.need_cleanup = False config.recipe_dir = None hash_input = {} - metadata = expand_outputs(render_recipe(recipe_dir, config=config, reset_build_id=False))[0][1] + metadata = expand_outputs( + render_recipe(recipe_dir, config=config, reset_build_id=False) + )[0][1] log = utils.get_logger(__name__) - log.warn("Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust " - "your code to pass your desired conda package to test instead.") + log.warn( + "Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust " + "your code to pass your desired conda package to test instead." 
+ ) utils.rm_rf(metadata.config.test_dir) - if metadata.meta.get('test', {}).get('source_files'): + if metadata.meta.get("test", {}).get("source_files"): if not metadata.source_provided: try_download(metadata, no_download_source=False) @@ -2462,16 +2843,15 @@ def _construct_metadata_for_test_from_package(package, config): config.recipe_dir = recipe_dir hash_input = {} - info_dir = os.path.normpath(os.path.join(recipe_dir, 'info')) - with open(os.path.join(info_dir, 'index.json')) as f: + info_dir = os.path.normpath(os.path.join(recipe_dir, "info")) + with open(os.path.join(info_dir, "index.json")) as f: package_data = json.load(f) - if package_data['subdir'] != 'noarch': - config.host_subdir = package_data['subdir'] + config.host_subdir = package_data["subdir"] # We may be testing an (old) package built without filename hashing. - hash_input = os.path.join(info_dir, 'hash_input.json') + hash_input = os.path.join(info_dir, "hash_input.json") if os.path.isfile(hash_input): - with open(os.path.join(info_dir, 'hash_input.json')) as f: + with open(os.path.join(info_dir, "hash_input.json")) as f: hash_input = json.load(f) else: config.filename_hashing = False @@ -2488,16 +2868,18 @@ def _construct_metadata_for_test_from_package(package, config): # get last part of the path last_element = os.path.basename(local_pkg_location) is_channel = False - for platform in ('win-', 'linux-', 'osx-', 'noarch'): + for platform in ("win-", "linux-", "osx-", "noarch"): if last_element.startswith(platform): is_channel = True if not is_channel: - log.warn("Copying package to conda-build croot. No packages otherwise alongside yours will" - " be available unless you specify -c local. To avoid this warning, your package " - "must reside in a channel structure with platform-subfolders. See more info on " - "what a valid channel is at " - "https://conda.io/docs/user-guide/tasks/create-custom-channels.html") + log.warn( + "Copying package to conda-build croot. No packages otherwise alongside yours will" + " be available unless you specify -c local. To avoid this warning, your package " + "must reside in a channel structure with platform-subfolders. See more info on " + "what a valid channel is at " + "https://conda.io/docs/user-guide/tasks/create-custom-channels.html" + ) local_dir = config.bldpkgs_dir try: @@ -2511,33 +2893,42 @@ def _construct_metadata_for_test_from_package(package, config): local_channel = os.path.dirname(local_pkg_location) # update indices in the channel - update_index(local_channel, verbose=config.debug, threads=1) + _delegated_update_index(local_channel, verbose=config.debug, threads=1) try: - metadata = render_recipe(os.path.join(info_dir, 'recipe'), config=config, - reset_build_id=False)[0][0] + metadata = render_recipe( + os.path.join(info_dir, "recipe"), config=config, reset_build_id=False + )[0][0] # no recipe in package. 
Fudge metadata except (OSError, SystemExit): # force the build string to line up - recomputing it would # yield a different result - metadata = MetaData.fromdict({'package': {'name': package_data['name'], - 'version': package_data['version']}, - 'build': {'number': int(package_data['build_number']), - 'string': package_data['build']}, - 'requirements': {'run': package_data['depends']} - }, config=config) + metadata = MetaData.fromdict( + { + "package": { + "name": package_data["name"], + "version": package_data["version"], + }, + "build": { + "number": int(package_data["build_number"]), + "string": package_data["build"], + }, + "requirements": {"run": package_data["depends"]}, + }, + config=config, + ) # HACK: because the recipe is fully baked, detecting "used" variables no longer works. The set # of variables in the hash_input suffices, though. if metadata.noarch: - metadata.config.variant['target_platform'] = "noarch" + metadata.config.variant["target_platform"] = "noarch" metadata.config.used_vars = list(hash_input.keys()) urls = list(utils.ensure_list(metadata.config.channel_urls)) local_path = url_path(local_channel) # replace local with the appropriate real channel. Order is maintained. - urls = [url if url != 'local' else local_path for url in urls] + urls = [url if url != "local" else local_path for url in urls] if local_path not in urls: urls.insert(0, local_path) metadata.config.channel_urls = urls @@ -2546,10 +2937,14 @@ def _construct_metadata_for_test_from_package(package, config): def _extract_test_files_from_package(metadata): - recipe_dir = metadata.config.recipe_dir if hasattr(metadata.config, "recipe_dir") else metadata.path + recipe_dir = ( + metadata.config.recipe_dir + if hasattr(metadata.config, "recipe_dir") + else metadata.path + ) if recipe_dir: - info_dir = os.path.normpath(os.path.join(recipe_dir, 'info')) - test_files = os.path.join(info_dir, 'test') + info_dir = os.path.normpath(os.path.join(recipe_dir, "info")) + test_files = os.path.join(info_dir, "test") if os.path.exists(test_files) and os.path.isdir(test_files): # things are re-extracted into the test dir because that's cwd when tests are run, # and provides the most intuitive experience. This is a little @@ -2557,35 +2952,50 @@ def _extract_test_files_from_package(metadata): # work_dir, for legacy behavior where people aren't using # test/source_files. It would be better to change SRC_DIR in # test phase to always point to test_dir. Maybe one day. 
- utils.copy_into(test_files, metadata.config.test_dir, - metadata.config.timeout, symlinks=True, - locking=metadata.config.locking, clobber=True) - dependencies_file = os.path.join(test_files, 'test_time_dependencies.json') + utils.copy_into( + test_files, + metadata.config.test_dir, + metadata.config.timeout, + symlinks=True, + locking=metadata.config.locking, + clobber=True, + ) + dependencies_file = os.path.join(test_files, "test_time_dependencies.json") test_deps = [] if os.path.isfile(dependencies_file): with open(dependencies_file) as f: test_deps = json.load(f) - test_section = metadata.meta.get('test', {}) - test_section['requires'] = test_deps - metadata.meta['test'] = test_section + test_section = metadata.meta.get("test", {}) + test_section["requires"] = test_deps + metadata.meta["test"] = test_section else: - if metadata.meta.get('test', {}).get('source_files'): + if metadata.meta.get("test", {}).get("source_files"): if not metadata.source_provided: try_download(metadata, no_download_source=False) def construct_metadata_for_test(recipedir_or_package, config): - if os.path.isdir(recipedir_or_package) or os.path.basename(recipedir_or_package) == 'meta.yaml': - m, hash_input = _construct_metadata_for_test_from_recipe(recipedir_or_package, config) + if ( + os.path.isdir(recipedir_or_package) + or os.path.basename(recipedir_or_package) == "meta.yaml" + ): + m, hash_input = _construct_metadata_for_test_from_recipe( + recipedir_or_package, config + ) else: - m, hash_input = _construct_metadata_for_test_from_package(recipedir_or_package, config) + m, hash_input = _construct_metadata_for_test_from_package( + recipedir_or_package, config + ) return m, hash_input def write_build_scripts(m, script, build_file): - with utils.path_prepended(m.config.host_prefix): - with utils.path_prepended(m.config.build_prefix): + # TODO: Prepending the prefixes here should probably be guarded by + # if not m.activate_build_script: + # Leaving it as is, for now, since we need a quick, non-disruptive patch release. + with utils.path_prepended(m.config.host_prefix, False): + with utils.path_prepended(m.config.build_prefix, False): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" @@ -2595,7 +3005,7 @@ def write_build_scripts(m, script, build_file): # Note that pip env "NO" variables are inverted logic. # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. # - env["PIP_NO_BUILD_ISOLATION"] = 'False' + env["PIP_NO_BUILD_ISOLATION"] = "False" # some other env vars to have pip ignore dependencies. # we supply them ourselves instead. env["PIP_NO_DEPENDENCIES"] = True @@ -2605,7 +3015,7 @@ def write_build_scripts(m, script, build_file): # .dist-info directories being created, see gh-3094 # set PIP_CACHE_DIR to a path in the work dir that does not exist. - env['PIP_CACHE_DIR'] = m.config.pip_cache_dir + env["PIP_CACHE_DIR"] = m.config.pip_cache_dir # tell pip to not get anything from PyPI, please. We have everything we need # locally, and if we don't, it's a problem. 
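[Reviewer note, not part of the patch] The dominant change running through the hunks above is the switch from `environ.get_install_actions(...)`, which returned an "actions" dict, to `environ.get_package_records(...)`, whose resolved package records are then handed to `environ.create_env`. A minimal sketch of the new flow in isolation — the keyword surface is copied from the hunks above, while the wrapper function, the `MetaData` import path, and the assumption of a fully rendered `m` are illustrative only:

```python
# Sketch only -- mirrors the call pattern introduced in this PR; not part of the patch.
from conda_build import environ
from conda_build.metadata import MetaData


def create_host_env(m: MetaData) -> None:
    # Resolve the host dependencies to package records (previously an "actions"
    # dict from environ.get_install_actions) ...
    host_precs = environ.get_package_records(
        m.config.host_prefix,
        tuple(m.ms_depends("host")),
        "host",
        subdir=m.config.host_subdir,
        debug=m.config.debug,
        verbose=m.config.verbose,
        locking=m.config.locking,
        bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
        timeout=m.config.timeout,
        disable_pip=m.config.disable_pip,
        max_env_retry=m.config.max_env_retry,
        output_folder=m.config.output_folder,
        channel_urls=tuple(m.config.channel_urls),
    )
    # ... then materialize the prefix from those records.
    environ.create_env(
        m.config.host_prefix,
        host_precs,
        env="host",
        config=m.config,
        subdir=m.config.host_subdir,
        is_cross=m.is_cross,
        is_conda=m.name() == "conda",
    )
```

The same pattern repeats in this diff for the build and test prefixes; only the target prefix, the spec list, and the env label change.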
@@ -2618,18 +3028,18 @@ def write_build_scripts(m, script, build_file): if "replacements" in env: del env["replacements"] - work_file = join(m.config.work_dir, 'conda_build.sh') - env_file = join(m.config.work_dir, 'build_env_setup.sh') - with open(env_file, 'w') as bf: + work_file = join(m.config.work_dir, "conda_build.sh") + env_file = join(m.config.work_dir, "build_env_setup.sh") + with open(env_file, "w") as bf: for k, v in env.items(): - if v != '' and v is not None: + if v != "" and v is not None: bf.write(f'export {k}="{v}"\n') if m.activate_build_script: _write_sh_activation_text(bf, m) - with open(work_file, 'w') as bf: + with open(work_file, "w") as bf: # bf.write('set -ex\n') - bf.write('if [ -z ${CONDA_BUILD+x} ]; then\n') + bf.write("if [ -z ${CONDA_BUILD+x} ]; then\n") bf.write(f" source {env_file}\n") bf.write("fi\n") if script: @@ -2641,46 +3051,73 @@ def write_build_scripts(m, script, build_file): return work_file, env_file -def _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, - lua_files, r_files, shell_files, trace): +def _write_test_run_script( + metadata, + test_run_script, + test_env_script, + py_files, + pl_files, + lua_files, + r_files, + shell_files, + trace, +): log = utils.get_logger(__name__) - with open(test_run_script, 'w') as tf: - tf.write('{source} "{test_env_script}"\n'.format( - source="call" if utils.on_win else "source", - test_env_script=test_env_script)) + with open(test_run_script, "w") as tf: + tf.write( + '{source} "{test_env_script}"\n'.format( + source="call" if utils.on_win else "source", + test_env_script=test_env_script, + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") else: - tf.write(f'set {trace}-e\n') + tf.write(f"set {trace}-e\n") if py_files: test_python = metadata.config.test_python # use pythonw for import tests when osx_is_app is set - if metadata.get_value('build/osx_is_app') and sys.platform == 'darwin': - test_python = test_python + 'w' - tf.write('"{python}" -s "{test_file}"\n'.format( - python=test_python, - test_file=join(metadata.config.test_dir, 'run_test.py'))) + if metadata.get_value("build/osx_is_app") and on_mac: + test_python = test_python + "w" + tf.write( + '"{python}" -s "{test_file}"\n'.format( + python=test_python, + test_file=join(metadata.config.test_dir, "run_test.py"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if pl_files: - tf.write('"{perl}" "{test_file}"\n'.format( - perl=metadata.config.perl_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.pl'))) + tf.write( + '"{perl}" "{test_file}"\n'.format( + perl=metadata.config.perl_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, "run_test.pl"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if lua_files: - tf.write('"{lua}" "{test_file}"\n'.format( - lua=metadata.config.lua_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.lua'))) + tf.write( + '"{lua}" "{test_file}"\n'.format( + lua=metadata.config.lua_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, "run_test.lua"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if r_files: - tf.write('"{r}" "{test_file}"\n'.format( - r=metadata.config.rscript_bin(metadata.config.test_prefix, - metadata.config.host_platform), - 
test_file=join(metadata.config.test_dir, 'run_test.r'))) + tf.write( + '"{r}" "{test_file}"\n'.format( + r=metadata.config.rscript_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, "run_test.r"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if shell_files: @@ -2690,20 +3127,26 @@ def _write_test_run_script(metadata, test_run_script, test_env_script, py_files, tf.write(f'call "{shell_file}"\n') tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") else: - log.warn("Found sh test file on windows. Ignoring this for now (PRs welcome)") + log.warn( + "Found sh test file on windows. Ignoring this for now (PRs welcome)" + ) elif os.path.splitext(shell_file)[1] == ".sh": # TODO: Run the test/commands here instead of in run_test.py - tf.write('"{shell_path}" {trace}-e "{test_file}"\n'.format(shell_path=shell_path, - test_file=shell_file, - trace=trace)) + tf.write(f'"{shell_path}" {trace}-e "{shell_file}"\n') -def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace=""): - if not metadata.config.activate or metadata.name() == 'conda': +def write_test_scripts( + metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace="" +): + if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory - env_vars = utils.prepend_bin_path(env_vars, metadata.config.test_prefix, prepend_prefix=True) + env_vars = utils.prepend_bin_path( + env_vars, metadata.config.test_prefix, prepend_prefix=True + ) if utils.on_win: - env_vars['PATH'] = metadata.config.test_prefix + os.pathsep + env_vars['PATH'] + env_vars["PATH"] = ( + metadata.config.test_prefix + os.pathsep + env_vars["PATH"] + ) # set variables like CONDA_PY in the test environment env_vars.update(set_language_env_vars(metadata.config.variant)) @@ -2711,128 +3154,168 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file # Python 2 Windows requires that envs variables be string, not unicode env_vars = {str(key): str(value) for key, value in env_vars.items()} suffix = "bat" if utils.on_win else "sh" - test_env_script = join(metadata.config.test_dir, - f"conda_test_env_vars.{suffix}") - test_run_script = join(metadata.config.test_dir, - f"conda_test_runner.{suffix}") + test_env_script = join(metadata.config.test_dir, f"conda_test_env_vars.{suffix}") + test_run_script = join(metadata.config.test_dir, f"conda_test_runner.{suffix}") - with open(test_env_script, 'w') as tf: + with open(test_env_script, "w") as tf: if not utils.on_win: - tf.write(f'set {trace}-e\n') - if metadata.config.activate and not metadata.name() == 'conda': - ext = ".bat" if utils.on_win else "" - if conda_46: - if utils.on_win: - tf.write( - 'set "CONDA_SHLVL=" ' - '&& @CALL {}\\condabin\\conda_hook.bat {}' - '&& set CONDA_EXE={python_exe}' - '&& set CONDA_PYTHON_EXE={python_exe}' - '&& set _CE_I={}' - '&& set _CE_M=-m' - '&& set _CE_CONDA=conda\n'.format( - sys.prefix, - '--dev' if metadata.config.debug else '', - "-i" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "", - python_exe=sys.executable - ) - ) - else: - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' - tf.write( - f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + tf.write(f"set {trace}-e\n") + if metadata.config.activate and not metadata.name() == "conda": + if utils.on_win: + tf.write( + 'set "CONDA_SHLVL=" ' + "&& @CALL {}\\condabin\\conda_hook.bat 
{}" + "&& set CONDA_EXE={python_exe}" + "&& set CONDA_PYTHON_EXE={python_exe}" + "&& set _CE_I={}" + "&& set _CE_M=-m" + "&& set _CE_CONDA=conda\n".format( + sys.prefix, + "--dev" if metadata.config.debug else "", + "-i" + if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") + else "", + python_exe=sys.executable, ) - tf.write(f'conda activate "{metadata.config.test_prefix}"\n') + ) else: - tf.write('{source} "{conda_root}activate{ext}" "{test_env}"\n'.format( - conda_root=utils.root_script_dir + os.path.sep, - source="call" if utils.on_win else "source", - ext=ext, - test_env=metadata.config.test_prefix)) + py_flags = ( + "-I -m" + if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") + else "-m" + ) + tf.write( + f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + ) + tf.write(f'conda activate "{metadata.config.test_prefix}"\n') if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") # In-case people source this, it's essential errors are not fatal in an interactive shell. if not utils.on_win: - tf.write('set +e\n') - - _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, - lua_files, r_files, shell_files, trace) + tf.write("set +e\n") + + _write_test_run_script( + metadata, + test_run_script, + test_env_script, + py_files, + pl_files, + lua_files, + r_files, + shell_files, + trace, + ) return test_run_script, test_env_script -def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, provision_only=False): - ''' +def test( + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + config: Config, + stats: dict, + move_broken: bool = True, + provision_only: bool = False, +) -> bool: + """ Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata - ''' + """ log = utils.get_logger(__name__) # we want to know if we're dealing with package input. If so, we can move the input on success. hash_input = {} # store this name to keep it consistent. By changing files, we change the hash later. # It matches the build hash now, so let's keep it around. - test_package_name = (recipedir_or_package_or_metadata.dist() - if hasattr(recipedir_or_package_or_metadata, 'dist') - else recipedir_or_package_or_metadata) + test_package_name = ( + recipedir_or_package_or_metadata.dist() + if hasattr(recipedir_or_package_or_metadata, "dist") + else recipedir_or_package_or_metadata + ) if not provision_only: print("TEST START:", test_package_name) - if hasattr(recipedir_or_package_or_metadata, 'config'): + if hasattr(recipedir_or_package_or_metadata, "config"): metadata = recipedir_or_package_or_metadata utils.rm_rf(metadata.config.test_dir) else: - metadata, hash_input = construct_metadata_for_test(recipedir_or_package_or_metadata, - config) + metadata, hash_input = construct_metadata_for_test( + recipedir_or_package_or_metadata, config + ) - trace = '-x ' if metadata.config.debug else '' + trace = "-x " if metadata.config.debug else "" # Must download *after* computing build id, or else computing build id will change # folder destination _extract_test_files_from_package(metadata) - # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it. - # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed - # I think we can remove this call to clean_pkg_cache(). 
- in_pkg_cache = (not hasattr(recipedir_or_package_or_metadata, 'config') and - os.path.isfile(recipedir_or_package_or_metadata) and - recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) and - os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]) + # Remove any previously cached build from the package cache to ensure we + # really test the requested build and not some clashing or corrupted build. + # (Corruption of the extracted package can happen, e.g., in multi-output + # builds if one of the subpackages overwrites files from the other.) + # Special case: + # If test is requested for .tar.bz2/.conda file from the pkgs dir itself, + # clean_pkg_cache() will remove it; don't call that function in this case. + in_pkg_cache = ( + not hasattr(recipedir_or_package_or_metadata, "config") + and os.path.isfile(recipedir_or_package_or_metadata) + and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) + and any( + os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dir + for pkgs_dir in context.pkgs_dirs + ) + ) if not in_pkg_cache: environ.clean_pkg_cache(metadata.dist(), metadata.config) copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir - _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(metadata) - if not any([py_files, shell_files, pl_files, lua_files, r_files]) and not metadata.config.test_run_post: + _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files( + metadata + ) + if ( + not any([py_files, shell_files, pl_files, lua_files, r_files]) + and not metadata.config.test_run_post + ): print("Nothing to test for:", test_package_name) return True if metadata.config.remove_work_dir: - for name, prefix in (('host', metadata.config.host_prefix), - ('build', metadata.config.build_prefix)): + for name, prefix in ( + ("host", metadata.config.host_prefix), + ("build", metadata.config.build_prefix), + ): if os.path.isdir(prefix): # move host folder to force hardcoded paths to host env to break during tests # (so that they can be properly addressed by recipe author) - dest = os.path.join(os.path.dirname(prefix), - '_'.join(('%s_prefix_moved' % name, metadata.dist(), - getattr(metadata.config, '%s_subdir' % name)))) + dest = os.path.join( + os.path.dirname(prefix), + "_".join( + ( + f"{name}_prefix_moved", + metadata.dist(), + getattr(metadata.config, f"{name}_subdir"), + ) + ), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(prefix, dest, f"{prefix} prefix") # nested if so that there's no warning when we just leave the empty workdir in place if metadata.source_provided: - dest = os.path.join(os.path.dirname(metadata.config.work_dir), - '_'.join(('work_moved', metadata.dist(), - metadata.config.host_subdir))) + dest = os.path.join( + os.path.dirname(metadata.config.work_dir), + "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(config.work_dir, dest, "work") else: - log.warn("Not moving work directory after build. Your package may depend on files " - "in the work directory that are not included with your package") + log.warn( + "Not moving work directory after build. 
Your package may depend on files " + "in the work directory that are not included with your package" + ) get_build_metadata(metadata) @@ -2843,70 +3326,94 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov env.update(environ.get_dict(m=metadata, prefix=config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" env["CONDA_BUILD"] = "1" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] - if not metadata.config.activate or metadata.name() == 'conda': + if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory - env = utils.prepend_bin_path(env, metadata.config.test_prefix, prepend_prefix=True) + env = utils.prepend_bin_path( + env, metadata.config.test_prefix, prepend_prefix=True + ) if utils.on_win: - env['PATH'] = metadata.config.test_prefix + os.pathsep + env['PATH'] + env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"] - env['PREFIX'] = metadata.config.test_prefix - if 'BUILD_PREFIX' in env: - del env['BUILD_PREFIX'] + env["PREFIX"] = metadata.config.test_prefix + if "BUILD_PREFIX" in env: + del env["BUILD_PREFIX"] # In the future, we will need to support testing cross compiled # packages on physical hardware. until then it is expected that # something like QEMU or Wine will be used on the build machine, # therefore, for now, we use host_subdir. - subdir = ('noarch' if (metadata.noarch or metadata.noarch_python) - else metadata.config.host_subdir) + subdir = ( + "noarch" + if (metadata.noarch or metadata.noarch_python) + else metadata.config.host_subdir + ) # ensure that the test prefix isn't kept between variants utils.rm_rf(metadata.config.test_prefix) try: - actions = environ.get_install_actions(metadata.config.test_prefix, - tuple(specs), 'host', - subdir=subdir, - debug=metadata.config.debug, - verbose=metadata.config.verbose, - locking=metadata.config.locking, - bldpkgs_dirs=tuple(metadata.config.bldpkgs_dirs), - timeout=metadata.config.timeout, - disable_pip=metadata.config.disable_pip, - max_env_retry=metadata.config.max_env_retry, - output_folder=metadata.config.output_folder, - channel_urls=tuple(metadata.config.channel_urls)) - except (DependencyNeedsBuildingError, NoPackagesFoundError, UnsatisfiableError, - CondaError, AssertionError) as exc: - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir, - config=metadata.config) + precs = environ.get_package_records( + metadata.config.test_prefix, + tuple(specs), + "host", + subdir=subdir, + debug=metadata.config.debug, + verbose=metadata.config.verbose, + locking=metadata.config.locking, + bldpkgs_dirs=tuple(metadata.config.bldpkgs_dirs), + timeout=metadata.config.timeout, + disable_pip=metadata.config.disable_pip, + max_env_retry=metadata.config.max_env_retry, + output_folder=metadata.config.output_folder, + channel_urls=tuple(metadata.config.channel_urls), + ) + except ( + DependencyNeedsBuildingError, + NoPackagesFoundError, + UnsatisfiableError, + CondaError, + AssertionError, + ) as exc: + log.warn( + "failed to get package records, retrying. exception was: %s", str(exc) + ) + tests_failed( + metadata, + move_broken=move_broken, + broken_dir=metadata.config.broken_dir, + config=metadata.config, + ) raise # upgrade the warning from silently clobbering to warning. If it is preventing, just # keep it that way. 
- conflict_verbosity = ('warn' if str(context.path_conflict) == 'clobber' else - str(context.path_conflict)) - with env_var('CONDA_PATH_CONFLICT', conflict_verbosity, reset_context): - environ.create_env(metadata.config.test_prefix, actions, config=metadata.config, - env='host', subdir=subdir, is_cross=metadata.is_cross, - is_conda=metadata.name() == 'conda') + conflict_verbosity = ( + "warn" + if str(context.path_conflict) == "clobber" + else str(context.path_conflict) + ) + with env_var("CONDA_PATH_CONFLICT", conflict_verbosity, reset_context): + environ.create_env( + metadata.config.test_prefix, + precs, + config=metadata.config, + env="host", + subdir=subdir, + is_cross=metadata.is_cross, + is_conda=metadata.name() == "conda", + ) with utils.path_prepended(metadata.config.test_prefix): env = dict(os.environ.copy()) env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.test_run_post: - from conda_build.utils import get_installed_packages + from .utils import get_installed_packages + installed = get_installed_packages(metadata.config.test_prefix) - files = installed[metadata.meta['package']['name']]['files'] + files = installed[metadata.meta["package"]["name"]]["files"] replacements = get_all_replacements(metadata.config) try_download(metadata, False, True) create_info_files(metadata, replacements, files, metadata.config.test_prefix) @@ -2917,14 +3424,20 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov # intuitive relative path behavior, though, not work_dir, so we need to adjust where # SRC_DIR points. The initial CWD during tests is test_dir. if metadata.config.remove_work_dir: - env['SRC_DIR'] = metadata.config.test_dir + env["SRC_DIR"] = metadata.config.test_dir - test_script, _ = write_test_scripts(metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace) + test_script, _ = write_test_scripts( + metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace + ) if utils.on_win: - cmd = [os.environ.get('COMSPEC', 'cmd.exe'), "/d", "/c", test_script] + cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script] else: - cmd = [shell_path] + (['-x'] if metadata.config.debug else []) + ['-o', 'errexit', test_script] + cmd = ( + [shell_path] + + (["-x"] if metadata.config.debug else []) + + ["-o", "errexit", test_script] + ) try: test_stats = {} if not provision_only: @@ -2932,24 +3445,36 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov if metadata.config.debug or metadata.config.no_rewrite_stdout_env: rewrite_env = None else: - rewrite_env = { - k: env[k] - for k in ['PREFIX', 'SRC_DIR'] if k in env - } + rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env} if metadata.config.verbose: for k, v in rewrite_env.items(): - print('{} {}={}' - .format('set' if test_script.endswith('.bat') else 'export', k, v)) - utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats, rewrite_stdout_env=rewrite_env) + print( + "{} {}={}".format( + "set" if test_script.endswith(".bat") else "export", + k, + v, + ) + ) + utils.check_call_env( + cmd, + env=env, + cwd=metadata.config.test_dir, + stats=test_stats, + rewrite_stdout_env=rewrite_env, + ) log_stats(test_stats, f"testing {metadata.name()}") if stats is not None and metadata.config.variants: - stats[stats_key(metadata, f'test_{metadata.name()}')] = 
test_stats - if os.path.exists(join(metadata.config.test_dir, 'TEST_FAILED')): - raise subprocess.CalledProcessError(-1, '') + stats[stats_key(metadata, f"test_{metadata.name()}")] = test_stats + if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")): + raise subprocess.CalledProcessError(-1, "") print("TEST END:", test_package_name) except subprocess.CalledProcessError as _: # noqa - tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir, - config=metadata.config) + tests_failed( + metadata, + move_broken=move_broken, + broken_dir=metadata.config.broken_dir, + config=metadata.config, + ) raise if config.need_cleanup and config.recipe_dir is not None and not provision_only: @@ -2959,16 +3484,16 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov def tests_failed(package_or_metadata, move_broken, broken_dir, config): - ''' + """ Causes conda to exit if any of the given package's tests failed. :param m: Package's metadata :type m: Metadata - ''' + """ if not isdir(broken_dir): os.makedirs(broken_dir) - if hasattr(package_or_metadata, 'config'): + if hasattr(package_or_metadata, "config"): pkg = bldpkg_path(package_or_metadata) else: pkg = package_or_metadata @@ -2978,36 +3503,44 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): log = utils.get_logger(__name__) try: shutil.move(pkg, dest) - log.warn('Tests failed for {} - moving package to {}'.format(os.path.basename(pkg), - broken_dir)) + log.warn( + f"Tests failed for {os.path.basename(pkg)} - moving package to {broken_dir}" + ) except OSError: pass - update_index(os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1) + _delegated_update_index( + os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1 + ) sys.exit("TESTS FAILED: " + os.path.basename(pkg)) def check_external(): - if sys.platform.startswith('linux'): - patchelf = external.find_executable('patchelf') + if on_linux: + patchelf = external.find_executable("patchelf") if patchelf is None: sys.exit( "Error:\n" - " Did not find 'patchelf' in: {}\n" + f" Did not find 'patchelf' in: {os.pathsep.join(external.dir_paths)}\n" " 'patchelf' is necessary for building conda packages on Linux with\n" " relocatable ELF libraries. You can install patchelf using conda install\n" - " patchelf.\n".format( - os.pathsep.join(external.dir_paths), - ) + " patchelf.\n" ) -def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None): - +def build_tree( + recipe_list: Iterable[str | MetaData], + config: Config, + stats: dict, + build_only: bool = False, + post: bool | None = None, + notest: bool = False, + variants: dict[str, Any] | None = None, +) -> list[str]: to_build_recursive = [] recipe_list = deque(recipe_list) if utils.on_win: - trash_dir = os.path.join(os.path.dirname(sys.executable), 'pkgs', '.trash') + trash_dir = os.path.join(os.path.dirname(sys.executable), "pkgs", ".trash") if os.path.isdir(trash_dir): # We don't really care if this does a complete job. # Cleaning up some files is better than none. 
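[Reviewer note, not part of the patch] For quick reference, the reformatted runner invocation in `test()` above reduces to the platform switch below. This is a sketch: the helper name is hypothetical, and `shell_path` stands in for conda-build's module-level shell path.

```python
# Sketch only -- not part of the patch.
import os


def build_test_command(
    test_script: str, debug: bool, on_win: bool, shell_path: str = "/bin/bash"
) -> list[str]:
    if on_win:
        # cmd.exe: /d skips AutoRun commands, /c runs the script and exits
        return [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script]
    # -x enables shell tracing when --debug is set; -o errexit aborts the
    # run on the first failing test command
    return [shell_path] + (["-x"] if debug else []) + ["-o", "errexit", test_script]


# build_test_command("conda_test_runner.sh", debug=False, on_win=False)
# -> ["/bin/bash", "-o", "errexit", "conda_test_runner.sh"]
```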
@@ -3037,11 +3570,13 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # This loop recursively builds dependencies if recipes exist try: recipe = recipe_list.popleft() - name = recipe.name() if hasattr(recipe, 'name') else recipe - if hasattr(recipe, 'config'): + name = recipe.name() if hasattr(recipe, "name") else recipe + if hasattr(recipe, "config"): metadata = recipe cfg = metadata.config - cfg.anaconda_upload = config.anaconda_upload # copy over anaconda_upload setting + cfg.anaconda_upload = ( + config.anaconda_upload + ) # copy over anaconda_upload setting # this code is duplicated below because we need to be sure that the build id is set # before downloading happens - or else we lose where downloads are @@ -3051,15 +3586,19 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F to_build_recursive.append(metadata.name()) if not metadata.final: - variants_ = (dict_of_lists_to_list_of_dicts(variants) if variants else - get_package_variants(metadata)) + variants_ = ( + dict_of_lists_to_list_of_dicts(variants) + if variants + else get_package_variants(metadata) + ) # This is where reparsing happens - we need to re-evaluate the meta.yaml for any # jinja2 templating - metadata_tuples = distribute_variants(metadata, variants_, - permit_unsatisfiable_variants=False) + metadata_tuples = distribute_variants( + metadata, variants_, permit_unsatisfiable_variants=False + ) else: - metadata_tuples = ((metadata, False, False), ) + metadata_tuples = ((metadata, False, False),) else: cfg = config @@ -3070,94 +3609,147 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # each tuple is: # metadata, need_source_download, need_reparse_in_env = # We get one tuple per variant - metadata_tuples = render_recipe(recipe, config=cfg, variants=variants, - permit_unsatisfiable_variants=False, - reset_build_id=not cfg.dirty, - bypass_env_check=True) - # restrict to building only one variant for bdist_conda. The way it splits the build - # job breaks variants horribly. + metadata_tuples = render_recipe( + recipe, + config=cfg, + variants=variants, + permit_unsatisfiable_variants=False, + reset_build_id=not cfg.dirty, + bypass_env_check=True, + ) + if post in (True, False): metadata_tuples = metadata_tuples[:1] # This is the "TOP LEVEL" loop. Only vars used in the top-level # recipe are looped over here. 
- for (metadata, need_source_download, need_reparse_in_env) in metadata_tuples: + for metadata, need_source_download, need_reparse_in_env in metadata_tuples: get_all_replacements(metadata.config.variant) if post is None: utils.rm_rf(metadata.config.host_prefix) utils.rm_rf(metadata.config.build_prefix) utils.rm_rf(metadata.config.test_prefix) if metadata.name() not in metadata.config.build_folder: - metadata.config.compute_build_id(metadata.name(), metadata.version(), reset=True) - - packages_from_this = build(metadata, stats, - post=post, - need_source_download=need_source_download, - need_reparse_in_env=need_reparse_in_env, - built_packages=built_packages, - notest=notest, - ) + metadata.config.compute_build_id( + metadata.name(), metadata.version(), reset=True + ) + + packages_from_this = build( + metadata, + stats, + post=post, + need_source_download=need_source_download, + need_reparse_in_env=need_reparse_in_env, + built_packages=built_packages, + notest=notest, + ) if not notest: for pkg, dict_and_meta in packages_from_this.items(): - if pkg.endswith(CONDA_PACKAGE_EXTENSIONS) and os.path.isfile(pkg): + if pkg.endswith(CONDA_PACKAGE_EXTENSIONS) and os.path.isfile( + pkg + ): # we only know how to test conda packages test(pkg, config=metadata.config.copy(), stats=stats) _, meta = dict_and_meta - downstreams = meta.meta.get('test', {}).get('downstreams') + downstreams = meta.meta.get("test", {}).get("downstreams") if downstreams: - channel_urls = tuple(utils.ensure_list(metadata.config.channel_urls) + - [utils.path2url(os.path.abspath(os.path.dirname( - os.path.dirname(pkg))))]) + channel_urls = tuple( + utils.ensure_list(metadata.config.channel_urls) + + [ + utils.path2url( + os.path.abspath( + os.path.dirname(os.path.dirname(pkg)) + ) + ) + ] + ) log = utils.get_logger(__name__) # downstreams can be a dict, for adding capability for worker labels - if hasattr(downstreams, 'keys'): + if hasattr(downstreams, "keys"): downstreams = list(downstreams.keys()) - log.warn("Dictionary keys for downstreams are being " - "ignored right now. Coming soon...") + log.warn( + "Dictionary keys for downstreams are being " + "ignored right now. Coming soon..." + ) else: downstreams = utils.ensure_list(downstreams) for dep in downstreams: log.info(f"Testing downstream package: {dep}") # resolve downstream packages to a known package - r_string = ''.join(random.choice( - string.ascii_uppercase + string.digits) for _ in range(10)) - specs = meta.ms_depends('run') + [MatchSpec(dep), - MatchSpec(' '.join(meta.dist().rsplit('-', 2)))] - specs = [utils.ensure_valid_spec(spec) for spec in specs] + r_string = "".join( + random.choice( + string.ascii_uppercase + string.digits + ) + for _ in range(10) + ) + specs = meta.ms_depends("run") + [ + MatchSpec(dep), + MatchSpec(" ".join(meta.dist().rsplit("-", 2))), + ] + specs = [ + utils.ensure_valid_spec(spec) for spec in specs + ] try: - with TemporaryDirectory(prefix="_", suffix=r_string) as tmpdir: - actions = environ.get_install_actions( - tmpdir, specs, env='run', + with TemporaryDirectory( + prefix="_", suffix=r_string + ) as tmpdir: + precs = environ.get_package_records( + tmpdir, + specs, + env="run", subdir=meta.config.host_subdir, bldpkgs_dirs=meta.config.bldpkgs_dirs, - channel_urls=channel_urls) - except (UnsatisfiableError, DependencyNeedsBuildingError) as e: - log.warn("Skipping downstream test for spec {}; was " - "unsatisfiable. 
Error was {}".format(dep, e)) + channel_urls=channel_urls, + ) + except ( + UnsatisfiableError, + DependencyNeedsBuildingError, + ) as e: + log.warn( + f"Skipping downstream test for spec {dep}; was " + f"unsatisfiable. Error was {e}" + ) continue # make sure to download that package to the local cache if not there - local_file = execute_download_actions(meta, actions, 'host', - package_subset=dep, - require_files=True) + local_file = execute_download_actions( + meta, + precs, + "host", + package_subset=[dep], + require_files=True, + ) # test that package, using the local channel so that our new # upstream dep gets used - test(list(local_file.values())[0][0], - config=meta.config.copy(), stats=stats) + test( + list(local_file.values())[0][0], + config=meta.config.copy(), + stats=stats, + ) built_packages.update({pkg: dict_and_meta}) else: built_packages.update(packages_from_this) - if (os.path.exists(metadata.config.work_dir) and not - (metadata.config.dirty or metadata.config.keep_old_work or - metadata.get_value('build/no_move_top_level_workdir_loops'))): + if os.path.exists(metadata.config.work_dir) and not ( + metadata.config.dirty + or metadata.config.keep_old_work + or metadata.get_value("build/no_move_top_level_workdir_loops") + ): # force the build string to include hashes as necessary metadata.final = True - dest = os.path.join(os.path.dirname(metadata.config.work_dir), - '_'.join(('work_moved', metadata.dist(), - metadata.config.host_subdir, "main_build_loop"))) + dest = os.path.join( + os.path.dirname(metadata.config.work_dir), + "_".join( + ( + "work_moved", + metadata.dist(), + metadata.config.host_subdir, + "main_build_loop", + ) + ), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(metadata.config.work_dir, dest, "work") @@ -3170,23 +3762,31 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # os.unlink(os.path.join(metadata.config.work_dir, 'metadata_conda_debug.yaml')) except DependencyNeedsBuildingError as e: - skip_names = ['python', 'r', 'r-base', 'mro-base', 'perl', 'lua'] + skip_names = ["python", "r", "r-base", "mro-base", "perl", "lua"] built_package_paths = [entry[1][1].path for entry in built_packages.items()] add_recipes = [] # add the failed one back in at the beginning - but its deps may come before it recipe_list.extendleft([recipe]) for pkg, matchspec in zip(e.packages, e.matchspecs): - pkg_name = pkg.split(' ')[0].split('=')[0] + pkg_name = pkg.split(" ")[0].split("=")[0] # if we hit missing dependencies at test time, the error we get says that our # package that we just built needs to be built. Very confusing. Bomb out # if any of our output metadatas are in the exception list of pkgs. 
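# --- Editorial sketch, not part of the patch ---------------------------------
# The recovery path above re-queues work on the `recipe_list` deque: the recipe
# that hit DependencyNeedsBuildingError goes back on the left, and any locally
# found dependency recipes are later pushed in front of it so they build first.
# Note that deque.extendleft() inserts items in reverse order of the iterable:
from collections import deque

queue = deque(["b", "c"])
queue.extendleft(["failed_recipe"])   # -> deque(['failed_recipe', 'b', 'c'])
queue.extendleft(["dep1", "dep2"])    # -> deque(['dep2', 'dep1', 'failed_recipe', 'b', 'c'])
assert queue.popleft() == "dep2"      # dependencies are processed before the failed recipe
# ------------------------------------------------------------------------------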
- if metadata and any(pkg_name == output_meta.name() for (_, output_meta) in - metadata.get_output_metadata_set(permit_undefined_jinja=True)): + if metadata and any( + pkg_name == output_meta.name() + for (_, output_meta) in metadata.get_output_metadata_set( + permit_undefined_jinja=True + ) + ): raise if pkg in to_build_recursive: cfg.clean(remove_folders=False) - raise RuntimeError("Can't build {} due to environment creation error:\n" - .format(recipe) + str(e.message) + "\n" + extra_help) + raise RuntimeError( + f"Can't build {recipe} due to environment creation error:\n" + + str(e.message) + + "\n" + + extra_help + ) if pkg in skip_names: to_build_recursive.append(pkg) @@ -3201,22 +3801,32 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F recipe_glob = glob(os.path.join(recipe_parent_dir, pkg_name)) # conda-forge style. meta.yaml lives one level deeper. if not recipe_glob: - recipe_glob = glob(os.path.join(recipe_parent_dir, '..', pkg_name)) - feedstock_glob = glob(os.path.join(recipe_parent_dir, pkg_name + '-feedstock')) + recipe_glob = glob(os.path.join(recipe_parent_dir, "..", pkg_name)) + feedstock_glob = glob( + os.path.join(recipe_parent_dir, pkg_name + "-feedstock") + ) if not feedstock_glob: - feedstock_glob = glob(os.path.join(recipe_parent_dir, '..', - pkg_name + '-feedstock')) + feedstock_glob = glob( + os.path.join(recipe_parent_dir, "..", pkg_name + "-feedstock") + ) available = False if recipe_glob or feedstock_glob: for recipe_dir in recipe_glob + feedstock_glob: - if not any(path.startswith(recipe_dir) for path in built_package_paths): - dep_metas = render_recipe(recipe_dir, config=metadata.config) + if not any( + path.startswith(recipe_dir) for path in built_package_paths + ): + dep_metas = render_recipe( + recipe_dir, config=metadata.config + ) for dep_meta in dep_metas: - if utils.match_peer_job(MatchSpec(matchspec), dep_meta[0], - metadata): - print(("Missing dependency {0}, but found" + - " recipe directory, so building " + - "{0} first").format(pkg)) + if utils.match_peer_job( + MatchSpec(matchspec), dep_meta[0], metadata + ): + print( + f"Missing dependency {pkg}, but found " + f"recipe directory, so building " + f"{pkg} first" + ) add_recipes.append(recipe_dir) available = True if not available: @@ -3224,33 +3834,43 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F raise # if we failed to render due to unsatisfiable dependencies, we should only bail out # if we've already retried this recipe. - if (not metadata and retried_recipes.count(recipe) and - retried_recipes.count(recipe) >= len(metadata.ms_depends('build'))): + if ( + not metadata + and retried_recipes.count(recipe) + and retried_recipes.count(recipe) >= len(metadata.ms_depends("build")) + ): cfg.clean(remove_folders=False) - raise RuntimeError("Can't build {} due to environment creation error:\n" - .format(recipe) + str(e.message) + "\n" + extra_help) + raise RuntimeError( + f"Can't build {recipe} due to environment creation error:\n" + + str(e.message) + + "\n" + + extra_help + ) retried_recipes.append(os.path.basename(name)) recipe_list.extendleft(add_recipes) tarballs = [f for f in built_packages if f.endswith(CONDA_PACKAGE_EXTENSIONS)] if post in [True, None]: # TODO: could probably use a better check for pkg type than this... 
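# --- Editorial sketch, not part of the patch ---------------------------------
# The upload step below sorts built artifacts by file extension: conda packages
# go to anaconda.org, wheels go to PyPI. CONDA_PACKAGE_EXTENSIONS here mirrors
# the constant imported from conda_build.utils (".conda" and ".tar.bz2"); a
# minimal version of that split, under those assumptions:
from __future__ import annotations

CONDA_PACKAGE_EXTENSIONS = (".conda", ".tar.bz2")

def split_artifacts(paths: list[str]) -> tuple[list[str], list[str]]:
    tarballs = [p for p in paths if p.endswith(CONDA_PACKAGE_EXTENSIONS)]
    wheels = [p for p in paths if p.endswith(".whl")]
    return tarballs, wheels

# split_artifacts(["x-1.0-0.conda", "x-1.0-py3-none-any.whl"])
#   -> (["x-1.0-0.conda"], ["x-1.0-py3-none-any.whl"])
# ------------------------------------------------------------------------------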
- wheels = [f for f in built_packages if f.endswith('.whl')] + wheels = [f for f in built_packages if f.endswith(".whl")] handle_anaconda_upload(tarballs, config=config) handle_pypi_upload(wheels, config=config) # Print the variant information for each package because it is very opaque and never printed. - from conda_build.inspect_pkg import get_hash_input + from .inspect_pkg import get_hash_input + hash_inputs = get_hash_input(tarballs) - print("\nINFO :: The inputs making up the hashes for the built packages are as follows:") + print( + "\nINFO :: The inputs making up the hashes for the built packages are as follows:" + ) print(json.dumps(hash_inputs, sort_keys=True, indent=2)) print("\n") total_time = time.time() - initial_time - max_memory_used = max([step.get('rss') for step in stats.values()] or [0]) - total_disk = sum([step.get('disk') for step in stats.values()] or [0]) - total_cpu_sys = sum([step.get('cpu_sys') for step in stats.values()] or [0]) - total_cpu_user = sum([step.get('cpu_user') for step in stats.values()] or [0]) + max_memory_used = max([step.get("rss") for step in stats.values()] or [0]) + total_disk = sum([step.get("disk") for step in stats.values()] or [0]) + total_cpu_sys = sum([step.get("cpu_sys") for step in stats.values()] or [0]) + total_cpu_user = sum([step.get("cpu_user") for step in stats.values()] or [0]) print( "{bar}\n" @@ -3269,21 +3889,21 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F ) ) - stats['total'] = { - 'time': total_time, - 'memory': max_memory_used, - 'disk': total_disk, + stats["total"] = { + "time": total_time, + "memory": max_memory_used, + "disk": total_disk, } if config.stats_file: - with open(config.stats_file, 'w') as f: + with open(config.stats_file, "w") as f: json.dump(stats, f) return list(built_packages.keys()) def handle_anaconda_upload(paths, config): - from conda_build.os_utils.external import find_executable + from .os_utils.external import find_executable paths = utils.ensure_list(paths) @@ -3298,24 +3918,23 @@ def handle_anaconda_upload(paths, config): else: upload = True - anaconda = find_executable('anaconda') + anaconda = find_executable("anaconda") - no_upload_message = '' - if not utils.on_win or 'MSYSTEM' in os.environ: + no_upload_message = "" + if not utils.on_win or "MSYSTEM" in os.environ: joiner = " \\\n " - prompter = '' + prompter = "" else: joiner = " ^\n " - prompter = '$ ' + prompter = "$ " if not upload or anaconda is None: no_upload_message = ( - "# If you want to upload package(s) to anaconda.org later, type:\n" - "\n" + "# If you want to upload package(s) to anaconda.org later, type:\n\n" ) no_upload_message += ( "\n" "# To have conda build upload to anaconda.org automatically, use\n" - "# {}conda config --set anaconda_upload yes\n".format(prompter) + f"# {prompter}conda config --set anaconda_upload yes\n" ) no_upload_message += f"anaconda upload{joiner}" + joiner.join(paths) @@ -3323,24 +3942,26 @@ def handle_anaconda_upload(paths, config): print(no_upload_message) return - if anaconda is None: + if not anaconda: print(no_upload_message) sys.exit( "Error: cannot locate anaconda command (required for upload)\n" "# Try:\n" - "# {}conda install anaconda-client".format(prompter) + f"# {prompter}conda install anaconda-client" ) - cmd = [anaconda, ] + cmd = [ + anaconda, + ] if config.token: - cmd.extend(['--token', config.token]) - cmd.append('upload') + cmd.extend(["--token", config.token]) + cmd.append("upload") if config.force_upload: - cmd.append('--force') + 
cmd.append("--force") if config.user: - cmd.extend(['--user', config.user]) + cmd.extend(["--user", config.user]) for label in config.labels: - cmd.extend(['--label', label]) + cmd.extend(["--label", label]) for package in paths: try: print(f"Uploading {os.path.basename(package)} to anaconda.org") @@ -3351,19 +3972,26 @@ def handle_anaconda_upload(paths, config): def handle_pypi_upload(wheels, config): - args = ['twine', 'upload', '--sign-with', config.sign_with, '--repository', config.repository] + args = [ + "twine", + "upload", + "--sign-with", + config.sign_with, + "--repository", + config.repository, + ] if config.user: - args.extend(['--user', config.user]) + args.extend(["--user", config.user]) if config.password: - args.extend(['--password', config.password]) + args.extend(["--password", config.password]) if config.sign: - args.extend(['--sign']) + args.extend(["--sign"]) if config.identity: - args.extend(['--identity', config.identity]) + args.extend(["--identity", config.identity]) if config.config_file: - args.extend(['--config-file', config.config_file]) + args.extend(["--config-file", config.config_file]) if config.repository: - args.extend(['--repository', config.repository]) + args.extend(["--repository", config.repository]) wheels = utils.ensure_list(wheels) @@ -3373,8 +4001,10 @@ def handle_pypi_upload(wheels, config): try: utils.check_call_env(args + [f]) except: - utils.get_logger(__name__).warn("wheel upload failed - is twine installed?" - " Is this package registered?") + utils.get_logger(__name__).warn( + "wheel upload failed - is twine installed?" + " Is this package registered?" + ) utils.get_logger(__name__).warn(f"Wheel file left in {f}") else: @@ -3383,7 +4013,7 @@ def handle_pypi_upload(wheels, config): def print_build_intermediate_warning(config): print("\n") - print('#' * 84) + print("#" * 84) print("Source and build intermediates have been left in " + config.croot + ".") build_folders = utils.get_build_folders(config.croot) print(f"There are currently {len(build_folders)} accumulated.") @@ -3398,26 +4028,25 @@ def clean_build(config, folders=None): def is_package_built(metadata, env, include_local=True): + # bldpkgs_dirs is typically {'$ENVIRONMENT/conda-bld/noarch', '$ENVIRONMENT/conda-bld/osx-arm64'} + # could pop subdirs (last path element) and call update_index() once for d in metadata.config.bldpkgs_dirs: if not os.path.isdir(d): os.makedirs(d) - update_index(d, verbose=metadata.config.debug, warn=False, threads=1) - subdir = getattr(metadata.config, f'{env}_subdir') + _delegated_update_index(d, verbose=metadata.config.debug, warn=False, threads=1) + subdir = getattr(metadata.config, f"{env}_subdir") - urls = [url_path(metadata.config.output_folder), 'local'] if include_local else [] - urls += get_rc_urls() + urls = [ + *([url_path(metadata.config.output_folder), "local"] if include_local else []), + *context.channels, + ] if metadata.config.channel_urls: urls.extend(metadata.config.channel_urls) - spec = MatchSpec(name=metadata.name(), version=metadata.version(), build=metadata.build_id()) + spec = MatchSpec( + name=metadata.name(), version=metadata.version(), build=metadata.build_id() + ) + + from conda.api import SubdirData - if conda_45: - from conda.api import SubdirData - return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) - else: - index, _, _ = get_build_index(subdir=subdir, bldpkgs_dir=metadata.config.bldpkgs_dir, - output_folder=metadata.config.output_folder, channel_urls=urls, - debug=metadata.config.debug, 
verbose=metadata.config.verbose, - locking=metadata.config.locking, timeout=metadata.config.timeout, - clear_cache=True) - return any(spec.match(prec) for prec in index.values()) + return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) diff --git a/conda_build/cli-32.exe b/conda_build/cli-32.exe index b17d9c7b23..eaf5188c31 100755 Binary files a/conda_build/cli-32.exe and b/conda_build/cli-32.exe differ diff --git a/conda_build/cli-64.exe b/conda_build/cli-64.exe index 7b7f9c67d2..0251e7a4bc 100755 Binary files a/conda_build/cli-64.exe and b/conda_build/cli-64.exe differ diff --git a/conda_build/cli/actions.py b/conda_build/cli/actions.py index 0067f370de..93281d9b62 100644 --- a/conda_build/cli/actions.py +++ b/conda_build/cli/actions.py @@ -8,11 +8,11 @@ def __call__(self, parser, namespace, items, option_string=None): setattr(namespace, self.dest, dict()) for item in items: - key, value = item.split('=') + key, value = item.split("=") if key in getattr(namespace, self.dest): raise KeyError( - f"Key {key} cannot be overwritten. " - "It's likely that the key you've used " - "is already in use by conda-build." - ) + f"Key {key} cannot be overwritten. " + "It's likely that the key you've used " + "is already in use by conda-build." + ) getattr(namespace, self.dest)[key] = value diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 8582247606..a966677471 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -1,33 +1,46 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse -from glob2 import glob -from itertools import chain import logging -from os.path import abspath, expanduser, expandvars -from pathlib import Path import sys import warnings +from glob import glob +from itertools import chain +from os.path import abspath, expanduser, expandvars +from pathlib import Path +from typing import TYPE_CHECKING from conda.auxlib.ish import dals +from conda.base.context import context from conda.common.io import dashlist -import filelock -import conda_build.api as api -import conda_build.build as build -import conda_build.utils as utils -from conda_build.conda_interface import (add_parser_channels, binstar_upload, - cc_conda_build) -from conda_build.cli.main_render import get_render_parser -from conda_build.cli.actions import KeyValueAction -import conda_build.source as source -from conda_build.utils import LoggingContext -from conda_build.config import Config, zstd_compression_level_default, get_channel_urls +from .. import api, build, source, utils +from ..config import ( + get_channel_urls, + get_or_merge_config, + zstd_compression_level_default, +) +from ..utils import LoggingContext +from .actions import KeyValueAction +from .main_render import get_render_parser + +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Sequence -def parse_args(args): - p = get_render_parser() - p.description = dals( + +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = get_render_parser() + parser.prog = "conda build" + parser.description = dals( """ Tool for building conda packages. 
A conda package is a binary tarball containing system-level libraries, Python modules, executable programs, or @@ -36,346 +49,445 @@ def parse_args(args): different sets of packages. """ ) - p.add_argument( + parser.add_argument( "--check", action="store_true", help="Only check (validate) the recipe.", ) - p.add_argument( + parser.add_argument( "--no-anaconda-upload", action="store_false", help="Do not ask to upload the package to anaconda.org.", - dest='anaconda_upload', - default=binstar_upload, + dest="anaconda_upload", + default=context.binstar_upload, ) - p.add_argument( + parser.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, - dest='anaconda_upload', - default=binstar_upload, + dest="anaconda_upload", + default=context.binstar_upload, ) - p.add_argument( + parser.add_argument( "--no-include-recipe", action="store_false", help="Don't include the recipe inside the built package.", - dest='include_recipe', - default=cc_conda_build.get('include_recipe', 'true').lower() == 'true', + dest="include_recipe", + default=context.conda_build.get("include_recipe", "true").lower() == "true", ) - p.add_argument( - '-s', "--source", + parser.add_argument( + "-s", + "--source", action="store_true", help="Only obtain the source (but don't build).", ) - p.add_argument( - '-t', "--test", + parser.add_argument( + "-t", + "--test", action="store_true", help="Test package (assumes package is already built). RECIPE_DIR argument must be a " "path to built package .tar.bz2 file.", ) - p.add_argument( - '--no-test', - action='store_true', - dest='notest', + parser.add_argument( + "--no-test", + action="store_true", + dest="notest", help="Do not test the package.", ) - p.add_argument( - '-b', '--build-only', + parser.add_argument( + "-b", + "--build-only", action="store_true", help="""Only run the build, without any post processing or testing. Implies --no-test and --no-anaconda-upload.""", ) - p.add_argument( - '-p', '--post', + parser.add_argument( + "-p", + "--post", action="store_true", help="Run the post-build logic. Implies --no-anaconda-upload.", ) - p.add_argument( - '-p', '--test-run-post', + parser.add_argument( + "-p", + "--test-run-post", action="store_true", help="Run the post-build logic during testing.", ) - p.add_argument( - 'recipe', - metavar='RECIPE_PATH', - nargs='+', + parser.add_argument( + "recipe", + metavar="RECIPE_PATH", + nargs="+", help="Path to recipe directory. Pass 'purge' here to clean the " "work and test intermediates. Pass 'purge-all' to also remove " "previously built packages.", ) - p.add_argument( - '--skip-existing', - action='store_true', - help=("Skip recipes for which there already exists an existing build " - "(locally or in the channels)."), - default=cc_conda_build.get('skip_existing', 'false').lower() == 'true', - ) - p.add_argument( - '--keep-old-work', - action='store_true', - dest='keep_old_work', + parser.add_argument( + "--skip-existing", + action="store_true", + help=( + "Skip recipes for which there already exists an existing build " + "(locally or in the channels)." + ), + default=context.conda_build.get("skip_existing", "false").lower() == "true", + ) + parser.add_argument( + "--keep-old-work", + action="store_true", + dest="keep_old_work", help="Do not remove anything from environment, even after successful " - "build and test." + "build and test.", ) - p.add_argument( - '--dirty', - action='store_true', - help='Do not remove work directory or _build environment, ' - 'to speed up debugging. 
Does not apply patches or download source.' + parser.add_argument( + "--dirty", + action="store_true", + help="Do not remove work directory or _build environment, " + "to speed up debugging. Does not apply patches or download source.", ) - p.add_argument( - '-q', "--quiet", + parser.add_argument( + "-q", + "--quiet", action="store_true", help="do not display progress bar", - default=cc_conda_build.get('quiet', 'false').lower() == 'true', + default=context.conda_build.get("quiet", "false").lower() == "true", ) - p.add_argument( - '--debug', + parser.add_argument( + "--debug", action="store_true", help="Show debug output from source checkouts and conda", ) - p.add_argument( - '--token', + parser.add_argument( + "--token", help="Token to pass through to anaconda upload", - default=cc_conda_build.get('anaconda_token'), + default=context.conda_build.get("anaconda_token"), ) - p.add_argument( - '--user', + parser.add_argument( + "--user", help="User/organization to upload packages to on anaconda.org or pypi", - default=cc_conda_build.get('user'), + default=context.conda_build.get("user"), ) - p.add_argument( - '--label', action='append', dest='labels', default=[], + parser.add_argument( + "--label", + action="append", + dest="labels", + default=[], help="Label argument to pass through to anaconda upload", ) - p.add_argument( - '--no-force-upload', + parser.add_argument( + "--no-force-upload", help="Disable force upload to anaconda.org, preventing overwriting any existing packages", - dest='force_upload', + dest="force_upload", default=True, - action='store_false', + action="store_false", ) - p.add_argument( + parser.add_argument( "--zstd-compression-level", - help=("When building v2 packages, set the compression level used by " - "conda-package-handling. Defaults to the maximum."), + help=( + "When building v2 packages, set the compression level used by " + "conda-package-handling. " + f"Defaults to {zstd_compression_level_default}." 
+ ), type=int, choices=range(1, 23), - default=cc_conda_build.get('zstd_compression_level', zstd_compression_level_default), + default=context.conda_build.get( + "zstd_compression_level", zstd_compression_level_default + ), ) - pypi_grp = p.add_argument_group("PyPI upload parameters (twine)") + pypi_grp = parser.add_argument_group("PyPI upload parameters (twine)") pypi_grp.add_argument( - '--password', + "--password", help="password to use when uploading packages to pypi", ) pypi_grp.add_argument( - '--sign', default=False, - help="sign files when uploading to pypi" + "--sign", default=False, help="sign files when uploading to pypi" ) pypi_grp.add_argument( - '--sign-with', default='gpg', dest='sign_with', - help="program to use to sign files when uploading to pypi" + "--sign-with", + default="gpg", + dest="sign_with", + help="program to use to sign files when uploading to pypi", ) pypi_grp.add_argument( - '--identity', - help="GPG identity to use to sign files when uploading to pypi" + "--identity", help="GPG identity to use to sign files when uploading to pypi" ) pypi_grp.add_argument( - '--config-file', + "--config-file", help="path to .pypirc file to use when uploading to pypi", - default=(abspath(expanduser(expandvars(cc_conda_build.get('pypirc')))) - if cc_conda_build.get('pypirc') - else cc_conda_build.get('pypirc')), + default=( + abspath(expanduser(expandvars(pypirc))) + if (pypirc := context.conda_build.get("pypirc")) + else None + ), ) pypi_grp.add_argument( - '--repository', '-r', help="PyPI repository to upload to", - default=cc_conda_build.get('pypi_repository', 'pypitest'), + "--repository", + "-r", + help="PyPI repository to upload to", + default=context.conda_build.get("pypi_repository", "pypitest"), ) - p.add_argument( + parser.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", - dest='activate', - default=cc_conda_build.get('activate', 'true').lower() == 'true', + dest="activate", + default=context.conda_build.get("activate", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-build-id", action="store_false", - help=("do not generate unique build folder names. Use if having issues with " - "paths being too long. Deprecated, please use --build-id-pat='' instead"), - dest='set_build_id', + help=( + "do not generate unique build folder names. Use if having issues with " + "paths being too long. Deprecated, please use --build-id-pat='' instead" + ), + dest="set_build_id", # note: inverted - dest stores positive logic - default=cc_conda_build.get('set_build_id', 'true').lower() == 'true', + default=context.conda_build.get("set_build_id", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--build-id-pat", - help=("specify a templated pattern to use as build folder names. Use if having issues with " - "paths being too long."), - dest='build_id_pat', - default=cc_conda_build.get('build_id_pat', '{n}_{t}'), - ) - p.add_argument( + help=( + "specify a templated pattern to use as build folder names. Use if having issues with " + "paths being too long." + ), + dest="build_id_pat", + default=context.conda_build.get("build_id_pat", "{n}_{t}"), + ) + parser.add_argument( "--croot", - help=("Build root folder. Equivalent to CONDA_BLD_PATH, but applies only " - "to this call of conda-build.") + help=( + "Build root folder. Equivalent to CONDA_BLD_PATH, but applies only " + "to this call of conda-build." 
+ ), ) - p.add_argument( + parser.add_argument( "--verify", action="store_true", help="run verification on recipes or packages when building", - default=cc_conda_build.get('verify', 'true').lower() == 'true', + default=context.conda_build.get("verify", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", - default=cc_conda_build.get('verify', 'true').lower() == 'true', + default=context.conda_build.get("verify", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", - default=cc_conda_build.get('exit_on_verify_error', 'false').lower() == 'true', + default=context.conda_build.get("exit_on_verify_error", "false").lower() + == "true", ) - p.add_argument( + parser.add_argument( "--output-folder", - help=("folder to dump output package to. Package are moved here if build or test succeeds." - " Destination folder must exist prior to using this."), - default=cc_conda_build.get('output_folder') - ) - p.add_argument( - "--no-prefix-length-fallback", dest='prefix_length_fallback', + help=( + "folder to dump output package to. Package are moved here if build or test succeeds." + " Destination folder must exist prior to using this." + ), + default=context.conda_build.get("output_folder"), + ) + parser.add_argument( + "--no-prefix-length-fallback", + dest="prefix_length_fallback", action="store_false", - help=("Disable fallback to older 80 character prefix length if environment creation" - " fails due to insufficient prefix length in dependency packages"), + help=( + "Disable fallback to older 80 character prefix length if environment creation" + " fails due to insufficient prefix length in dependency packages" + ), default=True, ) - p.add_argument( - "--prefix-length-fallback", dest='prefix_length_fallback', + parser.add_argument( + "--prefix-length-fallback", + dest="prefix_length_fallback", action="store_true", - help=("Disable fallback to older 80 character prefix length if environment creation" - " fails due to insufficient prefix length in dependency packages"), + help=( + "Disable fallback to older 80 character prefix length if environment creation" + " fails due to insufficient prefix length in dependency packages" + ), # this default will change to false in the future, when we deem that the community has # had enough time to build long-prefix length packages. default=True, ) - p.add_argument( - "--prefix-length", dest='_prefix_length', - help=("length of build prefix. For packages with binaries that embed the path, this is" - " critical to ensuring that your package can run as many places as possible. Note" - "that this value can be altered by the OS below conda-build (e.g. encrypted " - "filesystems on Linux), and you should prefer to set --croot to a non-encrypted " - "location instead, so that you maintain a known prefix length."), + parser.add_argument( + "--prefix-length", + dest="_prefix_length", + help=( + "length of build prefix. For packages with binaries that embed the path, this is" + " critical to ensuring that your package can run as many places as possible. Note" + "that this value can be altered by the OS below conda-build (e.g. encrypted " + "filesystems on Linux), and you should prefer to set --croot to a non-encrypted " + "location instead, so that you maintain a known prefix length." 
+ ), # this default will change to false in the future, when we deem that the community has # had enough time to build long-prefix length packages. - default=255, type=int, - ) - p.add_argument( - "--no-locking", dest='locking', default=True, action="store_false", - help=("Disable locking, to avoid unresolved race condition issues. Unsafe to run multiple " - "builds at once on one system with this set.") - ) - p.add_argument( - "--no-remove-work-dir", dest='remove_work_dir', default=True, action="store_false", - help=("Disable removal of the work dir before testing. Be careful using this option, as" - " you package may depend on files that are not included in the package, and may pass " - "tests, but ultimately fail on installed systems.") - ) - p.add_argument( - "--error-overlinking", dest='error_overlinking', action="store_true", - help=("Enable error when shared libraries from transitive dependencies are directly " - "linked to any executables or shared libraries in built packages. This is disabled " - "by default, but will be enabled by default in conda-build 4.0."), - default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true', - ) - p.add_argument( - "--no-error-overlinking", dest='error_overlinking', action="store_false", - help=("Disable error when shared libraries from transitive dependencies are directly " - "linked to any executables or shared libraries in built packages. This is currently " - "the default behavior, but will change in conda-build 4.0."), - default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true', - ) - p.add_argument( - "--error-overdepending", dest='error_overdepending', action="store_true", - help=("Enable error when packages with names beginning `lib` or which have " - "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " - "any of the files in this package."), - default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', - ) - p.add_argument( - "--no-error-overdepending", dest='error_overdepending', action="store_false", - help=("Disable error when packages with names beginning `lib` or which have " - "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " - "any of the files in this package."), - default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', - ) - p.add_argument( - "--long-test-prefix", action="store_true", - help=("Use a long prefix for the test prefix, as well as the build prefix. Affects only " - "Linux and Mac. Prefix length matches the --prefix-length flag. This is on by " - "default in conda-build 3.0+"), - default=cc_conda_build.get('long_test_prefix', 'true').lower() == 'true', - ) - p.add_argument( - "--no-long-test-prefix", dest="long_test_prefix", action="store_false", - help=("Do not use a long prefix for the test prefix, as well as the build prefix." - " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. "), - default=cc_conda_build.get('long_test_prefix', 'true').lower() == 'true', - ) - p.add_argument( - '--keep-going', '-k', action='store_true', - help=("When running tests, keep going after each failure. Default is to stop on the first " - "failure.") - ) - p.add_argument( - '--cache-dir', - help=('Path to store the source files (archives, git clones, etc.) 
during the build.'), - default=(abspath(expanduser(expandvars(cc_conda_build.get('cache_dir')))) - if cc_conda_build.get('cache_dir') - else cc_conda_build.get('cache_dir')), - ) - p.add_argument( - "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", - default=cc_conda_build.get('copy_test_source_files', 'true').lower() == 'true', - help=("Disables copying the files necessary for testing the package into " - "the info/test folder. Passing this argument means it may not be possible " - "to test the package without internet access. There is also a danger that " - "the source archive(s) containing the files could become unavailable sometime " - "in the future.") - ) - p.add_argument( - '--merge-build-host', action="store_true", - help=('Merge the build and host directories, even when host section or compiler ' - 'jinja2 is present'), - default=cc_conda_build.get('merge_build_host', 'false').lower() == 'true', - ) - p.add_argument('--stats-file', help=('File path to save build statistics to. Stats are ' - 'in JSON format'), ) - p.add_argument('--extra-deps', - nargs='+', - help=('Extra dependencies to add to all environment creation steps. This ' - 'is only enabled for testing with the -t or --test flag. Change ' - 'meta.yaml or use templates otherwise.'), ) - p.add_argument( - '--extra-meta', - nargs='*', + default=255, + type=int, + ) + parser.add_argument( + "--no-locking", + dest="locking", + default=True, + action="store_false", + help=( + "Disable locking, to avoid unresolved race condition issues. Unsafe to run multiple " + "builds at once on one system with this set." + ), + ) + parser.add_argument( + "--no-remove-work-dir", + dest="remove_work_dir", + default=True, + action="store_false", + help=( + "Disable removal of the work dir before testing. Be careful using this option, as" + " you package may depend on files that are not included in the package, and may pass " + "tests, but ultimately fail on installed systems." + ), + ) + parser.add_argument( + "--error-overlinking", + dest="error_overlinking", + action="store_true", + help=( + "Enable error when shared libraries from transitive dependencies are directly " + "linked to any executables or shared libraries in built packages. This is disabled " + "by default, but will be enabled by default in conda-build 4.0." + ), + default=context.conda_build.get("error_overlinking", "false").lower() == "true", + ) + parser.add_argument( + "--no-error-overlinking", + dest="error_overlinking", + action="store_false", + help=( + "Disable error when shared libraries from transitive dependencies are directly " + "linked to any executables or shared libraries in built packages. This is currently " + "the default behavior, but will change in conda-build 4.0." + ), + default=context.conda_build.get("error_overlinking", "false").lower() == "true", + ) + parser.add_argument( + "--error-overdepending", + dest="error_overdepending", + action="store_true", + help=( + "Enable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package." 
+ ), + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", + ) + parser.add_argument( + "--no-error-overdepending", + dest="error_overdepending", + action="store_false", + help=( + "Disable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package." + ), + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", + ) + parser.add_argument( + "--long-test-prefix", + action="store_true", + help=( + "Use a long prefix for the test prefix, as well as the build prefix. Affects only " + "Linux and Mac. Prefix length matches the --prefix-length flag. This is on by " + "default in conda-build 3.0+" + ), + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", + ) + parser.add_argument( + "--no-long-test-prefix", + dest="long_test_prefix", + action="store_false", + help=( + "Do not use a long prefix for the test prefix, as well as the build prefix." + " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. " + ), + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", + ) + parser.add_argument( + "--keep-going", + "-k", + action="store_true", + help=( + "When running tests, keep going after each failure. Default is to stop on the first " + "failure." + ), + ) + parser.add_argument( + "--cache-dir", + help=( + "Path to store the source files (archives, git clones, etc.) during the build." + ), + default=( + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) + else None + ), + ) + parser.add_argument( + "--no-copy-test-source-files", + dest="copy_test_source_files", + action="store_false", + default=context.conda_build.get("copy_test_source_files", "true").lower() + == "true", + help=( + "Disables copying the files necessary for testing the package into " + "the info/test folder. Passing this argument means it may not be possible " + "to test the package without internet access. There is also a danger that " + "the source archive(s) containing the files could become unavailable sometime " + "in the future." + ), + ) + parser.add_argument( + "--merge-build-host", + action="store_true", + help=( + "Merge the build and host directories, even when host section or compiler " + "jinja2 is present" + ), + default=context.conda_build.get("merge_build_host", "false").lower() == "true", + ) + parser.add_argument( + "--stats-file", + help="File path to save build statistics to. Stats are in JSON format", + ) + parser.add_argument( + "--extra-deps", + nargs="+", + help=( + "Extra dependencies to add to all environment creation steps. This " + "is only enabled for testing with the -t or --test flag. Change " + "meta.yaml or use templates otherwise." + ), + ) + parser.add_argument( + "--extra-meta", + nargs="*", action=KeyValueAction, help="Key value pairs of metadata to add to about.json. Should be " "defined as Key=Value with a space separating each pair.", metavar="KEY=VALUE", ) - p.add_argument('--suppress-variables', - action='store_true', - help=("Do not display value of environment variables specified in build.script_env."), ) - - add_parser_channels(p) - args = p.parse_args(args) + parser.add_argument( + "--suppress-variables", + action="store_true", + help=( + "Do not display value of environment variables specified in build.script_env." 
+ ), + ) - check_recipe(args.recipe) + add_parser_channels(parser) - return p, args + parsed = parser.parse_args(args) + check_recipe(parsed.recipe) + return parser, parsed def check_recipe(path_list): @@ -392,7 +504,7 @@ def check_recipe(path_list): "It should be a path to a folder.\n" "Forcing conda-build to use the recipe file." ), - UserWarning + UserWarning, ) @@ -401,13 +513,13 @@ def output_action(recipe, config): config.verbose = False config.debug = False paths = api.get_output_file_paths(recipe, config=config) - print('\n'.join(sorted(paths))) + print("\n".join(sorted(paths))) def source_action(recipe, config): metadata = api.render(recipe, config=config)[0][0] source.provide(metadata) - print('Source tree in:', metadata.config.work_dir) + print("Source tree in:", metadata.config.work_dir) def test_action(recipe, config): @@ -418,44 +530,46 @@ def check_action(recipe, config): return api.check(recipe, config=config) -def execute(args): - _parser, args = parse_args(args) - config = Config(**args.__dict__) +def execute(args: Sequence[str] | None = None) -> int: + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + config = get_or_merge_config(None, **parsed.__dict__) build.check_external() # change globals in build module, see comment there as well - config.channel_urls = get_channel_urls(args.__dict__) + config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = args.override_channels - config.verbose = not args.quiet or args.debug + config.verbose = not parsed.quiet or parsed.debug - if 'purge' in args.recipe: + if "purge" in parsed.recipe: build.clean_build(config) - return + return 0 - if 'purge-all' in args.recipe: + if "purge-all" in parsed.recipe: build.clean_build(config) config.clean_pkgs() - return + return 0 - outputs = None - if args.output: + if parsed.output: config.verbose = False config.quiet = True config.debug = False - outputs = [output_action(recipe, config) for recipe in args.recipe] - elif args.test: - outputs = [] + for recipe in parsed.recipe: + output_action(recipe, config) + return 0 + + if parsed.test: failed_recipes = [] recipes = chain.from_iterable( - glob(abspath(recipe)) if "*" in recipe else [recipe] - for recipe in args.recipe + glob(abspath(recipe), recursive=True) if "*" in recipe else [recipe] + for recipe in parsed.recipe ) for recipe in recipes: try: test_action(recipe, config) except: - if not args.keep_going: + if not parsed.keep_going: raise else: failed_recipes.append(recipe) @@ -466,37 +580,27 @@ def execute(args): sys.exit(len(failed_recipes)) else: print("All tests passed") - elif args.source: - outputs = [source_action(recipe, config) for recipe in args.recipe] - elif args.check: - outputs = [check_action(recipe, config) for recipe in args.recipe] + elif parsed.source: + for recipe in parsed.recipe: + source_action(recipe, config) + elif parsed.check: + for recipe in parsed.recipe: + check_action(recipe, config) else: - outputs = api.build( - args.recipe, - post=args.post, - test_run_post=args.test_run_post, - build_only=args.build_only, - notest=args.notest, + api.build( + parsed.recipe, + post=parsed.post, + test_run_post=parsed.test_run_post, + build_only=parsed.build_only, + notest=parsed.notest, already_built=None, config=config, - verify=args.verify, - variants=args.variants, - cache_dir=args.cache_dir, + verify=parsed.verify, + variants=parsed.variants, + cache_dir=parsed.cache_dir, ) - if not args.output and len(utils.get_build_folders(config.croot)) > 0: + if 
utils.get_build_folders(config.croot): build.print_build_intermediate_warning(config) - return outputs - -def main(): - try: - execute(sys.argv[1:]) - except RuntimeError as e: - print(str(e)) - sys.exit(1) - except filelock.Timeout as e: - print("File lock on {} could not be obtained. You might need to try fewer builds at once." - " Otherwise, run conda clean --lock".format(e.lock_file)) - sys.exit(1) - return + return 0 diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index e1ada47bf9..d30b725b3d 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -1,12 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging from os.path import abspath, expanduser -import sys +from typing import TYPE_CHECKING + +from conda.base.context import context -from conda_build.conda_interface import ArgumentParser +from .. import api -from conda_build import api +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Sequence logging.basicConfig(level=logging.INFO) @@ -35,8 +41,11 @@ """ -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + + parser = ArgumentParser( + prog="conda convert", description=""" Various tools to convert conda packages. Takes a pure Python package build for one platform and converts it to work on one or more other platforms, or @@ -45,77 +54,87 @@ def parse_args(args): ) # TODO: Factor this into a subcommand, since it's python package specific - p.add_argument( - 'files', - nargs='+', - help="Package files to convert." - ) - p.add_argument( - '-p', "--platform", - dest='platforms', + parser.add_argument("files", nargs="+", help="Package files to convert.") + parser.add_argument( + "-p", + "--platform", + dest="platforms", action="append", - choices=['osx-64', 'osx-arm64', - 'linux-32', 'linux-64', 'linux-ppc64', 'linux-ppc64le', - 'linux-s390x', 'linux-armv6l', 'linux-armv7l', 'linux-aarch64', - 'win-32', 'win-64', 'win-arm64', 'all'], + choices=[ + "osx-64", + "osx-arm64", + "linux-32", + "linux-64", + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + "win-32", + "win-64", + "win-arm64", + "all", + ], help="Platform to convert the packages to.", - default=None + default=None, ) - p.add_argument( - "--dependencies", "-d", - nargs='*', + parser.add_argument( + "--dependencies", + "-d", + nargs="*", help="""Additional (besides python) dependencies of the converted package. To specify a version restriction for a dependency, wrap the dependency in quotes, like 'package >=2.0'.""", ) - p.add_argument( - '--show-imports', - action='store_true', + parser.add_argument( + "--show-imports", + action="store_true", default=False, help="Show Python imports for compiled parts of the package.", ) - p.add_argument( - '-f', "--force", + parser.add_argument( + "-f", + "--force", action="store_true", help="Force convert, even when a package has compiled C extensions.", ) - p.add_argument( - '-o', '--output-dir', - default='.', + parser.add_argument( + "-o", + "--output-dir", + default=".", help="""Directory to write the output files. 
The packages will be organized in platform/ subdirectories, e.g., - win-32/package-1.0-py27_0.tar.bz2.""" + win-32/package-1.0-py27_0.tar.bz2.""", ) - p.add_argument( - '-v', '--verbose', + parser.add_argument( + "-v", + "--verbose", default=False, - action='store_true', - help="Print verbose output." + action="store_true", + help="Print verbose output.", ) - p.add_argument( + parser.add_argument( "--dry-run", action="store_true", help="Only display what would have been done.", ) - p.add_argument( - "-q", "--quiet", - action="store_true", - help="Don't print as much output." + parser.add_argument( + "-q", "--quiet", action="store_true", help="Don't print as much output." ) - args = p.parse_args(args) - return p, args + return parser, parser.parse_args(args) -def execute(args): - _, args = parse_args(args) - files = args.files - del args.__dict__['files'] +def execute(args: Sequence[str] | None = None) -> int: + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + files = parsed.files + del parsed.__dict__["files"] for f in files: f = abspath(expanduser(f)) - api.convert(f, **args.__dict__) - + api.convert(f, **parsed.__dict__) -def main(): - return execute(sys.argv[1:]) + return 0 diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 22cdf2cb73..731f964217 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -1,15 +1,21 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging import sys -from argparse import ArgumentParser +from typing import TYPE_CHECKING + +from conda.base.context import context -from conda_build import api -from conda_build.utils import on_win -# we extend the render parser because we basically need to render the recipe before -# we can say what env to create. This is not really true for debugging tests, but meh... -from conda_build.cli.main_render import get_render_parser -from conda_build.cli import validators as valid +from .. import api +from ..utils import on_win +from . import validators as valid +from .main_render import get_render_parser + +if TYPE_CHECKING: + from argparse import ArgumentParser + from typing import Sequence logging.basicConfig(level=logging.INFO) @@ -17,6 +23,7 @@ def get_parser() -> ArgumentParser: """Returns a parser object for this command""" p = get_render_parser() + p.prog = "conda debug" p.description = """ Set up environments and activation scripts to debug your build or test phase. @@ -24,51 +31,88 @@ def get_parser() -> ArgumentParser: """ # we do this one separately because we only allow one entry to conda render p.add_argument( - 'recipe_or_package_file_path', - help=("Path to recipe directory or package file to use for dependency and source information. " - "If you use a recipe, you get the build/host env and source work directory. If you use " - "a package file, you get the test environments and the test_tmp folder."), - type=valid.validate_is_conda_pkg_or_recipe_dir + "recipe_or_package_file_path", + help=( + "Path to recipe directory or package file to use for dependency and source information. " + "If you use a recipe, you get the build/host env and source work directory. If you use " + "a package file, you get the test environments and the test_tmp folder." + ), + type=valid.validate_is_conda_pkg_or_recipe_dir, + ) + p.add_argument( + "-p", + "--path", + help=( + "root path in which to place envs, source and activation script. 
Defaults to a " + "standard conda-build work folder (packagename_timestamp) in your conda-bld folder." + ), + ) + p.add_argument( + "-o", + "--output-id", + help=( + "fnmatch pattern that is associated with the output that you want to create an env for. " + "Must match only one file, as we don't support creating envs for more than one output at a time. " + "The top-level recipe can be specified by passing 'TOPLEVEL' here" + ), + ) + p.add_argument( + "-a", + "--activate-string-only", + action="store_true", + help="Output only the string to the used generated activation script. Use this for creating envs in scripted " + "environments.", ) - p.add_argument("-p", "--path", - help=("root path in which to place envs, source and activation script. Defaults to a " - "standard conda-build work folder (packagename_timestamp) in your conda-bld folder.")) - p.add_argument("-o", "--output-id", - help=("fnmatch pattern that is associated with the output that you want to create an env for. " - "Must match only one file, as we don't support creating envs for more than one output at a time. " - "The top-level recipe can be specified by passing 'TOPLEVEL' here")) - p.add_argument("-a", "--activate-string-only", action="store_true", - help="Output only the string to the used generated activation script. Use this for creating envs in scripted " - "environments.") # cut out some args from render that don't make sense here # https://stackoverflow.com/a/32809642/1170370 - p._handle_conflict_resolve(None, [('--output', [_ for _ in p._actions if _.option_strings == ['--output']][0])]) - p._handle_conflict_resolve(None, [('--bootstrap', [_ for _ in p._actions if _.option_strings == ['--bootstrap']][0])]) - p._handle_conflict_resolve(None, [('--old-build-string', [_ for _ in p._actions if - _.option_strings == ['--old-build-string']][0])]) + p._handle_conflict_resolve( + None, + [("--output", [_ for _ in p._actions if _.option_strings == ["--output"]][0])], + ) + p._handle_conflict_resolve( + None, + [ + ( + "--bootstrap", + [_ for _ in p._actions if _.option_strings == ["--bootstrap"]][0], + ) + ], + ) + p._handle_conflict_resolve( + None, + [ + ( + "--old-build-string", + [_ for _ in p._actions if _.option_strings == ["--old-build-string"]][ + 0 + ], + ) + ], + ) return p -def execute(): +def execute(args: Sequence[str] | None = None) -> int: parser = get_parser() - args = parser.parse_args() + parsed = parser.parse_args(args) + context.__init__(argparse_args=parsed) try: activation_string = api.debug( - args.recipe_or_package_file_path, - verbose=(not args.activate_string_only), - **args.__dict__ + parsed.recipe_or_package_file_path, + verbose=(not parsed.activate_string_only), + **parsed.__dict__, ) - if not args.activate_string_only: + if not parsed.activate_string_only: print("#" * 80) print( "Test environment created for debugging. To enter a debugging environment:\n" ) print(activation_string) - if not args.activate_string_only: + if not parsed.activate_string_only: test_file = "conda_test_runner.bat" if on_win else "conda_test_runner.sh" print( f"To run your tests, you might want to start with running the {test_file} file." 
@@ -76,9 +120,9 @@ def execute(): print("#" * 80) except ValueError as e: - print(f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr) + print( + f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr + ) sys.exit(1) - -def main(): - return execute() + return 0 diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 7fc473725c..9b680cbf5a 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -1,66 +1,102 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging -import sys +from typing import TYPE_CHECKING + +from conda.base.context import context -from conda_build.conda_interface import ArgumentParser, add_parser_prefix, get_prefix -from conda_build import api +from .. import api +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Sequence logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + + parser = ArgumentParser( + prog="conda develop", description=""" Install a Python package in 'development mode'. -This works by creating a conda.pth file in site-packages.""" +This works by creating a conda.pth file in site-packages.""", # TODO: Use setup.py to determine any entry-points to install. ) - p.add_argument( - 'source', - metavar='PATH', - nargs='+', - help="Path to the source directory." + parser.add_argument( + "source", metavar="PATH", nargs="+", help="Path to the source directory." + ) + parser.add_argument( + "-npf", + "--no-pth-file", + action="store_true", + help=( + "Relink compiled extension dependencies against " + "libraries found in current conda env. " + "Do not add source to conda.pth." + ), + ) + parser.add_argument( + "-b", + "--build_ext", + action="store_true", + help=( + "Build extensions inplace, invoking: " + "python setup.py build_ext --inplace; " + "add to conda.pth; relink runtime libraries to " + "environment's lib/." + ), + ) + parser.add_argument( + "-c", + "--clean", + action="store_true", + help=( + "Invoke clean on setup.py: " + "python setup.py clean " + "use with build_ext to clean before building." + ), + ) + parser.add_argument( + "-u", + "--uninstall", + action="store_true", + help=( + "Removes package if installed in 'development mode' " + "by deleting path from conda.pth file. Ignore other " + "options - just uninstall and exit" + ), ) - p.add_argument('-npf', '--no-pth-file', - action='store_true', - help=("Relink compiled extension dependencies against " - "libraries found in current conda env. " - "Do not add source to conda.pth.")) - p.add_argument('-b', '--build_ext', - action='store_true', - help=("Build extensions inplace, invoking: " - "python setup.py build_ext --inplace; " - "add to conda.pth; relink runtime libraries to " - "environment's lib/.")) - p.add_argument('-c', '--clean', - action='store_true', - help=("Invoke clean on setup.py: " - "python setup.py clean " - "use with build_ext to clean before building.")) - p.add_argument('-u', '--uninstall', - action='store_true', - help=("Removes package if installed in 'development mode' " - "by deleting path from conda.pth file. 
Ignore other " - "options - just uninstall and exit")) - - add_parser_prefix(p) - p.set_defaults(func=execute) - - args = p.parse_args(args) - return p, args - - -def execute(args): - _, args = parse_args(args) - prefix = get_prefix(args) - api.develop(args.source, prefix=prefix, no_pth_file=args.no_pth_file, - build_ext=args.build_ext, clean=args.clean, uninstall=args.uninstall) - - -def main(): - return execute(sys.argv[1:]) + + add_parser_prefix(parser) + parser.set_defaults(func=execute) + + return parser, parser.parse_args(args) + + +def execute(args: Sequence[str] | None = None) -> int: + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + api.develop( + parsed.source, + prefix=context.target_prefix, + no_pth_file=parsed.no_pth_file, + build_ext=parsed.build_ext, + clean=parsed.clean, + uninstall=parsed.uninstall, + ) + + return 0 diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py deleted file mode 100644 index 9d3f4009b9..0000000000 --- a/conda_build/cli/main_index.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import logging -import os -import sys - -from conda_build.conda_interface import ArgumentParser - -from conda_build import api -from conda_build.index import MAX_THREADS_DEFAULT -from conda_build.utils import DEFAULT_SUBDIRS - -logging.basicConfig(level=logging.INFO) - - -def parse_args(args): - p = ArgumentParser( - description="Update package index metadata files in given directories.") - - p.add_argument( - 'dir', - help='Directory that contains an index to be updated.', - nargs='*', - default=[os.getcwd()], - ) - - p.add_argument( - '-c', "--check-md5", - action="store_true", - help="""Use hash values instead of file modification times for determining if a - package's metadata needs to be updated.""", - ) - p.add_argument( - "-n", "--channel-name", - help="Customize the channel name listed in each channel's index.html.", - ) - p.add_argument( - '-s', '--subdir', - action='append', - help='Optional. The subdir to index. Can be given multiple times. If not provided, will ' - 'default to all of %s. If provided, will not create channeldata.json for the channel.' - '' % ', '.join(DEFAULT_SUBDIRS), - ) - p.add_argument( - '-t', '--threads', - default=MAX_THREADS_DEFAULT, - type=int, - ) - p.add_argument( - "-p", "--patch-generator", - help='Path to Python file that outputs metadata patch instructions from its ' - '_patch_repodata function or a .tar.bz2/.conda file which contains a ' - 'patch_instructions.json file for each subdir' - ) - p.add_argument( - "--hotfix-source-repo", - help="Deprecated, will be removed in a future version of conda build" - ) - p.add_argument( - "--verbose", help="show extra debugging info", action="store_true" - ) - p.add_argument( - "--no-progress", help="Hide progress bars", action="store_false", dest="progress" - ) - p.add_argument( - "--current-index-versions-file", "-m", - help=""" - YAML file containing name of package as key, and list of versions as values. The current_index.json - will contain the newest from this series of versions. For example: - - python: - - 2.7 - - 3.6 - - will keep python 2.7.X and 3.6.Y in the current_index.json, instead of only the very latest python version. 
-        """
-    )
-    p.add_argument(
-        "-f", "--file",
-        help="A file that contains a new line separated list of packages to add to repodata.",
-        action="store"
-    )
-
-    args = p.parse_args(args)
-    return p, args
-
-
-def execute(args):
-    _, args = parse_args(args)
-
-    api.update_index(args.dir, check_md5=args.check_md5, channel_name=args.channel_name,
-                     threads=args.threads, subdir=args.subdir, patch_generator=args.patch_generator,
-                     verbose=args.verbose, progress=args.progress, hotfix_source_repo=args.hotfix_source_repo,
-                     current_index_versions=args.current_index_versions_file, index_file=args.file)
-
-
-def main():
-    return execute(sys.argv[1:])
diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py
index 21c09a570b..b1c47c0586 100644
--- a/conda_build/cli/main_inspect.py
+++ b/conda_build/cli/main_inspect.py
@@ -1,28 +1,43 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
 import logging
+import sys
 from os.path import expanduser
 from pprint import pprint
-import sys
+from typing import TYPE_CHECKING
+
+from conda.base.context import context
 
-from conda_build.conda_interface import ArgumentParser, add_parser_prefix, get_prefix
+from .. import api
 
-from conda_build import api
+try:
+    from conda.cli.helpers import add_parser_prefix
+except ImportError:
+    # conda<23.11
+    from conda.cli.conda_argparse import add_parser_prefix
+
+if TYPE_CHECKING:
+    from argparse import ArgumentParser, Namespace
+    from typing import Sequence
 
 logging.basicConfig(level=logging.INFO)
 
 
-def parse_args(args):
-    p = ArgumentParser(
-        description='Tools for inspecting conda packages.',
+def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
+    from conda.cli.conda_argparse import ArgumentParser
+
+    parser = ArgumentParser(
+        prog="conda inspect",
+        description="Tools for inspecting conda packages.",
         epilog="""
 Run --help on the subcommands like 'conda inspect linkages --help' to see the
 options available.
 """,
     )
-    subcommand = p.add_subparsers(
-        dest='subcommand',
+    subcommand = parser.add_subparsers(
+        dest="subcommand",
     )
 
     linkages_help = """
@@ -39,39 +54,39 @@ def parse_args(args):
         description=linkages_help,
     )
     linkages.add_argument(
-        'packages',
-        action='store',
-        nargs='*',
-        help='Conda packages to inspect.',
+        "packages",
+        action="store",
+        nargs="*",
+        help="Conda packages to inspect.",
     )
     linkages.add_argument(
-        '--untracked',
-        action='store_true',
+        "--untracked",
+        action="store_true",
         help="""Inspect the untracked files in the environment. This is useful when used
 in conjunction with conda build --build-only.""",
     )
     linkages.add_argument(
-        '--show-files',
+        "--show-files",
         action="store_true",
         help="Show the files in the package that link to each library",
     )
     linkages.add_argument(
-        '--groupby',
-        action='store',
-        default='package',
-        choices=('package', 'dependency'),
+        "--groupby",
+        action="store",
+        default="package",
+        choices=("package", "dependency"),
         help="""Attribute to group by (default: %(default)s).
Useful when used in conjunction with --all.""", ) linkages.add_argument( - '--sysroot', - action='store', - help='System root in which to look for system libraries.', - default='', + "--sysroot", + action="store", + help="System root in which to look for system libraries.", + default="", ) linkages.add_argument( - '--all', - action='store_true', + "--all", + action="store_true", help="Generate a report for all packages in the environment.", ) add_parser_prefix(linkages) @@ -88,28 +103,28 @@ def parse_args(args): description=objects_help, ) objects.add_argument( - 'packages', - action='store', - nargs='*', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="*", + help="Conda packages to inspect.", ) objects.add_argument( - '--untracked', - action='store_true', + "--untracked", + action="store_true", help="""Inspect the untracked files in the environment. This is useful when used in conjunction with conda build --build-only.""", ) # TODO: Allow groupby to include the package (like for --all) objects.add_argument( - '--groupby', - action='store', - default='filename', - choices=('filename', 'filetype', 'rpath'), - help='Attribute to group by (default: %(default)s).', + "--groupby", + action="store", + default="filename", + choices=("filename", "filetype", "rpath"), + help="Attribute to group by (default: %(default)s).", ) objects.add_argument( - '--all', - action='store_true', + "--all", + action="store_true", help="Generate a report for all packages in the environment.", ) add_parser_prefix(objects) @@ -123,22 +138,25 @@ def parse_args(args): description=channels_help, ) channels.add_argument( - '--verbose', - action='store_true', + "--verbose", + action="store_true", help="""Show verbose output. Note that error output to stderr will always be shown regardless of this flag. """, ) channels.add_argument( - '--test-installable', '-t', - action='store_true', - help="""Test every package in the channel to see if it is installable - by conda.""", + "--test-installable", + "-t", + action="store_true", + help=( + "DEPRECATED. This is the default (and only) behavior. " + "Test every package in the channel to see if it is installable by conda." + ), ) channels.add_argument( "channel", - nargs='?', + nargs="?", default="defaults", - help="The channel to test. The default is %(default)s." + help="The channel to test. The default is %(default)s.", ) prefix_lengths = subcommand.add_parser( @@ -148,60 +166,71 @@ def parse_args(args): description=linkages_help, ) prefix_lengths.add_argument( - 'packages', - action='store', - nargs='+', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="+", + help="Conda packages to inspect.", ) prefix_lengths.add_argument( - '--min-prefix-length', '-m', - help='Minimum length. Only packages with prefixes below this are shown.', + "--min-prefix-length", + "-m", + help="Minimum length. Only packages with prefixes below this are shown.", default=api.Config().prefix_length, type=int, ) hash_inputs = subcommand.add_parser( "hash-inputs", - help="Show data used to compute hash identifier (h????) for package", - description="Show data used to compute hash identifier (h????) 
for package", + help="Show data used to compute hash identifier for package", + description="Show data used to compute hash identifier for package", ) hash_inputs.add_argument( - 'packages', - action='store', - nargs='*', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="*", + help="Conda packages to inspect.", ) - args = p.parse_args(args) - return p, args + + return parser, parser.parse_args(args) -def execute(args): - parser, args = parse_args(args) +def execute(args: Sequence[str] | None = None) -> int: + parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) - if not args.subcommand: + if not parsed.subcommand: parser.print_help() - exit() - - elif args.subcommand == 'channels': - if not args.test_installable: - parser.error("At least one option (--test-installable) is required.") - else: - print(api.test_installable(args.channel)) - elif args.subcommand == 'linkages': - print(api.inspect_linkages(args.packages, prefix=get_prefix(args), - untracked=args.untracked, all_packages=args.all, - show_files=args.show_files, groupby=args.groupby, - sysroot=expanduser(args.sysroot))) - elif args.subcommand == 'objects': - print(api.inspect_objects(args.packages, prefix=get_prefix(args), groupby=args.groupby)) - elif args.subcommand == 'prefix-lengths': - if not api.inspect_prefix_length(args.packages, min_prefix_length=args.min_prefix_length): + sys.exit(0) + elif parsed.subcommand == "channels": + print(api.test_installable(parsed.channel)) + elif parsed.subcommand == "linkages": + print( + api.inspect_linkages( + parsed.packages, + prefix=context.target_prefix, + untracked=parsed.untracked, + all_packages=parsed.all, + show_files=parsed.show_files, + groupby=parsed.groupby, + sysroot=expanduser(parsed.sysroot), + ) + ) + elif parsed.subcommand == "objects": + print( + api.inspect_objects( + parsed.packages, + prefix=context.target_prefix, + groupby=parsed.groupby, + ) + ) + elif parsed.subcommand == "prefix-lengths": + if not api.inspect_prefix_length( + parsed.packages, min_prefix_length=parsed.min_prefix_length + ): sys.exit(1) - elif args.subcommand == 'hash-inputs': - pprint(api.inspect_hash_inputs(args.packages)) + elif parsed.subcommand == "hash-inputs": + pprint(api.inspect_hash_inputs(parsed.packages)) else: - raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") - + parser.error(f"Unrecognized subcommand: {parsed.subcommand}.") -def main(): - return execute(sys.argv[1:]) + return 0 diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 5a9613c273..91d2edcebb 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -1,20 +1,34 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import logging -import sys +from typing import TYPE_CHECKING + +from conda.base.context import context + +from .. 
import api
-from conda_build.conda_interface import binstar_upload
-from conda_build.conda_interface import ArgumentParser
-from conda_build.conda_interface import add_parser_channels
-from conda_build import api
+try:
+    from conda.cli.helpers import add_parser_channels
+except ImportError:
+    # conda<23.11
+    from conda.cli.conda_argparse import add_parser_channels
+
+if TYPE_CHECKING:
+    from argparse import ArgumentParser, Namespace
+    from typing import Sequence
 
 logging.basicConfig(level=logging.INFO)
 
 
-def parse_args(args):
-    p = ArgumentParser(
-        description='''
+def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
+    from conda.cli.conda_argparse import ArgumentParser
+
+    parser = ArgumentParser(
+        prog="conda metapackage",
+        description="""
 Tool for building conda metapackages. A metapackage is a package with no files,
 only metadata. They are typically used to collect several packages together into
 a single package via dependencies.
@@ -22,81 +36,78 @@ def parse_args(args):
 NOTE: Metapackages can also be created by creating a recipe with the necessary
 metadata in the meta.yaml, but a metapackage can be created entirely from the
 command line with the conda metapackage command.
-''',
+""",
     )
-    p.add_argument(
+    parser.add_argument(
         "--no-anaconda-upload",
         action="store_false",
         help="Do not ask to upload the package to anaconda.org.",
-        dest='anaconda_upload',
-        default=binstar_upload,
+        dest="anaconda_upload",
+        default=context.binstar_upload,
     )
-    p.add_argument(
+    parser.add_argument(
         "--no-binstar-upload",
         action="store_false",
         help=argparse.SUPPRESS,
-        dest='anaconda_upload',
-        default=binstar_upload,
+        dest="anaconda_upload",
+        default=context.binstar_upload,
     )
-    p.add_argument(
-        '--token',
-        help="Token to pass through to anaconda upload"
+    parser.add_argument("--token", help="Token to pass through to anaconda upload")
+    parser.add_argument(
+        "--user", help="User/organization to upload packages to on anaconda.org"
     )
-    p.add_argument(
-        '--user',
-        help="User/organization to upload packages to on anaconda.org"
-    )
-    p.add_argument(
-        '--label', action='append', dest='labels', default=[],
+    parser.add_argument(
+        "--label",
+        action="append",
+        dest="labels",
+        default=[],
         help="Label argument to pass through to anaconda upload",
     )
-    p.add_argument(
+    parser.add_argument(
         "name",
         help="Name of the created package.",
     )
-    p.add_argument(
+    parser.add_argument(
         "version",
         help="Version of the created package.",
     )
-    p.add_argument(
+    parser.add_argument(
         "--build-number",
         type=int,
         default=0,
         help="Build number for the package (default is 0).",
     )
-    p.add_argument(
+    parser.add_argument(
         "--build-string",
         default=None,
         help="Build string for the package (default is automatically generated).",
    )
-    p.add_argument(
-        "--dependencies", "-d",
-        nargs='*',
+    parser.add_argument(
+        "--dependencies",
+        "-d",
+        nargs="*",
        default=(),
         help="""The dependencies of the package. To specify a version restriction for a
 dependency, wrap the dependency in quotes, like 'package >=2.0'.""",
     )
-    p.add_argument(
+    parser.add_argument(
         "--home",
         help="The homepage for the metapackage.",
     )
-    p.add_argument(
-        "--license",
-        help="The license of the metapackage.",
-        dest='license_name'
+    parser.add_argument(
+        "--license", help="The license of the metapackage.", dest="license_name"
     )
-    p.add_argument(
+    parser.add_argument(
         "--summary",
         help="""Summary of the package. Pass this in as a string on the command line, like --summary 'A metapackage for X'.
It is recommended to use single quotes if you are not doing variable substitution to avoid interpretation of special characters.""", ) - p.add_argument( + parser.add_argument( "--entry-points", - nargs='*', + nargs="*", default=(), help="""Python entry points to create automatically. They should use the same syntax as in the meta.yaml of a recipe, e.g., --entry-points @@ -104,16 +115,18 @@ def parse_args(args): bsdiff4 that calls bsdiff4.cli.main_bsdiff4(). """, ) - add_parser_channels(p) - args = p.parse_args(args) - return p, args + add_parser_channels(parser) + return parser, parser.parse_args(args) -def execute(args): - _, args = parse_args(args) - channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or () - api.create_metapackage(channel_urls=channel_urls, **args.__dict__) +def execute(args: Sequence[str] | None = None) -> int: + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + api.create_metapackage( + channel_urls=context.channels, + **parsed.__dict__, + ) -def main(): - return execute(sys.argv[1:]) + return 0 diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index b2280db449..6e6f2bfa41 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -1,23 +1,31 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import logging -import sys from pprint import pprint +from typing import TYPE_CHECKING import yaml +from conda.base.context import context from yaml.parser import ParserError -from conda_build.conda_interface import (ArgumentParser, add_parser_channels, - cc_conda_build) +from .. import __version__, api +from ..config import get_channel_urls, get_or_merge_config +from ..utils import LoggingContext +from ..variants import get_package_variants, set_language_env_vars -from conda_build import __version__, api +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels -from conda_build.config import get_or_merge_config, get_channel_urls -from conda_build.variants import get_package_variants, set_language_env_vars -from conda_build.utils import LoggingContext +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Sequence -on_win = (sys.platform == 'win32') log = logging.getLogger(__name__) @@ -30,31 +38,39 @@ def __call__(self, parser, namespace, values, option_string=None): try: my_dict = yaml.load(values[0], Loader=yaml.BaseLoader) if not isinstance(my_dict, dict): - raise RuntimeError(f"The argument of {option_string} is not a YAML dictionary.") + raise RuntimeError( + f"The argument of {option_string} is not a YAML dictionary." + ) setattr(namespace, self.dest, my_dict) except ParserError as e: - raise RuntimeError(f'The argument of {option_string} is not a valid YAML. The parser error was: \n\n{str(e)}') + raise RuntimeError( + f"The argument of {option_string} is not a valid YAML. The parser error was: \n\n{str(e)}" + ) + +def get_render_parser() -> ArgumentParser: + from conda.cli.conda_argparse import ArgumentParser -def get_render_parser(): p = ArgumentParser( + prog="conda render", description=""" -Tool for building conda packages. A conda package is a binary tarball -containing system-level libraries, Python modules, executable programs, or -other components. 
conda keeps track of dependencies between packages and -platform specifics, making it simple to create working environments from - different sets of packages.""", - conflict_handler='resolve' +Tool for expanding the template meta.yml file (containing Jinja syntax and +selectors) into the rendered meta.yml files. The template meta.yml file is +combined with user-specified configurations, static recipe files, and +environment information to generate the rendered meta.yml files.""", + conflict_handler="resolve", ) p.add_argument( - '-V', '--version', - action='version', - help='Show the conda-build version number and exit.', - version='conda-build %s' % __version__, + "-V", + "--version", + action="version", + help="Show the conda-build version number and exit.", + version=f"conda-build {__version__}", ) p.add_argument( - '-n', "--no-source", + "-n", + "--no-source", action="store_true", help="When templating can't be completed, do not obtain the \ source to try fill in related template variables.", @@ -62,167 +78,176 @@ def get_render_parser(): p.add_argument( "--output", action="store_true", - help="Output the conda package filename which would have been " - "created", + help="Output the conda package filename which would have been created", ) p.add_argument( - '--python', + "--python", action="append", help="Set the Python version used by conda build.", ) p.add_argument( - '--perl', + "--perl", action="append", help="Set the Perl version used by conda build.", ) p.add_argument( - '--numpy', + "--numpy", action="append", help="Set the NumPy version used by conda build.", ) p.add_argument( - '--R', + "--R", action="append", help="""Set the R version used by conda build.""", - dest="r_base" + dest="r_base", ) p.add_argument( - '--lua', + "--lua", action="append", help="Set the Lua version used by conda build.", ) p.add_argument( - '--bootstrap', + "--bootstrap", help="""Provide initial configuration in addition to recipe. Can be a path to or name of an environment, which will be emulated in the package.""", ) p.add_argument( - '--append-file', + "--append-file", help="""Append data in meta.yaml with fields from this file. Jinja2 is not done on appended fields""", - dest='append_sections_file', + dest="append_sections_file", ) p.add_argument( - '--clobber-file', + "--clobber-file", help="""Clobber data in meta.yaml with fields from this file. Jinja2 is not done on clobbered fields.""", - dest='clobber_sections_file', + dest="clobber_sections_file", ) p.add_argument( - '-m', '--variant-config-files', + "-m", + "--variant-config-files", action="append", help="""Additional variant config files to add. These yaml files can contain - keys such as `c_compiler` and `target_platform` to form a build matrix.""" + keys such as `c_compiler` and `target_platform` to form a build matrix.""", ) p.add_argument( - '-e', '--exclusive-config-files', '--exclusive-config-file', + "-e", + "--exclusive-config-files", + "--exclusive-config-file", action="append", help="""Exclusive variant config files to add. Providing files here disables searching in your home directory and in cwd. The files specified here come at the start of the order, as opposed to the end with --variant-config-files. 
Any config files in recipes and any config files specified with --variant-config-files will - override values from these files.""" + override values from these files.""", + ) + p.add_argument( + "--old-build-string", + dest="filename_hashing", + action="store_false", + default=context.conda_build.get("filename_hashing", "true").lower() == "true", + help=( + "Disable hash additions to filenames to distinguish package " + "variants from one another. NOTE: any filename collisions are " + "yours to handle. Any variants with overlapping names within a " + "build will clobber each other." + ), ) p.add_argument( - "--old-build-string", dest="filename_hashing", action="store_false", - default=cc_conda_build.get('filename_hashing', 'true').lower() == 'true', - help=("Disable hash additions to filenames to distinguish package " - "variants from one another. NOTE: any filename collisions are " - "yours to handle. Any variants with overlapping names within a " - "build will clobber each other.") + "--use-channeldata", + action="store_true", + dest="use_channeldata", + help=( + "Use channeldata, if available, to determine run_exports. Otherwise packages " + "are downloaded to determine this information" + ), ) p.add_argument( - '--use-channeldata', - action='store_true', - dest='use_channeldata', - help=("Use channeldata, if available, to determine run_exports. Otherwise packages " - "are downloaded to determine this information") + "--variants", + nargs=1, + action=ParseYAMLArgument, + help=( + "Variants to extend the build matrix. Must be a valid YAML instance, " + 'such as "{python: [3.8, 3.9]}"' + ), ) - p.add_argument('--variants', - nargs=1, - action=ParseYAMLArgument, - help=('Variants to extend the build matrix. Must be a valid YAML instance, ' - 'such as "{python: [3.6, 3.7]}"')) add_parser_channels(p) return p -def parse_args(args): - p = get_render_parser() - p.add_argument( - '-f', '--file', +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = get_render_parser() + parser.add_argument( + "-f", + "--file", help="write YAML to file, given as argument here.\ - Overwrites existing files." 
+ Overwrites existing files.", ) # we do this one separately because we only allow one entry to conda render - p.add_argument( - 'recipe', - metavar='RECIPE_PATH', + parser.add_argument( + "recipe", + metavar="RECIPE_PATH", help="Path to recipe directory.", ) # this is here because we have a different default than build - p.add_argument( - '--verbose', - action='store_true', - help='Enable verbose output from download tools and progress updates', + parser.add_argument( + "--verbose", + action="store_true", + help="Enable verbose output from download tools and progress updates", ) - args, _ = p.parse_known_args(args) - return p, args + + return parser, parser.parse_args(args) -def execute(args, print_results=True): - p, args = parse_args(args) +def execute(args: Sequence[str] | None = None) -> int: + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) - config = get_or_merge_config(None, **args.__dict__) + config = get_or_merge_config(None, **parsed.__dict__) + + variants = get_package_variants(parsed.recipe, config, variants=parsed.variants) + from ..build import get_all_replacements - variants = get_package_variants(args.recipe, config, variants=args.variants) - from conda_build.build import get_all_replacements get_all_replacements(variants) set_language_env_vars(variants) - config.channel_urls = get_channel_urls(args.__dict__) - - config.override_channels = args.override_channels + config.channel_urls = get_channel_urls(parsed.__dict__) - if args.output: + if parsed.output: config.verbose = False config.debug = False - metadata_tuples = api.render(args.recipe, config=config, - no_download_source=args.no_source, - variants=args.variants) - - if args.file and len(metadata_tuples) > 1: - log.warning("Multiple variants rendered. " - "Only one will be written to the file you specified ({}).".format(args.file)) - - if print_results: - if args.output: - with LoggingContext(logging.CRITICAL + 1): - paths = api.get_output_file_paths(metadata_tuples, config=config) - print('\n'.join(sorted(paths))) - if args.file: - m = metadata_tuples[-1][0] - api.output_yaml(m, args.file, suppress_outputs=True) - else: - logging.basicConfig(level=logging.INFO) - for (m, _, _) in metadata_tuples: - print("--------------") - print("Hash contents:") - print("--------------") - pprint(m.get_hash_contents()) - print("----------") - print("meta.yaml:") - print("----------") - print(api.output_yaml(m, args.file, suppress_outputs=True)) + metadata_tuples = api.render( + parsed.recipe, + config=config, + no_download_source=parsed.no_source, + variants=parsed.variants, + ) + + if parsed.file and len(metadata_tuples) > 1: + log.warning( + "Multiple variants rendered. " + f"Only one will be written to the file you specified ({parsed.file})." 
+ ) + + if parsed.output: + with LoggingContext(logging.CRITICAL + 1): + paths = api.get_output_file_paths(metadata_tuples, config=config) + print("\n".join(sorted(paths))) + if parsed.file: + m = metadata_tuples[-1][0] + api.output_yaml(m, parsed.file, suppress_outputs=True) else: - return metadata_tuples - - -def main(): - return execute(sys.argv[1:]) - - -if __name__ == '__main__': - main() + logging.basicConfig(level=logging.INFO) + for m, _, _ in metadata_tuples: + print("--------------") + print("Hash contents:") + print("--------------") + pprint(m.get_hash_contents()) + print("----------") + print("meta.yaml:") + print("----------") + print(api.output_yaml(m, parsed.file, suppress_outputs=True)) + + return 0 diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 993d3f3ee6..7013e2ffab 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -1,22 +1,32 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import importlib +from __future__ import annotations + import logging import os import pkgutil import sys +from importlib import import_module +from typing import TYPE_CHECKING + +from conda.base.context import context -from conda_build.conda_interface import ArgumentParser +from .. import api +from ..config import Config -import conda_build.api as api -from conda_build.config import Config +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Sequence thisdir = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + + parser = ArgumentParser( + prog="conda skeleton", description=""" Generates a boilerplate/skeleton recipe, which you can then edit to create a full recipe. Some simple skeleton recipes may not even need edits. @@ -27,37 +37,38 @@ def parse_args(args): """, ) - repos = p.add_subparsers( - dest="repo" - ) + repos = parser.add_subparsers(dest="repo") - skeletons = [name for _, name, _ in - pkgutil.iter_modules([os.path.join(thisdir, '../skeletons')])] + skeletons = [ + name + for _, name, _ in pkgutil.iter_modules([os.path.join(thisdir, "../skeletons")]) + ] for skeleton in skeletons: if skeleton.startswith("_"): continue - module = importlib.import_module("conda_build.skeletons." + skeleton) + module = import_module("conda_build.skeletons." 
+ skeleton) module.add_parser(repos) - args = p.parse_args(args) - return p, args + return parser, parser.parse_args(args) + +def execute(args: Sequence[str] | None = None) -> int: + parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) -def execute(args): - parser, args = parse_args(args) - config = Config(**args.__dict__) + config = Config(**parsed.__dict__) - if not args.repo: + if not parsed.repo: parser.print_help() sys.exit() - api.skeletonize(args.packages, args.repo, output_dir=args.output_dir, recursive=args.recursive, - version=args.version, config=config) - - -def main(): - return execute(sys.argv[1:]) - + api.skeletonize( + parsed.packages, + parsed.repo, + output_dir=parsed.output_dir, + recursive=parsed.recursive, + version=parsed.version, + config=config, + ) -if __name__ == '__main__': - main() + return 0 diff --git a/conda_build/cli/validators.py b/conda_build/cli/validators.py index fdebdcba5b..e21304e074 100644 --- a/conda_build/cli/validators.py +++ b/conda_build/cli/validators.py @@ -5,8 +5,7 @@ import os from argparse import ArgumentError -from conda_build.utils import CONDA_PACKAGE_EXTENSIONS -from conda_build import utils +from ..utils import CONDA_PACKAGE_EXTENSIONS, is_conda_pkg CONDA_PKG_OR_RECIPE_ERROR_MESSAGE = ( "\nUnable to parse provided recipe directory or package file.\n\n" @@ -21,7 +20,7 @@ def validate_is_conda_pkg_or_recipe_dir(arg_val: str) -> str: """ if os.path.isdir(arg_val): return arg_val - elif utils.is_conda_pkg(arg_val): + elif is_conda_pkg(arg_val): return arg_val else: raise ArgumentError(None, CONDA_PKG_OR_RECIPE_ERROR_MESSAGE) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 591c8ac6df..18056cc368 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -1,306 +1,550 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from functools import partial -import os -from os import lstat -from importlib import import_module - -from pkg_resources import parse_version - -from conda import __version__ as CONDA_VERSION - - -def try_exports(module, attr): - # this assumes conda.exports exists, so only use for conda 4.3 onward - try: - return getattr(import_module('conda.exports'), attr) - except AttributeError: - return getattr(import_module(module), attr) - +from __future__ import annotations + +import configparser as _configparser +import os as _os +from builtins import input as _input +from functools import partial as _partial +from importlib import import_module as _import_module +from io import StringIO as _StringIO + +from conda import __version__ +from conda.auxlib.entity import EntityEncoder as _EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER as _PREFIX_PLACEHOLDER +from conda.base.context import context as _context +from conda.base.context import determine_target_prefix as _determine_target_prefix +from conda.base.context import non_x86_machines as _non_x86_linux_machines +from conda.base.context import reset_context as _reset_context +from conda.cli.common import spec_from_line as _spec_from_line +from conda.cli.common import specs_from_args as _specs_from_args +from conda.cli.common import specs_from_url as _specs_from_url +from conda.cli.conda_argparse import ArgumentParser as _ArgumentParser +from conda.common.path import win_path_to_unix as _win_path_to_unix +from conda.common.toposort import _toposort as __toposort +from conda.core.package_cache_data import ( + ProgressiveFetchExtract as _ProgressiveFetchExtract, 
+) +from conda.exceptions import CondaError as _CondaError +from conda.exceptions import CondaHTTPError as _CondaHTTPError +from conda.exceptions import LinkError as _LinkError +from conda.exceptions import LockError as _LockError +from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError +from conda.exceptions import PaddingError as _PaddingError +from conda.exceptions import ResolvePackageNotFound as _ResolvePackageNotFound +from conda.exceptions import UnsatisfiableError as _UnsatisfiableError +from conda.exports import Completer as _Completer +from conda.exports import InstalledPackages as _InstalledPackages +from conda.exports import symlink_conda as _symlink_conda +from conda.gateways.connection.download import TmpDownload as _TmpDownload +from conda.gateways.connection.download import download as _download +from conda.gateways.connection.session import CondaSession as _CondaSession +from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory +from conda.gateways.disk.link import lchmod as _lchmod +from conda.misc import untracked as _untracked +from conda.misc import walk_prefix as _walk_prefix +from conda.models.channel import Channel as _Channel +from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url +from conda.models.enums import FileMode as _FileMode +from conda.models.enums import PathType as _PathType +from conda.models.match_spec import MatchSpec as _MatchSpec +from conda.models.records import PackageRecord as _PackageRecord +from conda.models.version import VersionOrder as _VersionOrder +from conda.models.version import normalized_version as _normalized_version +from conda.resolve import Resolve as _Resolve +from conda.utils import human_bytes as _human_bytes +from conda.utils import unix_path_to_win as _unix_path_to_win +from conda.utils import url_path as _url_path + +from .deprecations import deprecated +from .utils import rm_rf as _rm_rf try: - # This monkey patch is addressed at #1825. The ensure_use_local is an outdated vestige - # and no longer has any relevant effect. 
- import conda.cli.common - conda.cli.common.ensure_use_local = lambda x: None + from conda.cli.helpers import add_parser_channels as _add_parser_channels + from conda.cli.helpers import add_parser_prefix as _add_parser_prefix except ImportError: - # no need to patch if it doesn't exist - pass - -conda_43 = parse_version(CONDA_VERSION) >= parse_version("4.3.0a0") -conda_44 = parse_version(CONDA_VERSION) >= parse_version("4.4.0a0") -conda_45 = parse_version(CONDA_VERSION) >= parse_version("4.5.0a0") -conda_46 = parse_version(CONDA_VERSION) >= parse_version("4.6.0a0") -conda_47 = parse_version(CONDA_VERSION) >= parse_version("4.7.0a0") -conda_48 = parse_version(CONDA_VERSION) >= parse_version("4.8.0a0") -conda_411 = parse_version(CONDA_VERSION) >= parse_version("4.11.0a0") - -if conda_44: - from conda.exports import display_actions, execute_actions, execute_plan, install_actions -else: - from conda.plan import display_actions, execute_actions, execute_plan, install_actions - -display_actions, execute_actions, execute_plan = display_actions, execute_actions, execute_plan -install_actions = install_actions - -try: - # Conda 4.4+ - from conda.exports import _toposort -except ImportError: - from conda.toposort import _toposort -_toposort = _toposort - -if conda_411: - from conda.auxlib.packaging import _get_version_from_git_tag -else: - from conda._vendor.auxlib.packaging import _get_version_from_git_tag -get_version_from_git_tag = _get_version_from_git_tag - -from conda.exports import TmpDownload, download, handle_proxy_407 # NOQA -from conda.exports import untracked, walk_prefix # NOQA -from conda.exports import MatchSpec, NoPackagesFound, Resolve, Unsatisfiable, normalized_version # NOQA -from conda.exports import human_bytes, hashsum_file, md5_file, memoized, unix_path_to_win, win_path_to_unix, url_path # NOQA -from conda.exports import get_index # NOQA -from conda.exports import (Completer, InstalledPackages, add_parser_channels, # NOQA - add_parser_prefix, # NOQA - specs_from_args, spec_from_line, specs_from_url) # NOQA -from conda.exports import ArgumentParser # NOQA -from conda.exports import (is_linked, linked, linked_data, prefix_placeholder, # NOQA - rm_rf, symlink_conda, package_cache) # NOQA -from conda.exports import CondaSession # NOQA -from conda.exports import (StringIO, input, lchmod, # NOQA - TemporaryDirectory) # NOQA -from conda.exports import VersionOrder # NOQA - - -TmpDownload = TmpDownload -download, handle_proxy_407, untracked, walk_prefix = download, handle_proxy_407, untracked, walk_prefix # NOQA -MatchSpec, Resolve, normalized_version = MatchSpec, Resolve, normalized_version -human_bytes, hashsum_file, md5_file, memoized = human_bytes, hashsum_file, md5_file, memoized -unix_path_to_win, win_path_to_unix, url_path = unix_path_to_win, win_path_to_unix, url_path -get_index, Completer, InstalledPackages = get_index, Completer, InstalledPackages -add_parser_channels, add_parser_prefix = add_parser_channels, add_parser_prefix -specs_from_args, spec_from_line, specs_from_url = specs_from_args, spec_from_line, specs_from_url -is_linked, linked, linked_data, prefix_placeholder = is_linked, linked, linked_data, prefix_placeholder # NOQA -rm_rf, symlink_conda, package_cache = rm_rf, symlink_conda, package_cache -input, lchmod = input, lchmod -TemporaryDirectory = TemporaryDirectory -ArgumentParser, CondaSession, VersionOrder = ArgumentParser, CondaSession, VersionOrder - - -from conda.core.package_cache import ProgressiveFetchExtract # NOQA -from conda.models.dist import Dist, 
IndexRecord # NOQA - -ProgressiveFetchExtract = ProgressiveFetchExtract -Dist, IndexRecord = Dist, IndexRecord - -import configparser # NOQA -configparser = configparser - - -from conda.exports import FileMode, PathType # NOQA -FileMode, PathType = FileMode, PathType -from conda.exports import EntityEncoder # NOQA - -EntityEncoder, FileMode, PathType = EntityEncoder, FileMode, PathType - - -CondaError = try_exports("conda.exceptions", "CondaError") -CondaHTTPError = try_exports("conda.exceptions", "CondaHTTPError") -LinkError = try_exports("conda.exceptions", "LinkError") -LockError = try_exports("conda.exceptions", "LockError") -NoPackagesFoundError = try_exports("conda.exceptions", "NoPackagesFoundError") -PaddingError = try_exports("conda.exceptions", "PaddingError") -UnsatisfiableError = try_exports("conda.exceptions", "UnsatisfiableError") - -non_x86_linux_machines = try_exports("conda.base.context", "non_x86_linux_machines") -context = try_exports("conda.base.context", "context") -context_get_prefix = try_exports("conda.base.context", "get_prefix") -reset_context = try_exports("conda.base.context", "reset_context") -get_conda_build_local_url = try_exports("conda.models.channel", "get_conda_build_local_url") - -binstar_upload = context.binstar_upload -bits = context.bits -default_python = context.default_python -envs_dirs = context.envs_dirs -pkgs_dirs = list(context.pkgs_dirs) -cc_platform = context.platform -root_dir = context.root_dir -root_writable = context.root_writable -subdir = context.subdir -create_default_packages = context.create_default_packages - -get_rc_urls = lambda: list(context.channels) -get_prefix = partial(context_get_prefix, context) -cc_conda_build = context.conda_build if hasattr(context, 'conda_build') else {} - -try: - from conda.exports import Channel -except: - from conda.models.channel import Channel -get_conda_channel = Channel.from_value - -# disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. 
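A hedged sketch, not from the patch: the deprecated.constant(...) shims introduced further down keep the old names importable from conda_build.conda_interface while warning on access. The simplified handler below is an assumption about the mechanism (a module-level __getattr__ hook); the real implementation lives in conda_build/deprecations.py and is more featureful.

    import sys
    import warnings

    class DeprecationHandler:
        def constant(self, deprecate_in, remove_in, name, value, *, addendum=""):
            """Expose `name` on the calling module, warning whenever it is accessed."""
            module = sys.modules[sys._getframe(1).f_globals["__name__"]]
            vars(module)[f"_{name}"] = value
            previous = vars(module).get("__getattr__")

            def __getattr__(attr):
                if attr == name:
                    warnings.warn(
                        f"{module.__name__}.{name} is deprecated and will be removed "
                        f"in {remove_in}. {addendum}",
                        DeprecationWarning,
                        stacklevel=2,
                    )
                    return vars(module)[f"_{name}"]
                if previous is not None:
                    return previous(attr)
                raise AttributeError(f"module {module.__name__!r} has no attribute {attr!r}")

            # PEP 562: a module-level __getattr__ handles lookups of names not in the dict
            vars(module)["__getattr__"] = __getattr__

    deprecated = DeprecationHandler()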
-os.environ['CONDA_ALLOW_SOFTLINKS'] = 'false' -reset_context() - -get_local_urls = lambda: list(get_conda_build_local_url()) or [] -arch_name = context.arch_name - - -CondaError, CondaHTTPError, get_prefix, LinkError = CondaError, CondaHTTPError, get_prefix, LinkError # NOQA -LockError, non_x86_linux_machines, NoPackagesFoundError = LockError, non_x86_linux_machines, NoPackagesFoundError # NOQA -PaddingError, UnsatisfiableError = PaddingError, UnsatisfiableError - - -# work-around for python bug on Windows prior to python 3.2 -# https://bugs.python.org/issue10027 -# Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation -class CrossPlatformStLink: - _st_nlink = None - - def __call__(self, path): - return self.st_nlink(path) - - @classmethod - def st_nlink(cls, path): - if cls._st_nlink is None: - cls._initialize() - return cls._st_nlink(path) - - @classmethod - def _standard_st_nlink(cls, path): - return lstat(path).st_nlink - - @classmethod - def _windows_st_nlink(cls, path): - st_nlink = cls._standard_st_nlink(path) - if st_nlink != 0: - return st_nlink - else: - # cannot trust python on Windows when st_nlink == 0 - # get value using windows libraries to be sure of its true value - # Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation - GENERIC_READ = 0x80000000 - FILE_SHARE_READ = 0x00000001 - OPEN_EXISTING = 3 - hfile = cls.CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None, - OPEN_EXISTING, 0, None) - if hfile is None: - from ctypes import WinError - raise WinError( - "Could not determine determine number of hardlinks for %s" % path) - info = cls.BY_HANDLE_FILE_INFORMATION() - rv = cls.GetFileInformationByHandle(hfile, info) - cls.CloseHandle(hfile) - if rv == 0: - from ctypes import WinError - raise WinError("Could not determine file information for %s" % path) - return info.nNumberOfLinks - - @classmethod - def _initialize(cls): - if os.name != 'nt': - cls._st_nlink = cls._standard_st_nlink - else: - # http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858 - import ctypes - from ctypes import POINTER - from ctypes.wintypes import DWORD, HANDLE, BOOL - - cls.CreateFile = ctypes.windll.kernel32.CreateFileW - cls.CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, - DWORD, DWORD, HANDLE] - cls.CreateFile.restype = HANDLE - - # http://msdn.microsoft.com/en-us/library/windows/desktop/ms724211 - cls.CloseHandle = ctypes.windll.kernel32.CloseHandle - cls.CloseHandle.argtypes = [HANDLE] - cls.CloseHandle.restype = BOOL - - class FILETIME(ctypes.Structure): - _fields_ = [("dwLowDateTime", DWORD), - ("dwHighDateTime", DWORD)] - - class BY_HANDLE_FILE_INFORMATION(ctypes.Structure): - _fields_ = [("dwFileAttributes", DWORD), - ("ftCreationTime", FILETIME), - ("ftLastAccessTime", FILETIME), - ("ftLastWriteTime", FILETIME), - ("dwVolumeSerialNumber", DWORD), - ("nFileSizeHigh", DWORD), - ("nFileSizeLow", DWORD), - ("nNumberOfLinks", DWORD), - ("nFileIndexHigh", DWORD), - ("nFileIndexLow", DWORD)] - - cls.BY_HANDLE_FILE_INFORMATION = BY_HANDLE_FILE_INFORMATION - - # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952 - cls.GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle - cls.GetFileInformationByHandle.argtypes = [HANDLE, - POINTER(BY_HANDLE_FILE_INFORMATION)] - cls.GetFileInformationByHandle.restype = BOOL - - cls._st_nlink = cls._windows_st_nlink - - -class SignatureError(Exception): - pass - - -def which_package(path): - """ - given the path (of a (presumably) conda 
installed file) iterate over - the conda packages the file came from. Usually the iteration yields - only one package. - """ - from os.path import abspath, join - path = abspath(path) - prefix = which_prefix(path) - if prefix is None: - raise RuntimeError("could not determine conda prefix from: %s" % path) - for dist in linked(prefix): - meta = is_linked(prefix, dist) - if any(abspath(join(prefix, f)) == path for f in meta['files']): - yield dist - - -def which_prefix(path): - """ - given the path (to a (presumably) conda installed file) return the - environment prefix in which the file in located - """ - from os.path import abspath, join, isdir, dirname - prefix = abspath(path) - iteration = 0 - while iteration < 20: - if isdir(join(prefix, 'conda-meta')): - # we found the it, so let's return it - break - if prefix == dirname(prefix): - # we cannot chop off any more directories, so we didn't find it - prefix = None - break - prefix = dirname(prefix) - iteration += 1 - return prefix - - -def get_installed_version(prefix, pkgs): - """Primarily used by conda-forge, but may be useful in general for checking when a package - needs to be updated""" - from conda_build.utils import ensure_list - pkgs = ensure_list(pkgs) - linked_pkgs = linked(prefix) - versions = {} - for pkg in pkgs: - vers_inst = [dist.split('::', 1)[-1].rsplit('-', 2)[1] for dist in linked_pkgs - if dist.split('::', 1)[-1].rsplit('-', 2)[0] == pkg] - versions[pkg] = vers_inst[0] if len(vers_inst) == 1 else None - return versions - - -# when deactivating envs (e.g. switching from root to build/test) this env var is used, -# except the PR that removed this has been reverted (for now) and Windows doesnt need it. -env_path_backup_var_exists = os.environ.get('CONDA_PATH_BACKUP', None) + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels as _add_parser_channels + from conda.cli.conda_argparse import add_parser_prefix as _add_parser_prefix + +deprecated.constant( + "24.5", + "24.7", + "Completer", + _Completer, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaSession", + _CondaSession, + addendum="Use `conda.gateways.connection.session.CondaSession` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "InstalledPackages", + _InstalledPackages, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "NoPackagesFound", + _ResolvePackageNotFound, + addendum="Use `conda.exceptions.ResolvePackageNotFound` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Unsatisfiable", + _UnsatisfiableError, + addendum="Use `conda.exceptions.UnsatisfiableError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "symlink_conda", + _symlink_conda, + addendum="Unused.", +) + + +deprecated.constant( + "24.5", + "24.7", + "ArgumentParser", + _ArgumentParser, + addendum="Use `conda.cli.conda_argparse.ArgumentParser` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_channels", + _add_parser_channels, + addendum="Use `conda.cli.helpers.add_parser_channels` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_prefix", + _add_parser_prefix, + addendum="Use `conda.cli.helpers.add_parser_prefix` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "Channel", + _Channel, + addendum="Use `conda.models.channel.Channel` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "FileMode", + _FileMode, + addendum="Use `conda.models.enums.FileMode` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PathType", + _PathType, + addendum="Use 
`conda.models.enums.PathType` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "MatchSpec", + _MatchSpec, + addendum="Use `conda.models.match_spec.MatchSpec` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PackageRecord", + _PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "VersionOrder", + _VersionOrder, + addendum="Use `conda.models.version.VersionOrder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "normalized_version", + _normalized_version, + addendum="Use `conda.models.version.normalized_version` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "EntityEncoder", + _EntityEncoder, + addendum="Use `conda.auxlib.entity.EntityEncoder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Resolve", + _Resolve, + addendum="Use `conda.resolve.Resolve` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TemporaryDirectory", + _TemporaryDirectory, + addendum="Use `conda.gateways.disk.create.TemporaryDirectory` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TmpDownload", + _TmpDownload, + addendum="Use `conda.gateways.connection.download.TmpDownload` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "download", + _download, + addendum="Use `conda.gateways.connection.download.download` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "_toposort", + __toposort, + addendum="Use `conda.common.toposort._toposort` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "human_bytes", + _human_bytes, + addendum="Use `conda.utils.human_bytes` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "lchmod", + _lchmod, + addendum="Use `conda.gateways.disk.link.lchmod` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "prefix_placeholder", + _PREFIX_PLACEHOLDER, + addendum="Use `conda.base.constants.PREFIX_PLACEHOLDER` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "rm_rf", + _rm_rf, + addendum="Use `conda_build.utils.rm_rf` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "spec_from_line", + _spec_from_line, + addendum="Use `conda.cli.common.spec_from_line` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_args", + _specs_from_args, + addendum="Use `conda.cli.common.specs_from_args` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_url", + _specs_from_url, + addendum="Use `conda.cli.common.specs_from_url` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "unix_path_to_win", + _unix_path_to_win, + addendum="Use `conda.utils.unix_path_to_win` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "untracked", + _untracked, + addendum="Use `conda.misc.untracked` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "url_path", + _url_path, + addendum="Use `conda.utils.url_path` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "walk_prefix", + _walk_prefix, + addendum="Use `conda.misc.walk_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "win_path_to_unix", + _win_path_to_unix, + addendum="Use `conda.common.path.win_path_to_unix` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "configparser", + _configparser, + addendum="Use `configparser` instead.", +) +deprecated.constant("24.5", "24.7", "os", _os, addendum="Use `os` instead.") +deprecated.constant( + "24.5", + "24.7", + "partial", + _partial, + addendum="Use `functools.partial` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "import_module", + _import_module, + 
addendum="Use `importlib.import_module` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "StringIO", + _StringIO, + addendum="Use `io.StringIO` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "input", + _input, + addendum="Use `input` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "context", + _context, + addendum="Use `conda.base.context.context` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "determine_target_prefix", + _determine_target_prefix, + addendum="Use `conda.base.context.determine_target_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "non_x86_linux_machines", + _non_x86_linux_machines, + addendum="Use `conda.base.context.non_x86_machines` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "ProgressiveFetchExtract", + _ProgressiveFetchExtract, + addendum="Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaError", + _CondaError, + addendum="Use `conda.exceptions.CondaError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaHTTPError", + _CondaHTTPError, + addendum="Use `conda.exceptions.CondaHTTPError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "LinkError", + _LinkError, + addendum="Use `conda.exceptions.LinkError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "LockError", + _LockError, + addendum="Use `conda.exceptions.LockError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "NoPackagesFoundError", + _NoPackagesFoundError, + addendum="Use `conda.exceptions.NoPackagesFoundError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PaddingError", + _PaddingError, + addendum="Use `conda.exceptions.PaddingError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "UnsatisfiableError", + _UnsatisfiableError, + addendum="Use `conda.exceptions.UnsatisfiableError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "get_conda_build_local_url", + _get_conda_build_local_url, + addendum="Use `conda.models.channel.get_conda_build_local_url` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "reset_context", + _reset_context, + addendum="Use `conda.base.context.reset_context` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "binstar_upload", + _context.binstar_upload, + addendum="Use `conda.base.context.context.binstar_upload` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "default_python", + _context.default_python, + addendum="Use `conda.base.context.context.default_python` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "envs_dirs", + _context.envs_dirs, + addendum="Use `conda.base.context.context.envs_dirs` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "pkgs_dirs", + list(_context.pkgs_dirs), + addendum="Use `conda.base.context.context.pkgs_dirs` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "cc_platform", + _context.platform, + addendum="Use `conda.base.context.context.platform` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "root_dir", + _context.root_prefix, + addendum="Use `conda.base.context.context.root_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "root_writable", + _context.root_writable, + addendum="Use `conda.base.context.context.root_writable` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "subdir", + _context.subdir, + addendum="Use `conda.base.context.context.subdir` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "create_default_packages", + 
_context.create_default_packages, + addendum="Use `conda.base.context.context.create_default_packages` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "get_rc_urls", + lambda: list(_context.channels), + addendum="Use `conda.base.context.context.channels` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "get_prefix", + _partial(_determine_target_prefix, _context), + addendum="Use `conda.base.context.context.target_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "cc_conda_build", + _context.conda_build, + addendum="Use `conda.base.context.context.conda_build` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "get_conda_channel", + _Channel.from_value, + addendum="Use `conda.models.channel.Channel.from_value` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "env_path_backup_var_exists", + _os.getenv("CONDA_PATH_BACKUP"), + addendum="Unused.", +) + + +deprecated.constant( + "24.5", + "24.7", + "CONDA_VERSION", + __version__, + addendum="Use `conda.__version__` instead.", +) diff --git a/conda_build/config.py b/conda_build/config.py index 0551068b46..09ce6b0718 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -1,29 +1,39 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module to store conda build settings. -''' +""" + +from __future__ import annotations import copy -from collections import namedtuple import math import os -from os.path import abspath, expanduser, join, expandvars import re import shutil -import sys import time - -from .conda_interface import root_dir, root_writable -from .conda_interface import binstar_upload +from collections import namedtuple +from os.path import abspath, expanduser, expandvars, join +from typing import TYPE_CHECKING + +from conda.base.context import context +from conda.utils import url_path + +from .deprecations import deprecated +from .utils import ( + get_build_folders, + get_conda_operation_locks, + get_logger, + on_win, + rm_rf, +) from .variants import get_default_variant -from .conda_interface import cc_platform, cc_conda_build, subdir, url_path - -from .utils import get_build_folders, rm_rf, get_logger, get_conda_operation_locks +if TYPE_CHECKING: + from pathlib import Path + from typing import Any -on_win = (sys.platform == 'win32') -invocation_time = '' +invocation_time = "" def set_invocation_time(): @@ -40,33 +50,17 @@ def set_invocation_time(): conda_build = "conda-build" -filename_hashing_default = 'true' +filename_hashing_default = "true" _src_cache_root_default = None -error_overlinking_default = 'false' -error_overdepending_default = 'false' -noarch_python_build_age_default = 0 -enable_static_default = 'true' -no_rewrite_stdout_env_default = 'false' +error_overlinking_default = "false" +error_overdepending_default = "false" +deprecated.constant("24.5", "24.7", "noarch_python_build_age_default", 0) +enable_static_default = "false" +no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] exit_on_verify_error_default = False conda_pkg_format_default = None -zstd_compression_level_default = 22 - - -# Python2 silliness: -def python2_fs_encode(strin): - return strin.decode(sys.getfilesystemencoding()) if hasattr(strin, 'decode') else strin - - -def _ensure_dir(path): - # this can fail in parallel operation, depending on timing. Just try to make the dir, - # but don't bail if fail. 
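Editor's illustration (not part of the patch): defaults such as filename_hashing and zstd_compression_level are overridden through the conda_build mapping on the conda context, populated from the user's conda configuration, and because those values arrive as strings the code uses the .lower() == "true" idiom seen throughout this diff. The helper name below is made up.

    from conda.base.context import context

    def conda_build_bool(name: str, default: str = "false") -> bool:
        # settings from the configuration come through as strings such as "true"/"false"
        return str(context.conda_build.get(name, default)).lower() == "true"

    filename_hashing = conda_build_bool("filename_hashing", "true")
    zstd_level = int(context.conda_build.get("zstd_compression_level", 19))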
- encpath = python2_fs_encode(path) - if not os.path.isdir(encpath): - try: - os.makedirs(encpath) - except OSError: - pass +zstd_compression_level_default = 19 # we need this to be accessible to the CLI, so it needs to be more static. @@ -74,13 +68,13 @@ def _ensure_dir(path): # translate our internal more meaningful subdirs to the ones that conda understands SUBDIR_ALIASES = { - 'linux-cos5-x86_64': 'linux-64', - 'linux-cos6-x86_64': 'linux-64', - 'linux-cos5-x86': 'linux-32', - 'linux-cos6-x86': 'linux-32', - 'osx-109-x86_64': 'osx-64', - 'win-x86_64': 'win-64', - 'win-x86': 'win-32', + "linux-cos5-x86_64": "linux-64", + "linux-cos6-x86_64": "linux-64", + "linux-cos5-x86": "linux-32", + "linux-cos6-x86": "linux-32", + "osx-109-x86_64": "osx-64", + "win-x86_64": "win-64", + "win-x86": "win-32", } @@ -88,157 +82,163 @@ def _ensure_dir(path): def _get_default_settings(): - return [Setting('activate', True), - Setting('anaconda_upload', binstar_upload), - Setting('force_upload', True), - Setting('channel_urls', []), - Setting('dirty', False), - Setting('include_recipe', True), - Setting('no_download_source', False), - Setting('override_channels', False), - Setting('skip_existing', False), - Setting('token', None), - Setting('user', None), - Setting('labels', []), - Setting('verbose', True), - - Setting('debug', False), - Setting('timeout', 900), - Setting('set_build_id', True), - Setting('disable_pip', False), - Setting('_output_folder', None), - Setting('prefix_length_fallback', True), - Setting('_prefix_length', DEFAULT_PREFIX_LENGTH), - Setting('long_test_prefix', True), - Setting('locking', True), - Setting('max_env_retry', 3), - Setting('remove_work_dir', True), - Setting('_host_platform', None), - Setting('_host_arch', None), - Setting('test_run_post', False), - Setting('filename_hashing', cc_conda_build.get('filename_hashing', - filename_hashing_default).lower() == 'true'), - Setting('keep_old_work', False), - Setting('_src_cache_root', abspath(expanduser(expandvars( - cc_conda_build.get('cache_dir')))) if cc_conda_build.get('cache_dir') else _src_cache_root_default), - Setting('copy_test_source_files', True), - - # should rendering cut out any skipped metadata? - Setting('trim_skip', True), - - # Use channeldata.json for run_export information during rendering. - # Falls back to downloading packages if False or channeldata does - # not exist for the channel. - Setting('use_channeldata', False), - - # Disable the overlinking test for this package. This test checks that transitive DSOs - # are not referenced by DSOs in the package being built. When this happens something - # has gone wrong with: - # 1. Linker flags not being passed, or not working correctly: - # (GNU ld: -as-needed, Apple ld64: -dead_strip_dylibs -no_implicit_dylibs) - # 2. A missing package in reqs/run (maybe that package is missing run_exports?) - # 3. A missing (or broken) CDT package in reqs/build or (on systems without CDTs) - # 4. .. a missing value in the hard-coded but metadata-augmentable library whitelist - # It is important that packages do not suffer from 2 because uninstalling that missing - # package leads to an inability to run this package. - # - # default to not erroring with overlinking for now. We have specified in - # cli/main_build.py that this default will switch in conda-build 4.0. 
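# Sketch of how the string defaults above become booleans: each value can be
# overridden in the "conda_build:" section of .condarc, which conda exposes as
# the context.conda_build mapping, so conda-build lowercases the stored string
# and compares it against "true" (see the Setting entries further down). The
# _truthy helper is an illustration, not conda-build API.
from conda.base.context import context


def _truthy(key, default="false"):
    return str(context.conda_build.get(key, default)).lower() == "true"


error_overlinking = _truthy("error_overlinking", "false")
enable_static = _truthy("enable_static", "false")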
- Setting('error_overlinking', cc_conda_build.get('error_overlinking', - error_overlinking_default).lower() == 'true'), - Setting('error_overdepending', cc_conda_build.get('error_overdepending', - error_overdepending_default).lower() == 'true'), - Setting('noarch_python_build_age', cc_conda_build.get('noarch_python_build_age', - noarch_python_build_age_default)), - Setting('enable_static', cc_conda_build.get('enable_static', - enable_static_default).lower() == 'true'), - Setting('no_rewrite_stdout_env', cc_conda_build.get('no_rewrite_stdout_env', - no_rewrite_stdout_env_default).lower() == 'true'), - - - Setting('index', None), - # support legacy recipes where only build is specified and expected to be the - # folder that packaging is done on - Setting('build_is_host', False), - - # these are primarily for testing. They override the native build platform/arch, - # which is useful in tests, but makes little sense on actual systems. - Setting('_platform', None), - Setting('_arch', None), - Setting('_target_subdir', None), - - # variants - Setting('variant_config_files', []), - # these files preclude usage of any system-wide or cwd config files. - # Config files in recipes are still respected, and they override this file. - Setting('exclusive_config_files', []), - Setting('ignore_system_variants', False), - Setting('hash_length', 7), - - # append/clobber metadata section data (for global usage. Can also add files to - # recipe.) - Setting('append_sections_file', None), - Setting('clobber_sections_file', None), - Setting('bootstrap', None), - Setting('extra_meta', {}), - - # source provisioning. - Setting('git_commits_since_tag', 0), - - # pypi upload settings (twine) - Setting('password', None), - Setting('sign', False), - Setting('sign_with', 'gpg'), - Setting('identity', None), - Setting('config_file', None), - Setting('repository', 'pypitest'), - - Setting('verify', True), - Setting('ignore_verify_codes', - cc_conda_build.get('ignore_verify_codes', ignore_verify_codes_default)), - Setting('exit_on_verify_error', - cc_conda_build.get('exit_on_verify_error', exit_on_verify_error_default)), - - # Recipes that have no host section, only build, should bypass the build/host line. - # This is to make older recipes still work with cross-compiling. True cross-compiling - # involving compilers (not just python) will still require recipe modification to have - # distinct host and build sections, but simple python stuff should work without. - Setting('merge_build_host', False), - # this one is the state that can be set elsewhere, which affects how - # the "build_prefix" works. The one above is a setting. - Setting('_merge_build_host', False), - - # path to output build statistics to - Setting('stats_file', None), - - # extra deps to add to test env creation - Setting('extra_deps', []), - - # customize this so pip doesn't look in places we don't want. Per-build path by default. 
- Setting('_pip_cache_dir', None), - - Setting('zstd_compression_level', - cc_conda_build.get('zstd_compression_level', zstd_compression_level_default)), - - # this can be set to different values (currently only 2 means anything) to use package formats - Setting('conda_pkg_format', cc_conda_build.get('pkg_format', conda_pkg_format_default)), - - Setting('suppress_variables', False), - - Setting('build_id_pat', cc_conda_build.get('build_id_pat', - '{n}_{t}')), - - ] - - -def print_function_deprecation_warning(func): - def func_wrapper(*args, **kw): - log = get_logger(__name__) - log.warn("WARNING: attribute {} is deprecated and will be removed in conda-build 4.0. " - "Please update your code - file issues on the conda-build issue tracker " - "if you need help.".format(func.__name__)) - return func(*args, **kw) - return func_wrapper + return [ + Setting("activate", True), + Setting("anaconda_upload", context.binstar_upload), + Setting("force_upload", True), + Setting("channel_urls", []), + Setting("dirty", False), + Setting("include_recipe", True), + Setting("no_download_source", False), + Setting("skip_existing", False), + Setting("token", None), + Setting("user", None), + Setting("labels", []), + Setting("verbose", True), + Setting("debug", False), + Setting("timeout", 900), + Setting("set_build_id", True), + Setting("disable_pip", False), + Setting("_output_folder", None), + Setting("prefix_length_fallback", True), + Setting("_prefix_length", DEFAULT_PREFIX_LENGTH), + Setting("long_test_prefix", True), + Setting("locking", True), + Setting("max_env_retry", 3), + Setting("remove_work_dir", True), + Setting("_host_platform", None), + Setting("_host_arch", None), + Setting("test_run_post", False), + Setting( + "filename_hashing", + context.conda_build.get( + "filename_hashing", filename_hashing_default + ).lower() + == "true", + ), + Setting("keep_old_work", False), + Setting( + "_src_cache_root", + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) + else _src_cache_root_default, + ), + Setting("copy_test_source_files", True), + # should rendering cut out any skipped metadata? + Setting("trim_skip", True), + # Use channeldata.json for run_export information during rendering. + # Falls back to downloading packages if False or channeldata does + # not exist for the channel. + Setting("use_channeldata", False), + # Disable the overlinking test for this package. This test checks that transitive DSOs + # are not referenced by DSOs in the package being built. When this happens something + # has gone wrong with: + # 1. Linker flags not being passed, or not working correctly: + # (GNU ld: -as-needed, Apple ld64: -dead_strip_dylibs -no_implicit_dylibs) + # 2. A missing package in reqs/run (maybe that package is missing run_exports?) + # 3. A missing (or broken) CDT package in reqs/build or (on systems without CDTs) + # 4. .. a missing value in the hard-coded but metadata-augmentable library whitelist + # It is important that packages do not suffer from 2 because uninstalling that missing + # package leads to an inability to run this package. + # + # default to not erroring with overlinking for now. We have specified in + # cli/main_build.py that this default will switch in conda-build 4.0. 
+ Setting( + "error_overlinking", + context.conda_build.get( + "error_overlinking", error_overlinking_default + ).lower() + == "true", + ), + Setting( + "error_overdepending", + context.conda_build.get( + "error_overdepending", error_overdepending_default + ).lower() + == "true", + ), + Setting( + "enable_static", + context.conda_build.get("enable_static", enable_static_default).lower() + == "true", + ), + Setting( + "no_rewrite_stdout_env", + context.conda_build.get( + "no_rewrite_stdout_env", no_rewrite_stdout_env_default + ).lower() + == "true", + ), + Setting("index", None), + # support legacy recipes where only build is specified and expected to be the + # folder that packaging is done on + Setting("build_is_host", False), + # these are primarily for testing. They override the native build platform/arch, + # which is useful in tests, but makes little sense on actual systems. + Setting("_platform", None), + Setting("_arch", None), + Setting("_target_subdir", None), + # variants + Setting("variant_config_files", []), + # these files preclude usage of any system-wide or cwd config files. + # Config files in recipes are still respected, and they override this file. + Setting("exclusive_config_files", []), + Setting("ignore_system_variants", False), + Setting("hash_length", 7), + # append/clobber metadata section data (for global usage. Can also add files to + # recipe.) + Setting("append_sections_file", None), + Setting("clobber_sections_file", None), + Setting("bootstrap", None), + Setting("extra_meta", {}), + # source provisioning. + Setting("git_commits_since_tag", 0), + # pypi upload settings (twine) + Setting("password", None), + Setting("sign", False), + Setting("sign_with", "gpg"), + Setting("identity", None), + Setting("config_file", None), + Setting("repository", "pypitest"), + Setting("verify", True), + Setting( + "ignore_verify_codes", + context.conda_build.get("ignore_verify_codes", ignore_verify_codes_default), + ), + Setting( + "exit_on_verify_error", + context.conda_build.get( + "exit_on_verify_error", exit_on_verify_error_default + ), + ), + # Recipes that have no host section, only build, should bypass the build/host line. + # This is to make older recipes still work with cross-compiling. True cross-compiling + # involving compilers (not just python) will still require recipe modification to have + # distinct host and build sections, but simple python stuff should work without. + Setting("merge_build_host", False), + # this one is the state that can be set elsewhere, which affects how + # the "build_prefix" works. The one above is a setting. + Setting("_merge_build_host", False), + # path to output build statistics to + Setting("stats_file", None), + # extra deps to add to test env creation + Setting("extra_deps", []), + # customize this so pip doesn't look in places we don't want. Per-build path by default. 
+ Setting("_pip_cache_dir", None), + Setting( + "zstd_compression_level", + context.conda_build.get( + "zstd_compression_level", zstd_compression_level_default + ), + ), + # this can be set to different values (currently only 2 means anything) to use package formats + Setting( + "conda_pkg_format", + context.conda_build.get("pkg_format", conda_pkg_format_default), + ), + Setting("suppress_variables", False), + Setting("build_id_pat", context.conda_build.get("build_id_pat", "{n}_{t}")), + ] class Config: @@ -255,7 +255,9 @@ def __init__(self, variant=None, **kwargs): self._src_cache_root = os.path.expanduser(self._src_cache_root) def _set_attribute_from_kwargs(self, kwargs, attr, default): - value = kwargs.get(attr, getattr(self, attr) if hasattr(self, attr) else default) + value = kwargs.get( + attr, getattr(self, attr) if hasattr(self, attr) else default + ) setattr(self, attr, value) if attr in kwargs: del kwargs[attr] @@ -265,13 +267,13 @@ def env(lang, default): version = kwargs.pop(lang, None) if not version: # Hooray for corner cases. - if lang == 'python': - lang = 'py' - elif lang == 'numpy': - lang = 'npy' - elif lang == 'r_base': - lang = 'r' - var = 'CONDA_' + lang.upper() + if lang == "python": + lang = "py" + elif lang == "numpy": + lang = "npy" + elif lang == "r_base": + lang = "r" + var = "CONDA_" + lang.upper() version = os.getenv(var) if os.getenv(var) else default elif isinstance(version, list) and len(version) == 1: version = version[0] @@ -280,27 +282,32 @@ def env(lang, default): def set_lang(variant, lang): value = env(lang, self.variant.get(lang)) if value: - if '.' not in str(value): - value = '.'.join((value[0], value[1:])) + if "." not in str(value): + value = ".".join((value[0], value[1:])) variant[lang] = value # this is where we override any variant config files with the legacy CONDA_* vars # or CLI params - for lang in ('perl', 'lua', 'python', 'numpy', 'r_base'): + for lang in ("perl", "lua", "python", "numpy", "r_base"): set_lang(self.variant, lang) - self._build_id = kwargs.pop('build_id', getattr(self, '_build_id', "")) - source_cache = kwargs.pop('cache_dir', None) - croot = kwargs.pop('croot', None) + # --override-channels is a valid CLI argument but we no longer wish to set it here + # use conda.base.context.context.override_channels instead + kwargs.pop("override_channels", None) + + self._build_id = kwargs.pop("build_id", getattr(self, "_build_id", "")) + source_cache = kwargs.pop("cache_dir", None) + croot = kwargs.pop("croot", None) if source_cache: - self._src_cache_root = os.path.abspath(os.path.normpath( - os.path.expanduser(source_cache))) + self._src_cache_root = os.path.abspath( + os.path.normpath(os.path.expanduser(source_cache)) + ) if croot: self._croot = os.path.abspath(os.path.normpath(os.path.expanduser(croot))) else: # set default value (not actually None) - self._croot = getattr(self, '_croot', None) + self._croot = getattr(self, "_croot", None) # handle known values better than unknown (allow defaults) for value in _get_default_settings(): @@ -314,43 +321,52 @@ def set_lang(variant, lang): def arch(self): """Always the native (build system) arch, except when pretending to be some other platform""" - return self._arch or subdir.rsplit('-', 1)[1] + return self._arch or context.subdir.rsplit("-", 1)[1] @arch.setter def arch(self, value): log = get_logger(__name__) - log.warn("Setting build arch. This is only useful when pretending to be on another " - "arch, such as for rendering necessary dependencies on a non-native arch. 
" - "I trust that you know what you're doing.") + log.warn( + "Setting build arch. This is only useful when pretending to be on another " + "arch, such as for rendering necessary dependencies on a non-native arch. " + "I trust that you know what you're doing." + ) self._arch = str(value) @property def platform(self): """Always the native (build system) OS, except when pretending to be some other platform""" - return self._platform or subdir.rsplit('-', 1)[0] + return self._platform or context.subdir.rsplit("-", 1)[0] @platform.setter def platform(self, value): log = get_logger(__name__) - log.warn("Setting build platform. This is only useful when " - "pretending to be on another platform, such as " - "for rendering necessary dependencies on a non-native " - "platform. I trust that you know what you're doing.") - if value == 'noarch': - raise ValueError("config platform should never be noarch. Set host_platform instead.") + log.warn( + "Setting build platform. This is only useful when " + "pretending to be on another platform, such as " + "for rendering necessary dependencies on a non-native " + "platform. I trust that you know what you're doing." + ) + if value == "noarch": + raise ValueError( + "config platform should never be noarch. Set host_platform instead." + ) self._platform = value @property def build_subdir(self): """Determines channel to download build env packages from. - Should generally be the native platform. Does not preclude packages from noarch.""" - return '-'.join((self.platform, self.arch)) + Should generally be the native platform. Does not preclude packages from noarch. + """ + return "-".join((self.platform, self.arch)) @property def host_arch(self): try: - variant_arch = self.variant.get('target_platform', self.build_subdir).split('-', 1)[1] + variant_arch = self.variant.get("target_platform", self.build_subdir).split( + "-", 1 + )[1] except IndexError: variant_arch = 64 return self._host_arch or variant_arch @@ -361,11 +377,11 @@ def host_arch(self, value): @property def noarch(self): - return self.host_platform == 'noarch' + return self.host_platform == "noarch" def reset_platform(self): - if not self.platform == cc_platform: - self.platform = cc_platform + if not self.platform == context.platform: + self.platform = context.platform @property def subdir(self): @@ -373,8 +389,10 @@ def subdir(self): @property def host_platform(self): - return (self._host_platform or - self.variant.get('target_platform', self.build_subdir).split('-', 1)[0]) + return ( + self._host_platform + or self.variant.get("target_platform", self.build_subdir).split("-", 1)[0] + ) @host_platform.setter def host_platform(self, value): @@ -382,8 +400,8 @@ def host_platform(self, value): @property def host_subdir(self): - subdir = self.variant.get('target_platform', self.build_subdir) - if self.host_platform == 'noarch': + subdir = self.variant.get("target_platform", self.build_subdir) + if self.host_platform == "noarch": subdir = self.host_platform elif subdir != "-".join([self.host_platform, str(self.host_arch)]): subdir = "-".join([self.host_platform, str(self.host_arch)]) @@ -392,7 +410,7 @@ def host_subdir(self): @host_subdir.setter def host_subdir(self, value): value = SUBDIR_ALIASES.get(value, value) - values = value.rsplit('-', 1) + values = value.rsplit("-", 1) self.host_platform = values[0] if len(values) > 1: self.host_arch = values[1] @@ -415,8 +433,9 @@ def exclusive_config_file(self): def exclusive_config_file(self, value): if len(self.exclusive_config_files) > 1: raise ValueError( - 
'Cannot set singular exclusive_config_file ' - 'if multiple exclusive_config_files are present.') + "Cannot set singular exclusive_config_file " + "if multiple exclusive_config_files are present." + ) if value is None: self.exclusive_config_files = [] else: @@ -431,25 +450,25 @@ def src_cache_root(self, value): self._src_cache_root = value @property - def croot(self): + def croot(self) -> str: """This is where source caches and work folders live""" if not self._croot: - _bld_root_env = os.getenv('CONDA_BLD_PATH') - _bld_root_rc = cc_conda_build.get('root-dir') + _bld_root_env = os.getenv("CONDA_BLD_PATH") + _bld_root_rc = context.conda_build.get("root-dir") if _bld_root_env: self._croot = abspath(expanduser(_bld_root_env)) elif _bld_root_rc: self._croot = abspath(expanduser(expandvars(_bld_root_rc))) - elif root_writable: - self._croot = join(root_dir, 'conda-bld') + elif context.root_writable: + self._croot = join(context.root_prefix, "conda-bld") else: - self._croot = abspath(expanduser('~/conda-bld')) - return python2_fs_encode(self._croot) + self._croot = abspath(expanduser("~/conda-bld")) + return self._croot @croot.setter - def croot(self, croot): + def croot(self, croot: str | os.PathLike | Path) -> None: """Set croot - if None is passed, then the default value will be used""" - self._croot = croot + self._croot = str(croot) if croot else None @property def output_folder(self): @@ -465,148 +484,92 @@ def build_folder(self): It has the environments and work directories.""" return os.path.join(self.croot, self.build_id) - # back compat for conda-build-all - expects CONDA_* vars to be attributes of the config object - @property - @print_function_deprecation_warning - def CONDA_LUA(self): - return self.variant.get('lua', get_default_variant(self)['lua']) - - @CONDA_LUA.setter - @print_function_deprecation_warning - def CONDA_LUA(self, value): - self.variant['lua'] = value - - @property - @print_function_deprecation_warning - def CONDA_PY(self): - value = self.variant.get('python', get_default_variant(self)['python']) - return int(''.join(value.split('.'))) - - @CONDA_PY.setter - @print_function_deprecation_warning - def CONDA_PY(self, value): - value = str(value) - self.variant['python'] = '.'.join((value[0], value[1:])) - - @property - @print_function_deprecation_warning - def CONDA_NPY(self): - value = self.variant.get('numpy', get_default_variant(self)['numpy']) - return int(''.join(value.split('.'))) - - @CONDA_NPY.setter - @print_function_deprecation_warning - def CONDA_NPY(self, value): - value = str(value) - self.variant['numpy'] = '.'.join((value[0], value[1:])) - - @property - @print_function_deprecation_warning - def CONDA_PERL(self): - return self.variant.get('perl', get_default_variant(self)['perl']) - - @CONDA_PERL.setter - @print_function_deprecation_warning - def CONDA_PERL(self, value): - self.variant['perl'] = value - - @property - @print_function_deprecation_warning - def CONDA_R(self): - - return self.variant.get('r_base', get_default_variant(self)['r_base']) - - @CONDA_R.setter - @print_function_deprecation_warning - def CONDA_R(self, value): - self.variant['r_base'] = value - def _get_python(self, prefix, platform): - if platform.startswith('win') or (platform == "noarch" and sys.platform == "win32"): - if os.path.isfile(os.path.join(prefix, 'python_d.exe')): - res = join(prefix, 'python_d.exe') + if platform.startswith("win") or (platform == "noarch" and on_win): + if os.path.isfile(os.path.join(prefix, "python_d.exe")): + res = join(prefix, "python_d.exe") else: - 
res = join(prefix, 'python.exe') + res = join(prefix, "python.exe") else: - res = join(prefix, 'bin/python') + res = join(prefix, "bin/python") return res def _get_perl(self, prefix, platform): - if platform.startswith('win'): - res = join(prefix, 'Library', 'bin', 'perl.exe') + if platform.startswith("win"): + res = join(prefix, "Library", "bin", "perl.exe") else: - res = join(prefix, 'bin/perl') + res = join(prefix, "bin/perl") return res # TODO: This is probably broken on Windows, but no one has a lua package on windows to test. def _get_lua(self, prefix, platform): - lua_ver = self.variant.get('lua', get_default_variant(self)['lua']) + lua_ver = self.variant.get("lua", get_default_variant(self)["lua"]) binary_name = "luajit" if (lua_ver and lua_ver[0] == "2") else "lua" - if platform.startswith('win'): - res = join(prefix, 'Library', 'bin', f'{binary_name}.exe') + if platform.startswith("win"): + res = join(prefix, "Library", "bin", f"{binary_name}.exe") else: - res = join(prefix, f'bin/{binary_name}') + res = join(prefix, f"bin/{binary_name}") return res def _get_r(self, prefix, platform): - if platform.startswith('win') or (platform == "noarch" and sys.platform == 'win32'): - res = join(prefix, 'Scripts', 'R.exe') + if platform.startswith("win") or (platform == "noarch" and on_win): + res = join(prefix, "Scripts", "R.exe") # MRO test: if not os.path.exists(res): - res = join(prefix, 'bin', 'R.exe') + res = join(prefix, "bin", "R.exe") else: - res = join(prefix, 'bin', 'R') + res = join(prefix, "bin", "R") return res def _get_rscript(self, prefix, platform): - if platform.startswith('win'): - res = join(prefix, 'Scripts', 'Rscript.exe') + if platform.startswith("win"): + res = join(prefix, "Scripts", "Rscript.exe") # MRO test: if not os.path.exists(res): - res = join(prefix, 'bin', 'Rscript.exe') + res = join(prefix, "bin", "Rscript.exe") else: - res = join(prefix, 'bin', 'Rscript') + res = join(prefix, "bin", "Rscript") return res - def compute_build_id(self, package_name, package_version='0', reset=False): - time_re = r'([_-])([0-9]{13})' - pat_dict = {'n': package_name, - 'v': str(package_version), - 't': '{t}'} + def compute_build_id(self, package_name, package_version="0", reset=False): + time_re = r"([_-])([0-9]{13})" + pat_dict = {"n": package_name, "v": str(package_version), "t": "{t}"} # Use the most recent build with matching recipe name, or else the recipe name. build_folders = [] if not self.dirty: if reset: set_invocation_time() else: - old_build_id_t = self.build_id_pat if self.build_id_pat else '{n}-{v}_{t}' + old_build_id_t = self.build_id_pat if self.build_id_pat else "{n}-{v}_{t}" old_build_id_t = old_build_id_t.format(**pat_dict) build_folders_all = get_build_folders(self.croot) for folder_full in build_folders_all: folder = os.path.basename(folder_full) - untimed_folder = re.sub(time_re, r'\g<1>{t}', folder, flags=re.UNICODE) + untimed_folder = re.sub(time_re, r"\g<1>{t}", folder, flags=re.UNICODE) if untimed_folder == old_build_id_t: build_folders.append(folder_full) prev_build_id = None if build_folders: # Use the most recent build with matching recipe name prev_build_id = os.path.basename(build_folders[-1]) - old_dir = os.path.join(build_folders[-1], 'work') + old_dir = os.path.join(build_folders[-1], "work") else: # Maybe call set_invocation_time() here? 
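# Worked example of the build-id naming used by compute_build_id above:
# "{n}-{v}_{t}" is the fallback pattern when build_id_pat is unset, {t} carries
# the invocation timestamp, and the ([_-])([0-9]{13}) regex strips that
# timestamp again when matching previously used work folders. The package name,
# version, and timestamp below are made up for illustration.
import re

pat_dict = {"n": "mypkg", "v": "1.2.3", "t": 1712345678901}
build_id = "{n}-{v}_{t}".format(**pat_dict)    # "mypkg-1.2.3_1712345678901"
untimed = re.sub(r"([_-])([0-9]{13})", r"\g<1>{t}", build_id)
assert untimed == "mypkg-1.2.3_{t}"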
- pat_dict['t'] = invocation_time + pat_dict["t"] = invocation_time test_old_dir = self.work_dir old_dir = test_old_dir if os.path.exists(test_old_dir) else None if self.set_build_id and (not self._build_id or reset): - assert not os.path.isabs(package_name), ("package name should not be a absolute path, " - "to preserve croot during path joins") + assert not os.path.isabs(package_name), ( + "package name should not be a absolute path, " + "to preserve croot during path joins" + ) if self.dirty and prev_build_id: old_dir = self.work_dir if len(os.listdir(self.work_dir)) > 0 else None self._build_id = prev_build_id else: # important: this is recomputing prefixes and determines where work folders are. - build_id = self.build_id_pat if self.build_id_pat else '{n}-{v}_{t}' + build_id = self.build_id_pat if self.build_id_pat else "{n}-{v}_{t}" self._build_id = build_id.format(**pat_dict) if old_dir: work_dir = self.work_dir @@ -624,9 +587,11 @@ def build_id(self): @build_id.setter def build_id(self, _build_id): _build_id = _build_id.rstrip("/").rstrip("\\") - assert not os.path.isabs(_build_id), ("build_id should not be an absolute path, " - "to preserve croot during path joins") - self._build_id = python2_fs_encode(_build_id) + assert not os.path.isabs(_build_id), ( + "build_id should not be an absolute path, " + "to preserve croot during path joins" + ) + self._build_id = _build_id @property def prefix_length(self): @@ -638,14 +603,16 @@ def prefix_length(self, length): @property def _short_host_prefix(self): - return join(self.build_folder, '_h_env') + return join(self.build_folder, "_h_env") @property def _long_host_prefix(self): placeholder_length = self.prefix_length - len(self._short_host_prefix) - placeholder = '_placehold' + placeholder = "_placehold" repeats = int(math.ceil(placeholder_length / len(placeholder)) + 1) - placeholder = (self._short_host_prefix + repeats * placeholder)[:self.prefix_length] + placeholder = (self._short_host_prefix + repeats * placeholder)[ + : self.prefix_length + ] return max(self._short_host_prefix, placeholder) @property @@ -656,7 +623,7 @@ def build_prefix(self): if self._merge_build_host: prefix = self.host_prefix else: - prefix = join(self.build_folder, '_build_env') + prefix = join(self.build_folder, "_build_env") return prefix @property @@ -669,13 +636,13 @@ def host_prefix(self): @property def _short_test_prefix(self): - return join(self.build_folder, '_test_env') + return join(self.build_folder, "_test_env") def _long_prefix(self, base_prefix): placeholder_length = self.prefix_length - len(base_prefix) - placeholder = '_placehold' + placeholder = "_placehold" repeats = int(math.ceil(placeholder_length / len(placeholder)) + 1) - placeholder = (base_prefix + repeats * placeholder)[:self.prefix_length] + placeholder = (base_prefix + repeats * placeholder)[: self.prefix_length] return max(base_prefix, placeholder) @property @@ -717,79 +684,82 @@ def rscript_bin(self, prefix, platform): @property def info_dir(self): """Path to the info dir in the build prefix, where recipe metadata is stored""" - path = join(self.host_prefix, 'info') - _ensure_dir(path) + path = join(self.host_prefix, "info") + os.makedirs(path, exist_ok=True) return path @property def meta_dir(self): """Path to the conda-meta dir in the build prefix, where package index json files are stored""" - path = join(self.host_prefix, 'conda-meta') - _ensure_dir(path) + path = join(self.host_prefix, "conda-meta") + os.makedirs(path, exist_ok=True) return path @property def broken_dir(self): 
"""Where packages that fail the test phase are placed""" path = join(self.croot, "broken") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def bldpkgs_dir(self): - """ Dir where the package is saved. """ + """Dir where the package is saved.""" path = join(self.croot, self.host_subdir) - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def bldpkgs_dirs(self): - """ Dirs where previous build packages might be. """ + """Dirs where previous build packages might be.""" # The first two *might* be the same, but might not, depending on if this is a cross-compile. # subdir should be the native platform, while self.subdir would be the host platform. - return {join(self.croot, self.host_subdir), join(self.croot, subdir), - join(self.croot, "noarch"), } + return { + join(self.croot, self.host_subdir), + join(self.croot, context.subdir), + join(self.croot, "noarch"), + } @property def src_cache(self): """Where tarballs and zip files are downloaded and stored""" - path = join(self.src_cache_root, 'src_cache') - _ensure_dir(path) + path = join(self.src_cache_root, "src_cache") + os.makedirs(path, exist_ok=True) return path @property def git_cache(self): """Where local clones of git sources are stored""" - path = join(self.src_cache_root, 'git_cache') - _ensure_dir(path) + path = join(self.src_cache_root, "git_cache") + os.makedirs(path, exist_ok=True) return path @property def hg_cache(self): """Where local clones of hg sources are stored""" - path = join(self.src_cache_root, 'hg_cache') - _ensure_dir(path) + path = join(self.src_cache_root, "hg_cache") + os.makedirs(path, exist_ok=True) return path @property def svn_cache(self): """Where local checkouts of svn sources are stored""" - path = join(self.src_cache_root, 'svn_cache') - _ensure_dir(path) + path = join(self.src_cache_root, "svn_cache") + os.makedirs(path, exist_ok=True) return path @property def work_dir(self): """Where the source for the build is extracted/copied to.""" - path = join(self.build_folder, 'work') - _ensure_dir(path) + path = join(self.build_folder, "work") + os.makedirs(path, exist_ok=True) return path @property def pip_cache_dir(self): - path = self._pip_cache_dir or join(self.build_folder, 'pip_cache') - _ensure_dir(path) + path = self._pip_cache_dir or join(self.build_folder, "pip_cache") + os.makedirs(path, exist_ok=True) return path @pip_cache_dir.setter @@ -799,35 +769,57 @@ def pip_cache_dir(self, path): @property def test_dir(self): """The temporary folder where test files are copied to, and where tests start execution""" - path = join(self.build_folder, 'test_tmp') - _ensure_dir(path) + path = join(self.build_folder, "test_tmp") + os.makedirs(path, exist_ok=True) return path @property def subdirs_same(self): return self.host_subdir == self.build_subdir + @property + @deprecated( + "24.5", + "24.7", + addendum="Use `conda.base.context.context.override_channels` instead.", + ) + def override_channels(self): + return context.override_channels + def clean(self, remove_folders=True): # build folder is the whole burrito containing envs and source folders # It will only exist if we download source, or create a build or test environment - if remove_folders and not getattr(self, 'dirty') and not getattr(self, 'keep_old_work'): + if ( + remove_folders + and not getattr(self, "dirty") + and not getattr(self, "keep_old_work") + ): if self.build_id: if os.path.isdir(self.build_folder): rm_rf(self.build_folder) else: - for path in [self.work_dir, self.test_dir, self.build_prefix, 
self.test_prefix]: + for path in [ + self.work_dir, + self.test_dir, + self.build_prefix, + self.test_prefix, + ]: if os.path.isdir(path): rm_rf(path) - if os.path.isfile(os.path.join(self.build_folder, 'prefix_files')): - rm_rf(os.path.join(self.build_folder, 'prefix_files')) + if os.path.isfile(os.path.join(self.build_folder, "prefix_files")): + rm_rf(os.path.join(self.build_folder, "prefix_files")) else: - print("\nLeaving build/test directories:" - "\n Work:\n", self.work_dir, - "\n Test:\n", self.test_dir, - "\nLeaving build/test environments:" - "\n Test:\nsource activate ", self.test_prefix, - "\n Build:\nsource activate ", self.build_prefix, - "\n\n") + print( + "\nLeaving build/test directories:\n Work:\n", + self.work_dir, + "\n Test:\n", + self.test_dir, + "\nLeaving build/test environments:\n Test:\nsource activate ", + self.test_prefix, + "\n Build:\nsource activate ", + self.build_prefix, + "\n\n", + ) for lock in get_conda_operation_locks(self.locking, self.bldpkgs_dirs): if os.path.isfile(lock.lock_file): @@ -837,10 +829,10 @@ def clean_pkgs(self): for folder in self.bldpkgs_dirs: rm_rf(folder) - def copy(self): + def copy(self) -> Config: new = copy.copy(self) new.variant = copy.deepcopy(self.variant) - if hasattr(self, 'variants'): + if hasattr(self, "variants"): new.variants = copy.deepcopy(self.variants) return new @@ -849,16 +841,27 @@ def __enter__(self): pass def __exit__(self, e_type, e_value, traceback): - if not getattr(self, 'dirty') and e_type is None and not getattr(self, 'keep_old_work'): - get_logger(__name__).info("--dirty flag and --keep-old-work not specified. " - "Removing build/test folder after successful build/test.\n") + if ( + not getattr(self, "dirty") + and e_type is None + and not getattr(self, "keep_old_work") + ): + get_logger(__name__).info( + "--dirty flag and --keep-old-work not specified. " + "Removing build/test folder after successful build/test.\n" + ) self.clean() else: self.clean(remove_folders=False) -def get_or_merge_config(config, variant=None, **kwargs): - """Always returns a new object - never changes the config that might be passed in.""" +def _get_or_merge_config( + config: Config | None, + variant: dict[str, Any] | None = None, + **kwargs, +) -> Config: + # This function should only ever be called via get_or_merge_config. + # It only exists for us to monkeypatch a default config when running tests. 
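# Minimal usage sketch for the get_or_merge_config wrapper defined below:
# handing it an existing Config yields a modified copy and leaves the original
# untouched, per its docstring. The keyword values are made up for illustration.
from conda_build.config import Config, get_or_merge_config

base = Config(verbose=False)
merged = get_or_merge_config(base, debug=True)  # kwargs are folded into the copy
assert merged is not base                       # always a new object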
if not config: config = Config(variant=variant) else: @@ -872,8 +875,17 @@ def get_or_merge_config(config, variant=None, **kwargs): return config +def get_or_merge_config( + config: Config | None, + variant: dict[str, Any] | None = None, + **kwargs, +) -> Config: + """Always returns a new object - never changes the config that might be passed in.""" + return _get_or_merge_config(config, variant=variant, **kwargs) + + def get_channel_urls(args): - channel_urls = args.get('channel') or args.get('channels') or () + channel_urls = args.get("channel") or args.get("channels") or () final_channel_urls = [] for url in channel_urls: diff --git a/conda_build/convert.py b/conda_build/convert.py index 07fdc5c4a8..e910d47e21 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -3,18 +3,25 @@ """ Tools for converting conda packages """ + +from __future__ import annotations + import glob -import json import hashlib +import json import os -from pathlib import Path import re import shutil import sys import tarfile import tempfile +from pathlib import Path +from typing import TYPE_CHECKING -from conda_build.utils import filter_info_files, walk +from .utils import ensure_list, filter_info_files, walk + +if TYPE_CHECKING: + from typing import Iterable def retrieve_c_extensions(file_path, show_imports=False): @@ -31,14 +38,17 @@ def retrieve_c_extensions(file_path, show_imports=False): show_imports (bool) -- output the C extensions included in the package """ c_extension_pattern = re.compile( - r'(Lib\/|lib\/python\d\.\d\/|lib\/)(site-packages\/|lib-dynload)?(.*)') + r"(Lib\/|lib\/python\d\.\d\/|lib\/)(site-packages\/|lib-dynload)?(.*)" + ) imports = [] with tarfile.open(file_path) as tar: for filename in tar.getnames(): - if filename.endswith(('.pyd', '.so')): + if filename.endswith((".pyd", ".so")): filename_match = c_extension_pattern.match(filename) - import_name = 'import {}' .format(filename_match.group(3).replace('/', '.')) + import_name = "import {}".format( + filename_match.group(3).replace("/", ".") + ) imports.append(import_name) return imports @@ -51,23 +61,23 @@ def retrieve_package_platform(file_path): file_path (str) -- the file path to the source package tar file """ with tarfile.open(file_path) as tar: - index = json.loads(tar.extractfile('info/index.json').read().decode('utf-8')) + index = json.loads(tar.extractfile("info/index.json").read().decode("utf-8")) - platform = index['platform'] + platform = index["platform"] - if index.get('arch') == 'x86_64': - architecture = '64' - elif index.get('arch') == 'x86': - architecture = '32' + if index.get("arch") == "x86_64": + architecture = "64" + elif index.get("arch") == "x86": + architecture = "32" else: - architecture = index.get('arch') + architecture = index.get("arch") - if platform.startswith('linux') or platform.startswith('osx'): - return ('unix', platform, architecture) - elif index['platform'].startswith('win'): - return ('win', platform, architecture) + if platform.startswith("linux") or platform.startswith("osx"): + return ("unix", platform, architecture) + elif index["platform"].startswith("win"): + return ("win", platform, architecture) else: - raise RuntimeError('Package platform not recognized.') + raise RuntimeError("Package platform not recognized.") def retrieve_python_version(file_path): @@ -85,29 +95,32 @@ def retrieve_python_version(file_path): to the source tar file itself, or the file path to the temporary directory containing the extracted source package contents """ - if 'python' in file_path: - pattern = 
re.compile(r'python\d\.\d') + if "python" in file_path: + pattern = re.compile(r"python\d\.\d+") matched = pattern.search(file_path) if matched: return matched.group(0) else: - if file_path.endswith(('.tar.bz2', '.tar')): + if file_path.endswith((".tar.bz2", ".tar")): with tarfile.open(file_path) as tar: - index = json.loads(tar.extractfile('info/index.json').read().decode('utf-8')) + index = json.loads( + tar.extractfile("info/index.json").read().decode("utf-8") + ) else: - path_file = os.path.join(file_path, 'info/index.json') + path_file = os.path.join(file_path, "info/index.json") with open(path_file) as index_file: index = json.load(index_file) - build_version_number = re.search(r'(.*)?(py)(\d\d)(.*)?', index['build']).group(3) - build_version = re.sub(r'\A.*py\d\d.*\Z', 'python', index['build']) + build_version_number = re.search(r"(.*)?(py)(\d\d)(.*)?", index["build"]).group( + 3 + ) + build_version = re.sub(r"\A.*py\d\d.*\Z", "python", index["build"]) - return '{}{}.{}' .format(build_version, - build_version_number[0], build_version_number[1]) + return f"{build_version}{build_version_number[0]}.{build_version_number[1]}" def extract_temporary_directory(file_path): @@ -167,41 +180,47 @@ def update_index_file(temp_dir, target_platform, dependencies, verbose): dependencies (List[str]) -- the dependencies passed from the command line verbose (bool) -- show output of items that are updated """ - index_file = os.path.join(temp_dir, 'info/index.json') + index_file = os.path.join(temp_dir, "info/index.json") with open(index_file) as file: index = json.load(file) - platform, architecture = target_platform.split('-') - other_platforms = ['linux-ppc64', 'linux-ppc64le', 'linux-s390x', - 'linux-armv6l', 'linux-armv7l', 'linux-aarch64'] + platform, architecture = target_platform.split("-") + other_platforms = [ + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + ] if target_platform in other_platforms: source_architecture = architecture - elif index.get('arch') == 'x86_64': - source_architecture = '64' + elif index.get("arch") == "x86_64": + source_architecture = "64" else: - source_architecture = '32' + source_architecture = "32" if verbose: - print('Updating platform from {} to {}' .format(index['platform'], platform)) - print('Updating subdir from {} to {}' .format(index['subdir'], target_platform)) - print('Updating architecture from {} to {}' .format(source_architecture, architecture)) + print("Updating platform from {} to {}".format(index["platform"], platform)) + print("Updating subdir from {} to {}".format(index["subdir"], target_platform)) + print(f"Updating architecture from {source_architecture} to {architecture}") - index['platform'] = platform - index['subdir'] = target_platform + index["platform"] = platform + index["subdir"] = target_platform - if architecture == '64': - index['arch'] = 'x86_64' - elif architecture == '32': - index['arch'] = 'x86' + if architecture == "64": + index["arch"] = "x86_64" + elif architecture == "32": + index["arch"] = "x86" else: - index['arch'] = architecture + index["arch"] = architecture if dependencies: - index['depends'] = update_dependencies(dependencies, index['depends']) + index["depends"] = update_dependencies(dependencies, index["depends"]) - with open(index_file, 'w') as file: + with open(index_file, "w") as file: json.dump(index, file, indent=2) return index_file @@ -221,14 +240,16 @@ def update_lib_path(path, target_platform, temp_dir=None): temp_dir (str) -- the file path to the 
temporary directory that contains the source package's extracted contents """ - if target_platform == 'win': + if target_platform == "win": python_version = retrieve_python_version(path) - renamed_lib_path = re.sub(r'\Alib', 'Lib', path).replace(python_version, '') + renamed_lib_path = re.sub(r"\Alib", "Lib", path).replace(python_version, "") - elif target_platform == 'unix': + elif target_platform == "unix": python_version = retrieve_python_version(temp_dir) - lib_python_version = os.path.join('lib', python_version).replace('\\', '\\\\') - renamed_lib_path = re.sub(r'\ALib', lib_python_version, path.replace('\\', '\\\\')) + lib_python_version = os.path.join("lib", python_version).replace("\\", "\\\\") + renamed_lib_path = re.sub( + r"\ALib", lib_python_version, path.replace("\\", "\\\\") + ) return os.path.normpath(renamed_lib_path) @@ -249,28 +270,29 @@ def update_lib_contents(lib_directory, temp_dir, target_platform, file_path): target_platform (str) -- the platform to target: 'unix' or win' file_path (str) -- the file path to the source package tar file """ - if target_platform == 'win': + if target_platform == "win": try: - for lib_file in glob.iglob('{}/python*/**' .format(lib_directory)): - if 'site-packages' in lib_file: + for lib_file in glob.iglob(f"{lib_directory}/python*/**"): + if "site-packages" in lib_file: new_site_packages_path = os.path.join( - temp_dir, os.path.join('lib', 'site-packages')) + temp_dir, os.path.join("lib", "site-packages") + ) os.renames(lib_file, new_site_packages_path) else: if retrieve_python_version(lib_file) is not None: python_version = retrieve_python_version(lib_file) - os.renames(lib_file, lib_file.replace(python_version, '')) + os.renames(lib_file, lib_file.replace(python_version, "")) except OSError: pass try: - shutil.rmtree(glob.glob('{}/python*' .format(lib_directory))[0]) + shutil.rmtree(glob.glob(f"{lib_directory}/python*")[0]) except IndexError: pass - shutil.move(os.path.join(temp_dir, 'lib'), os.path.join(temp_dir, 'Lib')) + shutil.move(os.path.join(temp_dir, "lib"), os.path.join(temp_dir, "Lib")) - elif target_platform == 'unix': + elif target_platform == "unix": temp_dir = Path(temp_dir) src_dir = temp_dir / "Lib" dst_dir = temp_dir / "lib" @@ -302,16 +324,18 @@ def update_executable_path(temp_dir, file_path, target_platform): file_path (str) -- the file path to the executable to rename in paths.json target_platform (str) -- the platform to target: 'unix' or 'win' """ - if target_platform == 'win': - if os.path.basename(file_path).startswith('.') or is_binary_file(temp_dir, file_path): - renamed_executable_path = re.sub(r'\Abin', 'Scripts', file_path) + if target_platform == "win": + if os.path.basename(file_path).startswith(".") or is_binary_file( + temp_dir, file_path + ): + renamed_executable_path = re.sub(r"\Abin", "Scripts", file_path) else: - renamed_path = os.path.splitext(re.sub(r'\Abin', 'Scripts', file_path))[0] - renamed_executable_path = '{}-script.py' .format(renamed_path) + renamed_path = os.path.splitext(re.sub(r"\Abin", "Scripts", file_path))[0] + renamed_executable_path = f"{renamed_path}-script.py" - elif target_platform == 'unix': - renamed_path = re.sub(r'\AScripts', 'bin', file_path) - renamed_executable_path = renamed_path.replace('-script.py', '') + elif target_platform == "unix": + renamed_path = re.sub(r"\AScripts", "bin", file_path) + renamed_executable_path = renamed_path.replace("-script.py", "") return renamed_executable_path @@ -323,7 +347,7 @@ def update_executable_sha(package_directory, 
executable_path): script files which requires to update the sha. """ - with open(os.path.join(package_directory, executable_path), 'rb') as script_file: + with open(os.path.join(package_directory, executable_path), "rb") as script_file: script_file_contents = script_file.read() return hashlib.sha256(script_file_contents).hexdigest() @@ -353,14 +377,16 @@ def add_new_windows_path(executable_directory, executable): executable_directory (str) -- the file path to temporary directory's 'Scripts' directory executable (str) -- the filename of the script to add to paths.json """ - with open(os.path.join(executable_directory, executable), 'rb') as script_file: + with open(os.path.join(executable_directory, executable), "rb") as script_file: script_file_contents = script_file.read() - new_path = {"_path": "Scripts/{}" .format(executable), - "path_type": "hardlink", - "sha256": hashlib.sha256(script_file_contents).hexdigest(), - "size_in_bytes": os.path.getsize( - os.path.join(executable_directory, executable)) - } + new_path = { + "_path": f"Scripts/{executable}", + "path_type": "hardlink", + "sha256": hashlib.sha256(script_file_contents).hexdigest(), + "size_in_bytes": os.path.getsize( + os.path.join(executable_directory, executable) + ), + } return new_path @@ -372,46 +398,56 @@ def update_paths_file(temp_dir, target_platform): package's extracted contents target_platform (str) -- the platform to target: 'unix' or 'win' """ - paths_file = os.path.join(temp_dir, 'info/paths.json') + paths_file = os.path.join(temp_dir, "info/paths.json") if os.path.isfile(paths_file): with open(paths_file) as file: paths = json.load(file) - if target_platform == 'win': - for path in paths['paths']: - if path['_path'].startswith('lib'): - path['_path'] = update_lib_path(path['_path'], 'win') + if target_platform == "win": + for path in paths["paths"]: + if path["_path"].startswith("lib"): + path["_path"] = update_lib_path(path["_path"], "win") - elif path['_path'].startswith('bin'): - path['_path'] = update_executable_path(temp_dir, path['_path'], 'win') - path['sha256'] = update_executable_sha(temp_dir, path['_path']) - path['size_in_bytes'] = update_executable_size(temp_dir, path['_path']) + elif path["_path"].startswith("bin"): + path["_path"] = update_executable_path( + temp_dir, path["_path"], "win" + ) + path["sha256"] = update_executable_sha(temp_dir, path["_path"]) + path["size_in_bytes"] = update_executable_size( + temp_dir, path["_path"] + ) - path['_path'] = path['_path'].replace('\\', '/').replace('\\\\', '/') + path["_path"] = path["_path"].replace("\\", "/").replace("\\\\", "/") - script_directory = os.path.join(temp_dir, 'Scripts') + script_directory = os.path.join(temp_dir, "Scripts") if os.path.isdir(script_directory): for script in os.listdir(script_directory): - if script.endswith('.exe'): - paths['paths'].append(add_new_windows_path(script_directory, script)) - - elif target_platform == 'unix': - for path in paths['paths']: - if path['_path'].startswith('Lib'): - path['_path'] = update_lib_path(path['_path'], 'unix', temp_dir) - - elif path['_path'].startswith('Scripts'): - path['_path'] = update_executable_path(temp_dir, path['_path'], 'unix') - path['sha256'] = update_executable_sha(temp_dir, path['_path']) - path['size_in_bytes'] = update_executable_size(temp_dir, path['_path']) - - path['_path'] = path['_path'].replace('\\', '/').replace('\\\\', '/') - - if path['_path'].endswith(('.bat', '.exe')): - paths['paths'].remove(path) - - with open(paths_file, 'w') as file: + if 
script.endswith(".exe"): + paths["paths"].append( + add_new_windows_path(script_directory, script) + ) + + elif target_platform == "unix": + for path in paths["paths"]: + if path["_path"].startswith("Lib"): + path["_path"] = update_lib_path(path["_path"], "unix", temp_dir) + + elif path["_path"].startswith("Scripts"): + path["_path"] = update_executable_path( + temp_dir, path["_path"], "unix" + ) + path["sha256"] = update_executable_sha(temp_dir, path["_path"]) + path["size_in_bytes"] = update_executable_size( + temp_dir, path["_path"] + ) + + path["_path"] = path["_path"].replace("\\", "/").replace("\\\\", "/") + + if path["_path"].endswith((".bat", ".exe")): + paths["paths"].remove(path) + + with open(paths_file, "w") as file: json.dump(paths, file, indent=2) @@ -442,11 +478,12 @@ def is_binary_file(directory, executable): file_path = os.path.join(directory, executable) if os.path.isfile(file_path): - with open(file_path, 'rb') as buffered_file: + with open(file_path, "rb") as buffered_file: file_contents = buffered_file.read(1024) - text_characters = bytearray({7, 8, 9, 10, 12, 13, 27}.union( - set(range(0x20, 0x100)) - {0x7f})) + text_characters = bytearray( + {7, 8, 9, 10, 12, 13, 27}.union(set(range(0x20, 0x100)) - {0x7F}) + ) return bool(file_contents.translate(None, text_characters)) @@ -468,31 +505,33 @@ def rename_executable(directory, executable, target_platform): """ old_executable_path = os.path.join(directory, executable) - if target_platform == 'win': - new_executable_path = os.path.join(directory, '{}-script.py' .format( - retrieve_executable_name(executable))) + if target_platform == "win": + new_executable_path = os.path.join( + directory, f"{retrieve_executable_name(executable)}-script.py" + ) with open(old_executable_path) as script_file_in: lines = script_file_in.read().splitlines() - with open(old_executable_path, 'w') as script_file_out: + with open(old_executable_path, "w") as script_file_out: for line in lines[1:]: - script_file_out.write(line + '\n') + script_file_out.write(line + "\n") os.renames(old_executable_path, new_executable_path) else: - if old_executable_path.endswith('.py'): - - new_executable_path = old_executable_path.replace('-script.py', '') + if old_executable_path.endswith(".py"): + new_executable_path = old_executable_path.replace("-script.py", "") with open(old_executable_path) as script_file_in: lines = script_file_in.read().splitlines() - with open(old_executable_path, 'w') as script_file_out: - script_file_out.write('#!/opt/anaconda1anaconda2anaconda3/bin/python' + '\n') + with open(old_executable_path, "w") as script_file_out: + script_file_out.write( + "#!/opt/anaconda1anaconda2anaconda3/bin/python" + "\n" + ) for line in lines: - script_file_out.write(line + '\n') + script_file_out.write(line + "\n") os.renames(old_executable_path, new_executable_path) @@ -507,7 +546,7 @@ def remove_executable(directory, executable): directory (str) -- the file path to the 'Scripts' directory executable (str) -- the filename of the executable to remove """ - if executable.endswith(('.exe', '.bat')): + if executable.endswith((".exe", ".bat")): script = os.path.join(directory, executable) os.remove(script) @@ -522,13 +561,13 @@ def create_exe_file(directory, executable, target_platform): """ exe_directory = os.path.dirname(__file__) - if target_platform.endswith('32'): - executable_file = os.path.join(exe_directory, 'cli-32.exe') + if target_platform.endswith("32"): + executable_file = os.path.join(exe_directory, "cli-32.exe") else: - executable_file = 
os.path.join(exe_directory, 'cli-64.exe') + executable_file = os.path.join(exe_directory, "cli-64.exe") - renamed_executable_file = os.path.join(directory, '{}.exe' .format(executable)) + renamed_executable_file = os.path.join(directory, f"{executable}.exe") shutil.copyfile(executable_file, renamed_executable_file) @@ -544,9 +583,9 @@ def update_prefix_file(temp_dir, prefixes): package's extracted contents prefixes (List[str])-- the prefixes to write to 'has_prefix' """ - has_prefix_file = os.path.join(temp_dir, 'info/has_prefix') + has_prefix_file = os.path.join(temp_dir, "info/has_prefix") - with open(has_prefix_file, 'w+') as prefix_file: + with open(has_prefix_file, "w+") as prefix_file: for prefix in prefixes: prefix_file.write(prefix) @@ -562,20 +601,20 @@ def update_files_file(temp_dir, verbose): package's extracted contents verbose (bool) -- show output of items that are updated """ - files_file = os.path.join(temp_dir, 'info/files') + files_file = os.path.join(temp_dir, "info/files") - with open(files_file, 'w') as files: + with open(files_file, "w") as files: file_paths = [] for dirpath, dirnames, filenames in walk(temp_dir): relative_dir = os.path.relpath(dirpath, temp_dir) filenames = [os.path.join(relative_dir, f) for f in filenames] - for filename in filter_info_files(filenames, ''): - file_paths.append(filename.replace('\\', '/').replace('\\\\', '/')) + for filename in filter_info_files(filenames, ""): + file_paths.append(filename.replace("\\", "/").replace("\\\\", "/")) if verbose: - print('Updating {}' .format(filename)) + print(f"Updating {filename}") for file_path in sorted(file_paths): - files.write(file_path + '\n') + files.write(file_path + "\n") def create_target_archive(file_path, temp_dir, platform, output_dir): @@ -595,7 +634,7 @@ def create_target_archive(file_path, temp_dir, platform, output_dir): destination = os.path.join(output_directory, os.path.basename(file_path)) - with tarfile.open(destination, 'w:bz2') as target: + with tarfile.open(destination, "w:bz2") as target: for dirpath, dirnames, filenames in walk(temp_dir): relative_dir = os.path.relpath(dirpath, temp_dir) filenames = [os.path.join(relative_dir, f) for f in filenames] @@ -603,7 +642,9 @@ def create_target_archive(file_path, temp_dir, platform, output_dir): target.add(os.path.join(temp_dir, filename), arcname=filename) -def convert_between_unix_platforms(file_path, output_dir, platform, dependencies, verbose): +def convert_between_unix_platforms( + file_path, output_dir, platform, dependencies, verbose +): """Convert package between unix platforms. Positional arguments: @@ -623,8 +664,9 @@ def convert_between_unix_platforms(file_path, output_dir, platform, dependencies shutil.rmtree(temp_dir) -def convert_between_windows_architechtures(file_path, output_dir, platform, - dependencies, verbose): +def convert_between_windows_architechtures( + file_path, output_dir, platform, dependencies, verbose +): """Convert package between windows architectures. Positional arguments: @@ -644,7 +686,9 @@ def convert_between_windows_architechtures(file_path, output_dir, platform, shutil.rmtree(temp_dir) -def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, verbose): +def convert_from_unix_to_windows( + file_path, output_dir, platform, dependencies, verbose +): """Convert a package from a unix platform to windows. 
Positional arguments: @@ -660,27 +704,31 @@ def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, for entry in os.listdir(temp_dir): directory = os.path.join(temp_dir, entry) - if os.path.isdir(directory) and entry.strip(os.sep) == 'lib': - update_lib_contents(directory, temp_dir, 'win', file_path) + if os.path.isdir(directory) and entry.strip(os.sep) == "lib": + update_lib_contents(directory, temp_dir, "win", file_path) - if os.path.isdir(directory) and entry.strip(os.sep) == 'bin': + if os.path.isdir(directory) and entry.strip(os.sep) == "bin": for script in os.listdir(directory): - if (os.path.isfile(os.path.join(directory, script)) and - not is_binary_file(directory, script) and - not script.startswith('.')): - rename_executable(directory, script, 'win') - create_exe_file(directory, retrieve_executable_name(script), - platform) - - prefixes.add('/opt/anaconda1anaconda2anaconda3 text Scripts/{}-script.py\n' - .format(retrieve_executable_name(script))) - - new_bin_path = os.path.join(temp_dir, 'Scripts') + if ( + os.path.isfile(os.path.join(directory, script)) + and not is_binary_file(directory, script) + and not script.startswith(".") + ): + rename_executable(directory, script, "win") + create_exe_file( + directory, retrieve_executable_name(script), platform + ) + + prefixes.add( + f"/opt/anaconda1anaconda2anaconda3 text Scripts/{retrieve_executable_name(script)}-script.py\n" + ) + + new_bin_path = os.path.join(temp_dir, "Scripts") os.renames(directory, new_bin_path) update_index_file(temp_dir, platform, dependencies, verbose) update_prefix_file(temp_dir, prefixes) - update_paths_file(temp_dir, target_platform='win') + update_paths_file(temp_dir, target_platform="win") update_files_file(temp_dir, verbose) create_target_archive(file_path, temp_dir, platform, output_dir) @@ -688,7 +736,9 @@ def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, shutil.rmtree(temp_dir) -def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, verbose): +def convert_from_windows_to_unix( + file_path, output_dir, platform, dependencies, verbose +): """Convert a package from windows to a unix platform. 
Positional arguments: @@ -705,24 +755,25 @@ def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, for entry in os.listdir(temp_dir): directory = os.path.join(temp_dir, entry) - if os.path.isdir(directory) and 'Lib' in directory: - update_lib_contents(directory, temp_dir, 'unix', file_path) + if os.path.isdir(directory) and "Lib" in directory: + update_lib_contents(directory, temp_dir, "unix", file_path) - if os.path.isdir(directory) and 'Scripts' in directory: + if os.path.isdir(directory) and "Scripts" in directory: for script in os.listdir(directory): - if not is_binary_file(directory, script) and not script.startswith('.'): - rename_executable(directory, script, 'unix') + if not is_binary_file(directory, script) and not script.startswith("."): + rename_executable(directory, script, "unix") remove_executable(directory, script) - prefixes.add('/opt/anaconda1anaconda2anaconda3 text bin/{}\n' - .format(retrieve_executable_name(script))) + prefixes.add( + f"/opt/anaconda1anaconda2anaconda3 text bin/{retrieve_executable_name(script)}\n" + ) - new_bin_path = os.path.join(temp_dir, 'bin') + new_bin_path = os.path.join(temp_dir, "bin") os.renames(directory, new_bin_path) update_index_file(temp_dir, platform, dependencies, verbose) update_prefix_file(temp_dir, prefixes) - update_paths_file(temp_dir, target_platform='unix') + update_paths_file(temp_dir, target_platform="unix") update_files_file(temp_dir, verbose) create_target_archive(file_path, temp_dir, platform, output_dir) @@ -730,72 +781,105 @@ def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, shutil.rmtree(temp_dir) -def conda_convert(file_path, output_dir=".", show_imports=False, platforms=None, force=False, - dependencies=None, verbose=False, quiet=False, dry_run=False): +def conda_convert( + file_path: str, + output_dir: str = ".", + show_imports: bool = False, + platforms: str | Iterable[str] | None = None, + force: bool = False, + dependencies: str | Iterable[str] | None = None, + verbose: bool = False, + quiet: bool = False, + dry_run: bool = False, +) -> None: """Convert a conda package between different platforms and architectures. 
Positional arguments: file_path (str) -- the file path to the source package's tar file output_dir (str) -- the file path to where to output the converted tar file show_imports (bool) -- show all C extensions found in the source package - platforms (str) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64', + platforms list[str] -- the platforms to convert to: 'win-64', 'win-32', 'linux-64', 'linux-32', 'osx-64', or 'all' force (bool) -- force conversion of packages that contain C extensions - dependencies (List[str]) -- the new dependencies to add to the source package's + dependencies (list[str]) -- the new dependencies to add to the source package's existing dependencies verbose (bool) -- show output of items that are updated quiet (bool) -- hide all output except warnings and errors dry_run (bool) -- show which conversions will take place """ + + platforms = ensure_list(platforms) + dependencies = ensure_list(dependencies) + if show_imports: imports = retrieve_c_extensions(file_path) if len(imports) == 0: - print('No imports found.') + print("No imports found.") else: for c_extension in imports: print(c_extension) sys.exit() if not show_imports and len(platforms) == 0: - sys.exit('Error: --platform option required for conda package conversion.') + sys.exit("Error: --platform option required for conda package conversion.") if len(retrieve_c_extensions(file_path)) > 0 and not force: - sys.exit('WARNING: Package {} contains C extensions; skipping conversion. ' - 'Use -f to force conversion.' .format(os.path.basename(file_path))) - - conversion_platform, source_platform, architecture = retrieve_package_platform(file_path) - source_platform_architecture = '{}-{}' .format(source_platform, architecture) - - if 'all' in platforms: - platforms = ['osx-64', 'osx-arm64', - 'linux-32', 'linux-64', 'linux-ppc64', 'linux-ppc64le', - 'linux-s390x', 'linux-armv6l', 'linux-armv7l', 'linux-aarch64', - 'win-32', 'win-64', 'win-arm64', - ] + sys.exit( + f"WARNING: Package {os.path.basename(file_path)} contains C extensions; skipping conversion. " + "Use -f to force conversion." + ) + + conversion_platform, source_platform, architecture = retrieve_package_platform( + file_path + ) + source_platform_architecture = f"{source_platform}-{architecture}" + + if "all" in platforms: + platforms = [ + "osx-64", + "osx-arm64", + "linux-32", + "linux-64", + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + "win-32", + "win-64", + "win-arm64", + ] for platform in platforms: - if platform == source_platform_architecture: - print("Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion." .format(source_platform_architecture, platform)) + print( + f"Source platform '{source_platform_architecture}' and target platform '{platform}' are identical. " + "Skipping conversion." 
+ ) continue if not quiet: - print('Converting {} from {} to {}' .format( - os.path.basename(file_path), source_platform_architecture, platform)) - - if platform.startswith(('osx', 'linux')) and conversion_platform == 'unix': - convert_between_unix_platforms(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith('win') and conversion_platform == 'unix': - convert_from_unix_to_windows(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith(('osx', 'linux')) and conversion_platform == 'win': - convert_from_windows_to_unix(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith('win') and conversion_platform == 'win': - convert_between_windows_architechtures(file_path, output_dir, platform, - dependencies, verbose) + print( + f"Converting {os.path.basename(file_path)} from {source_platform_architecture} to {platform}" + ) + + if platform.startswith(("osx", "linux")) and conversion_platform == "unix": + convert_between_unix_platforms( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith("win") and conversion_platform == "unix": + convert_from_unix_to_windows( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith(("osx", "linux")) and conversion_platform == "win": + convert_from_windows_to_unix( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith("win") and conversion_platform == "win": + convert_between_windows_architechtures( + file_path, output_dir, platform, dependencies, verbose + ) diff --git a/conda_build/create_test.py b/conda_build/create_test.py index afd6bfbcb9..441fe4a17c 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -1,66 +1,67 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module to handle generating test files. -''' +""" +from __future__ import annotations -import os -from os.path import join, exists import json +import os +from os.path import basename, exists, isfile, join +from pathlib import Path +from typing import TYPE_CHECKING -from conda_build.utils import copy_into, ensure_list, glob, on_win, rm_rf - +from .utils import copy_into, ensure_list, on_win, rm_rf -def create_files(m, test_dir=None): - """ - Create the test files for pkg in the directory given. The resulting - test files are configuration (i.e. platform, architecture, Python and - numpy version, ...) independent. - Return False, if the package has no tests (for any configuration), and - True if it has. - """ - if not test_dir: - test_dir = m.config.test_dir - has_files = False - if not os.path.isdir(test_dir): - os.makedirs(test_dir) +if TYPE_CHECKING: + from .metadata import MetaData - for pattern in ensure_list(m.get_value('test/files', [])): - has_files = True - files = glob(join(m.path, pattern.replace('/', os.sep))) - for f in files: - copy_into(f, f.replace(m.path, test_dir), m.config.timeout, locking=False, - clobber=True) - return has_files +def create_files(m: MetaData, test_dir: Path) -> bool: + """ + Copy all test files from recipe over into testing directory. -def _get_output_script_name(m, win_status): + :param m: The metadata object. + :param test_dir: The testing directory. + :return: Whether any test files were copied over.
+ """ + patterns = ensure_list(m.get_value("test/files", [])) + for pattern in patterns: + for file in Path(m.path).rglob(pattern): + copy_into( + file, + test_dir / file.relative_to(m.path), + m.config.timeout, + locking=False, + clobber=True, + ) + return bool(patterns) + + +def _get_output_script_name( + m: MetaData, + win_status: bool, +) -> tuple[os.PathLike, os.PathLike]: # the way this works is that each output needs to explicitly define a test script to run. # They do not automatically pick up run_test.*, but can be pointed at that explicitly. - ext = '.bat' if win_status else '.sh' - dst_name = 'run_test' + ext + ext = ".bat" if win_status else ".sh" + dst_name = "run_test" + ext src_name = dst_name if m.is_output: - src_name = 'no-file' - for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') + src_name = "no-file" + for out in m.get_section("outputs"): + if m.name() == out.get("name"): + out_test_script = out.get("test", {}).get("script", "no-file") if os.path.splitext(out_test_script)[1].lower() == ext: src_name = out_test_script break return src_name, dst_name -def create_shell_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - - win_status = [on_win] - - if m.noarch: - win_status = [False, True] +def create_shell_files(m: MetaData, test_dir: os.PathLike) -> list[str]: + win_status = [False, True] if m.noarch else [on_win] shell_files = [] for status in win_status: @@ -68,49 +69,60 @@ def create_shell_files(m, test_dir=None): dest_file = join(test_dir, dst_name) if exists(join(m.path, src_name)): # disable locking to avoid locking a temporary directory (the extracted test folder) - copy_into(join(m.path, src_name), dest_file, m.config.timeout, locking=False) - if os.path.basename(test_dir) != 'test_tmp': - commands = ensure_list(m.get_value('test/commands', [])) - if commands: - with open(join(dest_file), 'a') as f: - f.write('\n\n') - if not status: - f.write('set -ex\n\n') - f.write('\n\n') - for cmd in commands: - f.write(cmd) - f.write('\n') - if status: - f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") + copy_into( + join(m.path, src_name), + dest_file, + m.config.timeout, + locking=False, + ) + commands = ensure_list(m.get_value("test/commands", [])) + if commands: + with open(join(dest_file), "a") as f: + f.write("\n\n") + if not status: + f.write("set -ex\n\n") + f.write("\n\n") + for cmd in commands: + f.write(cmd) + f.write("\n") if status: - f.write('exit /B 0\n') - else: - f.write('exit 0\n') - if os.path.isfile(dest_file): + f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") + if status: + f.write("exit /B 0\n") + else: + f.write("exit 0\n") + if isfile(dest_file): shell_files.append(dest_file) return shell_files -def _create_test_files(m, test_dir, ext, comment_char='# '): - name = 'run_test' + ext +def _create_test_files( + m: MetaData, + test_dir: os.PathLike, + ext: str, + comment_char: str = "# ", +) -> tuple[os.PathLike, bool]: + name = "run_test" + ext if m.is_output: - name = '' + name = "" # the way this works is that each output needs to explicitly define a test script to run # They do not automatically pick up run_test.*, but can be pointed at that explicitly. 
- for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') + for out in m.get_section("outputs"): + if m.name() == out.get("name"): + out_test_script = out.get("test", {}).get("script", "no-file") if out_test_script.endswith(ext): name = out_test_script break - out_file = join(test_dir, 'run_test' + ext) + out_file = join(test_dir, "run_test" + ext) if name: - test_file = os.path.join(m.path, name) - if os.path.isfile(test_file): - with open(out_file, 'w') as fo: - fo.write(f"{comment_char} tests for {m.dist()} (this is a generated file);\n") - fo.write("print('===== testing package: %s =====');\n" % m.dist()) + test_file = join(m.path, name) + if isfile(test_file): + with open(out_file, "w") as fo: + fo.write( + f"{comment_char} tests for {m.dist()} (this is a generated file);\n" + ) + fo.write(f"print('===== testing package: {m.dist()} =====');\n") try: with open(test_file) as fi: @@ -119,144 +131,169 @@ def _create_test_files(m, test_dir, ext, comment_char='# '): fo.write(fi.read()) fo.write(f"{comment_char} --- {name} (end) ---\n") except AttributeError: - fo.write("# tests were not packaged with this module, and cannot be run\n") - fo.write("\nprint('===== %s OK =====');\n" % m.dist()) - return (out_file, bool(name) and os.path.isfile(out_file) and os.path.basename(test_file) != 'no-file') + fo.write( + "# tests were not packaged with this module, and cannot be run\n" + ) + fo.write(f"\nprint('===== {m.dist()} OK =====');\n") + return ( + out_file, + bool(name) and isfile(out_file) and basename(test_file) != "no-file", + ) -def create_py_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.py') +def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool: + tf, tf_exists = _create_test_files(m, test_dir, ".py") # Ways in which we can mark imports as none python imports # 1. preface package name with r-, lua- or perl- # 2. 
use list of dicts for test/imports, and have lang set in those dicts pkg_name = m.name() - likely_r_pkg = pkg_name.startswith('r-') - likely_lua_pkg = pkg_name.startswith('lua-') - likely_perl_pkg = pkg_name.startswith('perl-') + likely_r_pkg = pkg_name.startswith("r-") + likely_lua_pkg = pkg_name.startswith("lua-") + likely_perl_pkg = pkg_name.startswith("perl-") likely_non_python_pkg = likely_r_pkg or likely_lua_pkg or likely_perl_pkg if likely_non_python_pkg: imports = [] - for import_item in ensure_list(m.get_value('test/imports', [])): + for import_item in ensure_list(m.get_value("test/imports", [])): # add any imports specifically marked as python - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'python'): - imports.extend(import_item['imports']) + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "python" + ): + imports.extend(import_item["imports"]) else: - imports = ensure_list(m.get_value('test/imports', [])) - imports = [item for item in imports if (not hasattr(item, 'keys') or - 'lang' in item and item['lang'] == 'python')] + imports = ensure_list(m.get_value("test/imports", [])) + imports = [ + item + for item in imports + if ( + not hasattr(item, "keys") or "lang" in item and item["lang"] == "python" + ) + ] if imports: - with open(tf, 'a') as fo: + with open(tf, "a") as fo: for name in imports: - fo.write('print("import: %r")\n' % name) - fo.write('import %s\n' % name) - fo.write('\n') + fo.write(f'print("import: {name!r}")\n') + fo.write(f"import {name}\n") + fo.write("\n") return tf if (tf_exists or imports) else False -def create_r_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.r') +def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool: + tf, tf_exists = _create_test_files(m, test_dir, ".r") imports = None # two ways we can enable R import tests: # 1. preface package name with r- and just list imports in test/imports # 2. 
use list of dicts for test/imports, and have lang: 'r' set in one of those dicts - if m.name().startswith('r-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("r-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'r'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "r" + ): + imports = import_item["imports"] break if imports: - with open(tf, 'a') as fo: + with open(tf, "a") as fo: for name in imports: - fo.write('print("library(%r)")\n' % name) - fo.write('library(%s)\n' % name) - fo.write('\n') + fo.write(f'print("library({name!r})")\n') + fo.write(f"library({name})\n") + fo.write("\n") return tf if (tf_exists or imports) else False -def create_pl_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.pl') +def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: + tf, tf_exists = _create_test_files(m, test_dir, ".pl") + imports = None - if m.name().startswith('perl-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("perl-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'perl'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "perl" + ): + imports = import_item["imports"] break if tf_exists or imports: - with open(tf, 'a') as fo: - print(r'my $expected_version = "%s";' % m.version().rstrip('0'), - file=fo) + with open(tf, "a") as fo: + print( + r'my $expected_version = "{}";'.format(m.version().rstrip("0")), file=fo + ) if imports: for name in imports: - print(r'print("import: %s\n");' % name, file=fo) - print('use %s;\n' % name, file=fo) + print(rf'print("import: {name}\n");', file=fo) + print(f"use {name};\n", file=fo) # Don't try to print version for complex imports - if ' ' not in name: - print(("if (defined {0}->VERSION) {{\n" + - "\tmy $given_version = {0}->VERSION;\n" + - "\t$given_version =~ s/0+$//;\n" + - "\tdie('Expected version ' . $expected_version . ' but" + - " found ' . $given_version) unless ($expected_version " + - "eq $given_version);\n" + - "\tprint('\tusing version ' . {0}->VERSION . '\n');\n" + - "\n}}").format(name), file=fo) + if " " not in name: + print( + f"if (defined {name}->VERSION) {{\n" + f"\tmy $given_version = {name}->VERSION;\n" + f"\t$given_version =~ s/0+$//;\n" + f"\tdie('Expected version ' . $expected_version . ' but" + f" found ' . $given_version) unless ($expected_version " + f"eq $given_version);\n" + f"\tprint('\tusing version ' . {name}->VERSION . 
'\n');\n" + f"\n}}", + file=fo, + ) return tf if (tf_exists or imports) else False -def create_lua_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.lua') +def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool: + tf, tf_exists = _create_test_files(m, test_dir, ".lua") + imports = None - if m.name().startswith('lua-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("lua-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'lua'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "lua" + ): + imports = import_item["imports"] break if imports: - with open(tf, 'a+') as fo: + with open(tf, "a+") as fo: for name in imports: - print(r'print("require \"%s\"\n");' % name, file=fo) - print('require "%s"\n' % name, file=fo) + print(rf'print("require \"{name}\"\n");', file=fo) + print(f'require "{name}"\n', file=fo) return tf if (tf_exists or imports) else False -def create_all_test_files(m, test_dir=None): +def create_all_test_files( + m: MetaData, + test_dir: os.PathLike | None = None, +) -> tuple[bool, bool, bool, bool, bool, list[str]]: if test_dir: + # this happens when we're finishing the build rm_rf(test_dir) - os.makedirs(test_dir) - # this happens when we're finishing the build. - test_deps = m.meta.get('test', {}).get('requires', []) - if test_deps: - with open(os.path.join(test_dir, 'test_time_dependencies.json'), 'w') as f: - json.dump(test_deps, f) + os.makedirs(test_dir, exist_ok=True) + test_requires = ensure_list(m.get_value("test/requires", [])) + if test_requires: + Path(test_dir, "test_time_dependencies.json").write_text( + json.dumps(test_requires) + ) else: # this happens when we're running a package's tests test_dir = m.config.test_dir - - files = create_files(m, test_dir) - - pl_files = create_pl_files(m, test_dir) - py_files = create_py_files(m, test_dir) - r_files = create_r_files(m, test_dir) - lua_files = create_lua_files(m, test_dir) - shell_files = create_shell_files(m, test_dir) - return files, pl_files, py_files, r_files, lua_files, shell_files + os.makedirs(test_dir, exist_ok=True) + + return ( + create_files(m, Path(test_dir)), + create_pl_files(m, test_dir), + create_py_files(m, test_dir), + create_r_files(m, test_dir), + create_lua_files(m, test_dir), + create_shell_files(m, test_dir), + ) diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py new file mode 100644 index 0000000000..f691b5192d --- /dev/null +++ b/conda_build/deprecations.py @@ -0,0 +1,411 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Tools to aid in deprecating code.""" + +from __future__ import annotations + +import sys +import warnings +from argparse import Action +from functools import wraps +from types import ModuleType +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Any, Callable, ParamSpec, Self, TypeVar + + from packaging.version import Version + + T = TypeVar("T") + P = ParamSpec("P") + + ActionType = TypeVar("ActionType", bound=type[Action]) + +from . 
import __version__ + + +class DeprecatedError(RuntimeError): + pass + + +# inspired by deprecation (https://deprecation.readthedocs.io/en/latest/) and +# CPython's warnings._deprecated +class DeprecationHandler: + _version: str | None + _version_tuple: tuple[int, ...] | None + _version_object: Version | None + + def __init__(self: Self, version: str) -> None: + """Factory to create a deprecation handle for the specified version. + + :param version: The version to compare against when checking deprecation statuses. + """ + self._version = version + # Try to parse the version string as a simple tuple[int, ...] to avoid + # packaging.version import and costlier version comparisons. + self._version_tuple = self._get_version_tuple(version) + self._version_object = None + + @staticmethod + def _get_version_tuple(version: str) -> tuple[int, ...] | None: + """Return version as non-empty tuple of ints if possible, else None. + + :param version: Version string to parse. + """ + try: + return tuple(int(part) for part in version.strip().split(".")) or None + except (AttributeError, ValueError): + return None + + def _version_less_than(self: Self, version: str) -> bool: + """Test whether own version is less than the given version. + + :param version: Version string to compare against. + """ + if self._version_tuple and (version_tuple := self._get_version_tuple(version)): + return self._version_tuple < version_tuple + + # If self._version or version could not be represented by a simple + # tuple[int, ...], do a more elaborate version parsing and comparison. + # Avoid this import otherwise to reduce import time for conda activate. + from packaging.version import parse + + if self._version_object is None: + try: + self._version_object = parse(self._version) # type: ignore[arg-type] + except TypeError: + # TypeError: self._version could not be parsed + self._version_object = parse("0.0.0.dev0+placeholder") + return self._version_object < parse(version) + + def __call__( + self: Self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + warnings.warn(message, category, stacklevel=2 + stack) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def argument( + self: Self, + deprecate_in: str, + remove_in: str, + argument: str, + *, + rename: str | None = None, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Deprecation decorator for keyword arguments. + + :param deprecate_in: Version in which code will be marked as deprecated. 
+ :param remove_in: Version in which code is expected to be removed. + :param argument: The argument to deprecate. + :param rename: Optional new argument name. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}({argument})", + # provide a default addendum if renaming and no addendum is provided + addendum=( + f"Use '{rename}' instead." if rename and not addendum else addendum + ), + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + # only warn about argument deprecations if the argument is used + if argument in kwargs: + warnings.warn(message, category, stacklevel=2 + stack) + + # rename argument deprecations as needed + value = kwargs.pop(argument, None) + if rename: + kwargs.setdefault(rename, value) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def action( + self: Self, + deprecate_in: str, + remove_in: str, + action: ActionType, + *, + addendum: str | None = None, + stack: int = 0, + ) -> ActionType: + """Wraps any argparse.Action to issue a deprecation warning.""" + + class DeprecationMixin(Action): + category: type[Warning] + help: str # override argparse.Action's help type annotation + + def __init__(inner_self: Self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=( + # option_string are ordered shortest to longest, + # use the longest as it's the most descriptive + f"`{inner_self.option_strings[-1]}`" + if inner_self.option_strings + # if not a flag/switch, use the destination itself + else f"`{inner_self.dest}`" + ), + addendum=addendum, + deprecation_type=FutureWarning, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + inner_self.category = category + inner_self.help = message + + def __call__( + inner_self: Self, + parser: ArgumentParser, + namespace: Namespace, + values: Any, + option_string: str | None = None, + ) -> None: + # alert user that it's time to remove something + warnings.warn( + inner_self.help, + inner_self.category, + stacklevel=7 + stack, + ) + + super().__call__(parser, namespace, values, option_string) + + return type(action.__name__, (DeprecationMixin, action), {}) # type: ignore[return-value] + + def module( + self: Self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for modules. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. 
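        A minimal usage sketch (illustrative only; the version strings and the module
        being retired are hypothetical, while deprecated is the handler instantiated
        at the bottom of this file):

            # placed at the top of the module that is slated for removal
            from conda_build.deprecations import deprecated

            deprecated.module("3.28.0", "24.1.0", addendum="Use conda_build.new_location instead.")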
+ """ + self.topic( + deprecate_in=deprecate_in, + remove_in=remove_in, + topic=self._get_module(stack)[1], + addendum=addendum, + stack=2 + stack, + ) + + def constant( + self: Self, + deprecate_in: str, + remove_in: str, + constant: str, + value: Any, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for module constant/global. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param constant: + :param value: + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect calling module + module, fullname = self._get_module(stack) + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{fullname}.{constant}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # patch module level __getattr__ to alert user that it's time to remove something + super_getattr = getattr(module, "__getattr__", None) + + def __getattr__(name: str) -> Any: + if name == constant: + warnings.warn(message, category, stacklevel=2 + stack) + return value + + if super_getattr: + return super_getattr(name) + + raise AttributeError(f"module '{fullname}' has no attribute '{name}'") + + module.__getattr__ = __getattr__ # type: ignore[method-assign] + + def topic( + self: Self, + deprecate_in: str, + remove_in: str, + *, + topic: str, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for a topic. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param topic: The topic being deprecated. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=topic, + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + warnings.warn(message, category, stacklevel=2 + stack) + + def _get_module(self: Self, stack: int) -> tuple[ModuleType, str]: + """Detect the module from which we are being called. + + :param stack: The stacklevel increment. + :return: The module and module name. + """ + try: + frame = sys._getframe(2 + stack) + except IndexError: + # IndexError: 2 + stack is out of range + pass + else: + # Shortcut finding the module by manually inspecting loaded modules. + try: + filename = frame.f_code.co_filename + except AttributeError: + # AttributeError: frame.f_code.co_filename is undefined + pass + else: + # use a copy of sys.modules to avoid RuntimeError during iteration + # see https://github.com/conda/conda/issues/13754 + for loaded in tuple(sys.modules.values()): + if not isinstance(loaded, ModuleType): + continue + if not hasattr(loaded, "__file__"): + continue + if loaded.__file__ == filename: + return (loaded, loaded.__name__) + + # If above failed, do an expensive import and costly getmodule call. 
+ import inspect + + module = inspect.getmodule(frame) + if module is not None: + return (module, module.__name__) + + raise DeprecatedError("unable to determine the calling module") + + def _generate_message( + self: Self, + deprecate_in: str, + remove_in: str, + prefix: str, + addendum: str | None, + *, + deprecation_type: type[Warning] = DeprecationWarning, + ) -> tuple[type[Warning] | None, str]: + """Generate the standardized deprecation message and determine whether the + deprecation is pending, active, or past. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param prefix: The message prefix, usually the function name. + :param addendum: Additional messaging. Useful to indicate what to do instead. + :param deprecation_type: The warning type to use for active deprecations. + :return: The warning category (if applicable) and the message. + """ + category: type[Warning] | None + if self._version_less_than(deprecate_in): + category = PendingDeprecationWarning + warning = f"is pending deprecation and will be removed in {remove_in}." + elif self._version_less_than(remove_in): + category = deprecation_type + warning = f"is deprecated and will be removed in {remove_in}." + else: + category = None + warning = f"was slated for removal in {remove_in}." + + return ( + category, + " ".join(filter(None, [prefix, warning, addendum])), # message + ) + + +deprecated = DeprecationHandler(__version__) diff --git a/conda_build/develop.py b/conda_build/develop.py index 6c34d63ec5..d0e3d59fd6 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -1,16 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from os.path import join, isdir, abspath, expanduser, exists +from __future__ import annotations + import shutil import sys +from os.path import abspath, exists, expanduser, isdir, join -from conda_build.post import mk_relative_osx -from conda_build.utils import check_call_env, rec_glob, get_site_packages -from conda_build.os_utils.external import find_executable +from .os_utils.external import find_executable +from .post import mk_relative_osx +from .utils import check_call_env, get_site_packages, on_mac, rec_glob def relink_sharedobjects(pkg_path, build_prefix): - ''' + """ invokes functions in post module to relink to libraries in conda env :param pkg_path: look for shared objects to relink in pkg_path @@ -20,26 +22,26 @@ def relink_sharedobjects(pkg_path, build_prefix): .. note:: develop mode builds the extensions in place and makes a link to package in site-packages/. The build_prefix points to conda environment since runtime libraries should be loaded from environment's lib/. first - ''' + """ # find binaries in package dir and make them relocatable - bin_files = rec_glob(pkg_path, ['.so']) + bin_files = rec_glob(pkg_path, [".so"]) for b_file in bin_files: - if sys.platform == 'darwin': + if on_mac: mk_relative_osx(b_file, build_prefix) else: print("Nothing to do on Linux or Windows.") def write_to_conda_pth(sp_dir, pkg_path): - ''' + """ Append pkg_path to conda.pth in site-packages directory for current environment. Only add path if it doens't already exist. :param sp_dir: path to site-packages/. directory :param pkg_path: the package path to append to site-packes/. dir. 
- ''' - c_file = join(sp_dir, 'conda.pth') - with open(c_file, 'a') as f: + """ + c_file = join(sp_dir, "conda.pth") + with open(c_file, "a") as f: with open(c_file) as cf: # make sure file exists, before we try to read from it hence nested # in append with block @@ -47,17 +49,17 @@ def write_to_conda_pth(sp_dir, pkg_path): pkgs_in_dev_mode = cf.readlines() # only append pkg_path if it doesn't already exist in conda.pth - if pkg_path + '\n' in pkgs_in_dev_mode: + if pkg_path + "\n" in pkgs_in_dev_mode: print("path exists, skipping " + pkg_path) else: - f.write(pkg_path + '\n') + f.write(pkg_path + "\n") print("added " + pkg_path) def get_setup_py(path_): - ''' Return full path to setup.py or exit if not found ''' + """Return full path to setup.py or exit if not found""" # build path points to source dir, builds are placed in the - setup_py = join(path_, 'setup.py') + setup_py = join(path_, "setup.py") if not exists(setup_py): sys.exit(f"No setup.py found in {path_}. Exiting.") @@ -66,21 +68,21 @@ def get_setup_py(path_): def _clean(setup_py): - ''' + """ This invokes: $ python setup.py clean :param setup_py: path to setup.py - ''' + """ # first call setup.py clean - cmd = ['python', setup_py, 'clean'] + cmd = ["python", setup_py, "clean"] check_call_env(cmd) print("Completed: " + " ".join(cmd)) print("===============================================") def _build_ext(setup_py): - ''' + """ Define a develop function - similar to build function todo: need to test on win32 and linux @@ -88,31 +90,31 @@ def _build_ext(setup_py): $ python setup.py build_ext --inplace :param setup_py: path to setup.py - ''' + """ # next call setup.py develop - cmd = ['python', setup_py, 'build_ext', '--inplace'] + cmd = ["python", setup_py, "build_ext", "--inplace"] check_call_env(cmd) print("Completed: " + " ".join(cmd)) print("===============================================") def _uninstall(sp_dir, pkg_path): - ''' + """ Look for pkg_path in conda.pth file in site-packages directory and remove it. If pkg_path is not found in conda.pth, it means package is not installed in 'development mode' via conda develop. :param sp_dir: path to site-packages/. directory :param pkg_path: the package path to be uninstalled. - ''' - o_c_pth = join(sp_dir, 'conda.pth') - n_c_pth = join(sp_dir, 'conda.pth.temp') + """ + o_c_pth = join(sp_dir, "conda.pth") + n_c_pth = join(sp_dir, "conda.pth.temp") found = False - with open(n_c_pth, 'w') as new_c: + with open(n_c_pth, "w") as new_c: with open(o_c_pth) as orig_c: for line in orig_c: - if line != pkg_path + '\n': + if line != pkg_path + "\n": new_c.write(line) else: print("uninstalled: " + pkg_path) @@ -125,21 +127,29 @@ def _uninstall(sp_dir, pkg_path): shutil.move(n_c_pth, o_c_pth) -def execute(recipe_dirs, prefix=sys.prefix, no_pth_file=False, - build_ext=False, clean=False, uninstall=False): - +def execute( + recipe_dirs: list[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: if not isdir(prefix): - sys.exit("""\ -Error: environment does not exist: %s + sys.exit( + f"""\ +Error: environment does not exist: {prefix} # # Use 'conda create' to create the environment first. 
-#""" % prefix) +#""" + ) - assert find_executable('python', prefix=prefix) + assert find_executable("python", prefix=prefix) # current environment's site-packages directory - sp_dir = get_site_packages(prefix, '.'.join((str(sys.version_info.major), - str(sys.version_info.minor)))) + sp_dir = get_site_packages( + prefix, ".".join((str(sys.version_info.major), str(sys.version_info.minor))) + ) if isinstance(recipe_dirs, str): recipe_dirs = [recipe_dirs] diff --git a/conda_build/environ.py b/conda_build/environ.py index 348b0bcd97..7a3a7ca8cb 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1,7 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib -import json import logging import multiprocessing import os @@ -10,60 +11,109 @@ import subprocess import sys import warnings +from collections import defaultdict from functools import lru_cache from glob import glob +from logging import getLogger from os.path import join, normpath - -from .conda_interface import (CondaError, LinkError, LockError, NoPackagesFoundError, - PaddingError, UnsatisfiableError) -from .conda_interface import display_actions, execute_actions, execute_plan, install_actions -from .conda_interface import package_cache, TemporaryDirectory -from .conda_interface import pkgs_dirs, root_dir, create_default_packages -from .conda_interface import reset_context -from .conda_interface import get_version_from_git_tag - -from conda_build import utils -from conda_build.exceptions import BuildLockError, DependencyNeedsBuildingError -from conda_build.features import feature_list -from conda_build.index import get_build_index -from conda_build.os_utils import external -from conda_build.utils import ensure_list, prepend_bin_path, env_var -from conda_build.variants import get_default_variant - +from typing import TYPE_CHECKING + +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + DEFAULTS_CHANNEL_NAME, + UNKNOWN_CHANNEL, +) +from conda.base.context import context, reset_context +from conda.common.io import env_vars +from conda.core.index import LAST_CHANNEL_URLS +from conda.core.link import PrefixSetup, UnlinkLinkTransaction +from conda.core.package_cache_data import PackageCacheData, ProgressiveFetchExtract +from conda.core.prefix_data import PrefixData +from conda.exceptions import ( + CondaError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, +) +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.channel import Channel, prioritize_channels +from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord + +from . import utils +from .exceptions import BuildLockError, DependencyNeedsBuildingError +from .features import feature_list +from .index import get_build_index +from .os_utils import external +from .utils import ( + ensure_list, + env_var, + on_mac, + on_win, + package_record_to_requirement, + prepend_bin_path, +) +from .variants import get_default_variant + +if TYPE_CHECKING: + from pathlib import Path + from typing import Any, Iterable, TypedDict + + from .config import Config + from .metadata import MetaData + + class InstallActionsType(TypedDict): + PREFIX: str | os.PathLike | Path + LINK: list[PackageRecord] + + +log = getLogger(__name__) # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. 
-LANGUAGES = ('PERL', 'LUA', 'R', "NUMPY", 'PYTHON') -R_PACKAGES = ('r-base', 'mro-base', 'r-impl') +LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") +R_PACKAGES = ("r-base", "mro-base", "r-impl") def get_perl_ver(config): - return '.'.join(config.variant.get('perl', get_default_variant(config)['perl']).split('.')[:2]) + return ".".join( + config.variant.get("perl", get_default_variant(config)["perl"]).split(".")[:2] + ) def get_lua_ver(config): - return '.'.join(config.variant.get('lua', get_default_variant(config)['lua']).split('.')[:2]) + return ".".join( + config.variant.get("lua", get_default_variant(config)["lua"]).split(".")[:2] + ) def get_py_ver(config): - py = config.variant.get('python', get_default_variant(config)['python']) - if not hasattr(py, 'split'): + py = config.variant.get("python", get_default_variant(config)["python"]) + if not hasattr(py, "split"): py = py[0] - return '.'.join(py.split('.')[:2]) + return ".".join(py.split(".")[:2]) def get_r_ver(config): - return '.'.join(config.variant.get('r_base', - get_default_variant(config)['r_base']).split('.')[:3]) + return ".".join( + config.variant.get("r_base", get_default_variant(config)["r_base"]).split(".")[ + :3 + ] + ) def get_npy_ver(config): - conda_npy = ''.join(str(config.variant.get('numpy') or - get_default_variant(config)['numpy']).split('.')) + conda_npy = "".join( + str(config.variant.get("numpy") or get_default_variant(config)["numpy"]).split( + "." + ) + ) # Convert int -> string, e.g. # 17 -> '1.7' # 110 -> '1.10' - return conda_npy[0] + '.' + conda_npy[1:] + return conda_npy[0] + "." + conda_npy[1:] def get_lua_include_dir(config): @@ -71,8 +121,9 @@ def get_lua_include_dir(config): @lru_cache(maxsize=None) -def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=False, - expected_rev='HEAD'): +def verify_git_repo( + git_exe, git_dir, git_url, git_commits_since_tag, debug=False, expected_rev="HEAD" +): env = os.environ.copy() log = utils.get_logger(__name__) @@ -83,51 +134,61 @@ def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=Fals OK = True - env['GIT_DIR'] = git_dir + env["GIT_DIR"] = git_dir try: # Verify current commit (minus our locally applied patches) matches expected commit - current_commit = utils.check_output_env([git_exe, - "log", - "-n1", - "--format=%H", - "HEAD" + "^" * git_commits_since_tag], - env=env, stderr=stderr) - current_commit = current_commit.decode('utf-8') - expected_tag_commit = utils.check_output_env([git_exe, "log", "-n1", "--format=%H", - expected_rev], - env=env, stderr=stderr) - expected_tag_commit = expected_tag_commit.decode('utf-8') + current_commit = utils.check_output_env( + [ + git_exe, + "log", + "-n1", + "--format=%H", + "HEAD" + "^" * git_commits_since_tag, + ], + env=env, + stderr=stderr, + ) + current_commit = current_commit.decode("utf-8") + expected_tag_commit = utils.check_output_env( + [git_exe, "log", "-n1", "--format=%H", expected_rev], env=env, stderr=stderr + ) + expected_tag_commit = expected_tag_commit.decode("utf-8") if current_commit != expected_tag_commit: return False # Verify correct remote url. Need to find the git cache directory, # and check the remote from there. 
- cache_details = utils.check_output_env([git_exe, "remote", "-v"], env=env, - stderr=stderr) - cache_details = cache_details.decode('utf-8') - cache_dir = cache_details.split('\n')[0].split()[1] + cache_details = utils.check_output_env( + [git_exe, "remote", "-v"], env=env, stderr=stderr + ) + cache_details = cache_details.decode("utf-8") + cache_dir = cache_details.split("\n")[0].split()[1] if not isinstance(cache_dir, str): # On Windows, subprocess env can't handle unicode. - cache_dir = cache_dir.encode(sys.getfilesystemencoding() or 'utf-8') + cache_dir = cache_dir.encode(sys.getfilesystemencoding() or "utf-8") try: - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) except subprocess.CalledProcessError: - if sys.platform == 'win32' and cache_dir.startswith('/'): + if on_win and cache_dir.startswith("/"): cache_dir = utils.convert_unix_path_to_win(cache_dir) - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) - remote_details = remote_details.decode('utf-8') - remote_url = remote_details.split('\n')[0].split()[1] + remote_details = remote_details.decode("utf-8") + remote_url = remote_details.split("\n")[0].split()[1] # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. - if sys.platform == 'win32' and remote_url.startswith('/'): + if on_win and remote_url.startswith("/"): remote_url = utils.convert_unix_path_to_win(git_url) if os.path.exists(remote_url): @@ -148,6 +209,24 @@ def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=Fals return OK +GIT_DESCRIBE_REGEX = re.compile( + r"(?:[_-a-zA-Z]*)" + r"(?P<version>[a-zA-Z0-9.]+)" + r"(?:-(?P<post>\d+)-g(?P<hash>[0-9a-f]{7,}))$" +) + + +def get_version_from_git_tag(tag): + """Return a PEP440-compliant version derived from the git status. + If that fails for any reason, return the changeset hash.
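    Illustrative examples (a sketch only; the tags are made up but follow the
    "git describe --tags --long" format this regex expects):

        >>> get_version_from_git_tag("1.8.1-0-gabc1234")
        '1.8.1'
        >>> get_version_from_git_tag("v1.8.1-5-gabc1234")
        '1.8.1.post5+abc1234'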
+ """ + m = GIT_DESCRIBE_REGEX.match(tag) + if m is None: + return None + version, post_commit, hash = m.groups() + return version if post_commit == "0" else f"{version}.post{post_commit}+{hash}" + + def get_git_info(git_exe, repo, debug): """ Given a repo to a git repo, return a dictionary of: @@ -167,18 +246,21 @@ def get_git_info(git_exe, repo, debug): # grab information from describe env = os.environ.copy() - env['GIT_DIR'] = repo + env["GIT_DIR"] = repo keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"] try: - output = utils.check_output_env([git_exe, "describe", "--tags", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--tags", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: d.update(dict(zip(keys, parts))) - d['GIT_DESCRIBE_TAG_PEP440'] = str(get_version_from_git_tag(output)) + d["GIT_DESCRIBE_TAG_PEP440"] = str(get_version_from_git_tag(output)) except subprocess.CalledProcessError: msg = ( "Failed to obtain git tag information.\n" @@ -191,35 +273,42 @@ def get_git_info(git_exe, repo, debug): # Try to get the short hash from describing with all refs (not just the tags). if "GIT_DESCRIBE_HASH" not in d: try: - output = utils.check_output_env([git_exe, "describe", "--all", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--all", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: # Don't save GIT_DESCRIBE_TAG and GIT_DESCRIBE_NUMBER because git (probably) # described a branch. We just want to save the short hash. - d['GIT_DESCRIBE_HASH'] = parts[-1] + d["GIT_DESCRIBE_HASH"] = parts[-1] except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) try: # get the _full_ hash of the current HEAD - output = utils.check_output_env([git_exe, "rev-parse", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - - d['GIT_FULL_HASH'] = output + output = utils.check_output_env( + [git_exe, "rev-parse", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + + d["GIT_FULL_HASH"] = output except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) # set up the build string if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d: - d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"], - d["GIT_DESCRIBE_HASH"]) + d["GIT_BUILD_STR"] = "{}_{}".format( + d["GIT_DESCRIBE_NUMBER"], d["GIT_DESCRIBE_HASH"] + ) # issues on Windows with the next line of the command prompt being recorded here. 
assert not any("\n" in value for value in d.values()) @@ -228,29 +317,41 @@ def get_git_info(git_exe, repo, debug): def get_hg_build_info(repo): env = os.environ.copy() - env['HG_DIR'] = repo + env["HG_DIR"] = repo env = {str(key): str(value) for key, value in env.items()} d = {} - cmd = ["hg", "log", "--template", - "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", - "--rev", "."] + cmd = [ + "hg", + "log", + "--template", + "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", + "--rev", + ".", + ] output = utils.check_output_env(cmd, env=env, cwd=os.path.dirname(repo)) - output = output.decode('utf-8') - rev, short_id, tag, distance, branch = output.split('|') - if tag != 'null': - d['HG_LATEST_TAG'] = tag + output = output.decode("utf-8") + rev, short_id, tag, distance, branch = output.split("|") + if tag != "null": + d["HG_LATEST_TAG"] = tag if branch == "": - branch = 'default' - d['HG_BRANCH'] = branch - d['HG_NUM_ID'] = rev - d['HG_LATEST_TAG_DISTANCE'] = distance - d['HG_SHORT_ID'] = short_id - d['HG_BUILD_STR'] = '{}_{}'.format(d['HG_NUM_ID'], d['HG_SHORT_ID']) + branch = "default" + d["HG_BRANCH"] = branch + d["HG_NUM_ID"] = rev + d["HG_LATEST_TAG_DISTANCE"] = distance + d["HG_SHORT_ID"] = short_id + d["HG_BUILD_STR"] = "{}_{}".format(d["HG_NUM_ID"], d["HG_SHORT_ID"]) return d -def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash=False, variant=None): +def get_dict( + m, + prefix=None, + for_env=True, + skip_build_id=False, + escape_backslash=False, + variant=None, +): if not prefix: prefix = m.config.host_prefix @@ -272,8 +373,7 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash d.update(os_vars(m, prefix)) # features - d.update({feat.upper(): str(int(value)) for feat, value in - feature_list}) + d.update({feat.upper(): str(int(value)) for feat, value in feature_list}) variant = variant or m.config.variant for k, v in variant.items(): @@ -283,34 +383,36 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash def conda_build_vars(prefix, config): - src_dir = config.test_dir if os.path.basename(prefix)[:2] == '_t' else config.work_dir + src_dir = ( + config.test_dir if os.path.basename(prefix)[:2] == "_t" else config.work_dir + ) return { - 'CONDA_BUILD': '1', - 'PYTHONNOUSERSITE': '1', - 'CONDA_DEFAULT_ENV': config.host_prefix, - 'ARCH': str(config.host_arch), + "CONDA_BUILD": "1", + "PYTHONNOUSERSITE": "1", + "CONDA_DEFAULT_ENV": config.host_prefix, + "ARCH": str(config.host_arch), # This is the one that is most important for where people put artifacts that get bundled. # It is fed from our function argument, and can be any of: # 1. Build prefix - when host requirements are not explicitly set, # then prefix = build prefix = host prefix # 2. Host prefix - when host requirements are explicitly set, prefix = host prefix # 3. Test prefix - during test runs, this points at the test prefix - 'PREFIX': prefix, + "PREFIX": prefix, # This is for things that are specifically build tools. Things that run on the build # platform, but probably should not be linked against, since they may not run on the # destination host platform # It can be equivalent to config.host_prefix if the host section is not explicitly set. 
- 'BUILD_PREFIX': config.build_prefix, - 'SYS_PREFIX': sys.prefix, - 'SYS_PYTHON': sys.executable, - 'SUBDIR': config.host_subdir, - 'build_platform': config.build_subdir, - 'SRC_DIR': src_dir, - 'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''), - 'HTTP_PROXY': os.getenv('HTTP_PROXY', ''), - 'REQUESTS_CA_BUNDLE': os.getenv('REQUESTS_CA_BUNDLE', ''), - 'DIRTY': '1' if config.dirty else '', - 'ROOT': root_dir, + "BUILD_PREFIX": config.build_prefix, + "SYS_PREFIX": sys.prefix, + "SYS_PYTHON": sys.executable, + "SUBDIR": config.host_subdir, + "build_platform": config.build_subdir, + "SRC_DIR": src_dir, + "HTTPS_PROXY": os.getenv("HTTPS_PROXY", ""), + "HTTP_PROXY": os.getenv("HTTP_PROXY", ""), + "REQUESTS_CA_BUNDLE": os.getenv("REQUESTS_CA_BUNDLE", ""), + "DIRTY": "1" if config.dirty else "", + "ROOT": context.root_prefix, } @@ -320,140 +422,149 @@ def python_vars(metadata, prefix, escape_backslash): sp_dir = utils.get_site_packages(prefix, py_ver) if utils.on_win and escape_backslash: - stdlib_dir = stdlib_dir.replace('\\', '\\\\') - sp_dir = sp_dir.replace('\\', '\\\\') + stdlib_dir = stdlib_dir.replace("\\", "\\\\") + sp_dir = sp_dir.replace("\\", "\\\\") vars_ = { - 'CONDA_PY': ''.join(py_ver.split('.')[:2]), - 'PY3K': str(int(int(py_ver[0]) >= 3)), - 'PY_VER': py_ver, - 'STDLIB_DIR': stdlib_dir, - 'SP_DIR': sp_dir, - } - build_or_host = 'host' if metadata.is_cross else 'build' + "CONDA_PY": "".join(py_ver.split(".")[:2]), + "PY3K": str(int(int(py_ver[0]) >= 3)), + "PY_VER": py_ver, + "STDLIB_DIR": stdlib_dir, + "SP_DIR": sp_dir, + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'python' in deps or metadata.name(fail_ok=True) == 'python': + if "python" in deps or metadata.name() == "python": python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - python_bin = python_bin.replace('\\', '\\\\') + python_bin = python_bin.replace("\\", "\\\\") - vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PYTHON': python_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PYTHON": python_bin, + } + ) - np_ver = metadata.config.variant.get('numpy', get_default_variant(metadata.config)['numpy']) - vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2]) - vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2]) - vars_['NPY_DISTUTILS_APPEND_FLAGS'] = '1' + np_ver = metadata.config.variant.get( + "numpy", get_default_variant(metadata.config)["numpy"] + ) + vars_["NPY_VER"] = ".".join(np_ver.split(".")[:2]) + vars_["CONDA_NPY"] = "".join(np_ver.split(".")[:2]) + vars_["NPY_DISTUTILS_APPEND_FLAGS"] = "1" return vars_ def perl_vars(metadata, prefix, escape_backslash): vars_ = { - 'PERL_VER': get_perl_ver(metadata.config), - 'CONDA_PERL': get_perl_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "PERL_VER": get_perl_ver(metadata.config), + "CONDA_PERL": get_perl_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'perl' in deps or metadata.name(fail_ok=True) == 'perl': + if "perl" in deps or metadata.name() == "perl": perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - perl_bin = perl_bin.replace('\\', '\\\\') + perl_bin = perl_bin.replace("\\", "\\\\") - 
vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PERL': perl_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PERL": perl_bin, + } + ) return vars_ def lua_vars(metadata, prefix, escape_backslash): vars_ = { - 'LUA_VER': get_lua_ver(metadata.config), - 'CONDA_LUA': get_lua_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "LUA_VER": get_lua_ver(metadata.config), + "CONDA_LUA": get_lua_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'lua' in deps: + if "lua" in deps: lua_bin = metadata.config.lua_bin(prefix, metadata.config.host_subdir) lua_include_dir = get_lua_include_dir(metadata.config) if utils.on_win and escape_backslash: - lua_bin = lua_bin.replace('\\', '\\\\') - lua_include_dir = lua_include_dir.replace('\\', '\\\\') + lua_bin = lua_bin.replace("\\", "\\\\") + lua_include_dir = lua_include_dir.replace("\\", "\\\\") - vars_.update({ - 'LUA': lua_bin, - 'LUA_INCLUDE_DIR': lua_include_dir, - }) + vars_.update( + { + "LUA": lua_bin, + "LUA_INCLUDE_DIR": lua_include_dir, + } + ) return vars_ def r_vars(metadata, prefix, escape_backslash): vars_ = { - 'R_VER': get_r_ver(metadata.config), - 'CONDA_R': get_r_ver(metadata.config), - } + "R_VER": get_r_ver(metadata.config), + "CONDA_R": get_r_ver(metadata.config), + } - build_or_host = 'host' if metadata.is_cross else 'build' + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if any(r_pkg in deps for r_pkg in R_PACKAGES) or \ - metadata.name(fail_ok=True) in R_PACKAGES: + if any(r_pkg in deps for r_pkg in R_PACKAGES) or metadata.name() in R_PACKAGES: r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir) # set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages - r_user = join(prefix, 'Libs', 'R') + r_user = join(prefix, "Libs", "R") if utils.on_win and escape_backslash: - r_bin = r_bin.replace('\\', '\\\\') + r_bin = r_bin.replace("\\", "\\\\") - vars_.update({ - 'R': r_bin, - 'R_USER': r_user, - }) + vars_.update( + { + "R": r_bin, + "R_USER": r_user, + } + ) return vars_ -def meta_vars(meta, skip_build_id=False): +def meta_vars(meta: MetaData, skip_build_id=False): d = {} - for var_name in ensure_list(meta.get_value('build/script_env', [])): - if '=' in var_name: - var_name, value = var_name.split('=', 1) + for var_name in ensure_list(meta.get_value("build/script_env", [])): + if "=" in var_name: + var_name, value = var_name.split("=", 1) else: value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' is undefined." % var_name, - UserWarning + f"The environment variable '{var_name}' specified in script_env is undefined.", + UserWarning, ) else: d[var_name] = value warnings.warn( - "The environment variable '%s' is being passed through with value '%s'. " + f"The environment variable '{var_name}' is being passed through with value " + f"'{'' if meta.config.suppress_variables else value}'. " "If you are splitting build and test phases with --no-test, please ensure " - "that this value is also set similarly at test time." 
% - (var_name, "" if meta.config.suppress_variables else value), - UserWarning + "that this value is also set similarly at test time.", + UserWarning, ) - folder = meta.get_value('source/0/folder', '') + folder = meta.get_value("source/0/folder", "") repo_dir = join(meta.config.work_dir, folder) - git_dir = join(repo_dir, '.git') - hg_dir = join(repo_dir, '.hg') + git_dir = join(repo_dir, ".git") + hg_dir = join(repo_dir, ".hg") if not isinstance(git_dir, str): # On Windows, subprocess env can't handle unicode. - git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8') + git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8") - git_exe = external.find_executable('git', meta.config.build_prefix) + git_exe = external.find_executable("git", meta.config.build_prefix) if git_exe and os.path.exists(git_dir): # We set all 'source' metavars using the FIRST source entry in meta.yaml. - git_url = meta.get_value('source/0/git_url') + git_url = meta.get_value("source/0/git_url") if os.path.exists(git_url): - if sys.platform == 'win32': + if on_win: git_url = utils.convert_unix_path_to_win(git_url) # If git_url is a relative path instead of a url, convert it to an abspath git_url = normpath(join(meta.path, git_url)) @@ -461,41 +572,45 @@ def meta_vars(meta, skip_build_id=False): _x = False if git_url: - _x = verify_git_repo(git_exe, - git_dir, - git_url, - meta.config.git_commits_since_tag, - meta.config.debug, - meta.get_value('source/0/git_rev', 'HEAD')) - - if _x or meta.get_value('source/0/path'): + _x = verify_git_repo( + git_exe, + git_dir, + git_url, + meta.config.git_commits_since_tag, + meta.config.debug, + meta.get_value("source/0/git_rev", "HEAD"), + ) + + if _x or meta.get_value("source/0/path"): d.update(get_git_info(git_exe, git_dir, meta.config.debug)) - elif external.find_executable('hg', meta.config.build_prefix) and os.path.exists(hg_dir): + elif external.find_executable("hg", meta.config.build_prefix) and os.path.exists( + hg_dir + ): d.update(get_hg_build_info(hg_dir)) - # use `get_value` to prevent early exit while name is still unresolved during rendering - d['PKG_NAME'] = meta.get_value('package/name') - d['PKG_VERSION'] = meta.version() - d['PKG_BUILDNUM'] = str(meta.build_number()) + d["PKG_NAME"] = meta.name() + d["PKG_VERSION"] = meta.version() + d["PKG_BUILDNUM"] = str(meta.build_number()) if meta.final and not skip_build_id: - d['PKG_BUILD_STRING'] = str(meta.build_id()) - d['PKG_HASH'] = meta.hash_dependencies() + d["PKG_BUILD_STRING"] = meta.build_id() + d["PKG_HASH"] = meta.hash_dependencies() else: - d['PKG_BUILD_STRING'] = 'placeholder' - d['PKG_HASH'] = '1234567' - d['RECIPE_DIR'] = meta.path + d["PKG_BUILD_STRING"] = "placeholder" + d["PKG_HASH"] = "1234567" + d["RECIPE_DIR"] = meta.path return d @lru_cache(maxsize=None) def get_cpu_count(): - if sys.platform == "darwin": + if on_mac: # multiprocessing.cpu_count() is not reliable on OSX # See issue #645 on github.com/conda/conda-build - out, _ = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True, - stdout=subprocess.PIPE).communicate() - return out.decode('utf-8').strip() + out, _ = subprocess.Popen( + "sysctl -n hw.logicalcpu", shell=True, stdout=subprocess.PIPE + ).communicate() + return out.decode("utf-8").strip() else: try: return str(multiprocessing.cpu_count()) @@ -505,13 +620,13 @@ def get_cpu_count(): def get_shlib_ext(host_platform): # Return the shared library extension. 
- if host_platform.startswith('win'): - return '.dll' - elif host_platform in ['osx', 'darwin']: - return '.dylib' - elif host_platform.startswith('linux'): - return '.so' - elif host_platform == 'noarch': + if host_platform.startswith("win"): + return ".dll" + elif host_platform in ["osx", "darwin"]: + return ".dylib" + elif host_platform.startswith("linux") or host_platform.endswith("-wasm32"): + return ".so" + elif host_platform == "noarch": # noarch packages should not contain shared libraries, use the system # platform if this is requested return get_shlib_ext(sys.platform) @@ -522,89 +637,91 @@ def get_shlib_ext(host_platform): def windows_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" # We have gone for the clang values here. - win_arch = 'i386' if str(m.config.host_arch) == '32' else 'amd64' - win_msvc = '19.0.0' - library_prefix = join(prefix, 'Library') - drive, tail = m.config.host_prefix.split(':') - get_default('SCRIPTS', join(prefix, 'Scripts')) - get_default('LIBRARY_PREFIX', library_prefix) - get_default('LIBRARY_BIN', join(library_prefix, 'bin')) - get_default('LIBRARY_INC', join(library_prefix, 'include')) - get_default('LIBRARY_LIB', join(library_prefix, 'lib')) - get_default('CYGWIN_PREFIX', ''.join(('/cygdrive/', drive.lower(), tail.replace('\\', '/')))) + win_arch = "i386" if str(m.config.host_arch) == "32" else "amd64" + win_msvc = "19.0.0" + library_prefix = join(prefix, "Library") + drive, tail = m.config.host_prefix.split(":") + get_default("SCRIPTS", join(prefix, "Scripts")) + get_default("LIBRARY_PREFIX", library_prefix) + get_default("LIBRARY_BIN", join(library_prefix, "bin")) + get_default("LIBRARY_INC", join(library_prefix, "include")) + get_default("LIBRARY_LIB", join(library_prefix, "lib")) + get_default( + "CYGWIN_PREFIX", "".join(("/cygdrive/", drive.lower(), tail.replace("\\", "/"))) + ) # see https://en.wikipedia.org/wiki/Environment_variable#Default_values - get_default('ALLUSERSPROFILE') - get_default('APPDATA') - get_default('CommonProgramFiles') - get_default('CommonProgramFiles(x86)') - get_default('CommonProgramW6432') - get_default('COMPUTERNAME') - get_default('ComSpec') - get_default('HOMEDRIVE') - get_default('HOMEPATH') - get_default('LOCALAPPDATA') - get_default('LOGONSERVER') - get_default('NUMBER_OF_PROCESSORS') - get_default('PATHEXT') - get_default('ProgramData') - get_default('ProgramFiles') - get_default('ProgramFiles(x86)') - get_default('ProgramW6432') - get_default('PROMPT') - get_default('PSModulePath') - get_default('PUBLIC') - get_default('SystemDrive') - get_default('SystemRoot') - get_default('TEMP') - get_default('TMP') - get_default('USERDOMAIN') - get_default('USERNAME') - get_default('USERPROFILE') - get_default('windir') + get_default("ALLUSERSPROFILE") + get_default("APPDATA") + get_default("CommonProgramFiles") + get_default("CommonProgramFiles(x86)") + get_default("CommonProgramW6432") + get_default("COMPUTERNAME") + get_default("ComSpec") + get_default("HOMEDRIVE") + get_default("HOMEPATH") + get_default("LOCALAPPDATA") + get_default("LOGONSERVER") + get_default("NUMBER_OF_PROCESSORS") + get_default("PATHEXT") + get_default("ProgramData") + get_default("ProgramFiles") + get_default("ProgramFiles(x86)") + get_default("ProgramW6432") + get_default("PROMPT") + get_default("PSModulePath") + get_default("PUBLIC") + get_default("SystemDrive") + get_default("SystemRoot") + get_default("TEMP") + get_default("TMP") + get_default("USERDOMAIN") + 
get_default("USERNAME") + get_default("USERPROFILE") + get_default("windir") # CPU data, see https://github.com/conda/conda-build/issues/2064 - get_default('PROCESSOR_ARCHITEW6432') - get_default('PROCESSOR_ARCHITECTURE') - get_default('PROCESSOR_IDENTIFIER') - get_default('BUILD', win_arch + '-pc-windows-' + win_msvc) + get_default("PROCESSOR_ARCHITEW6432") + get_default("PROCESSOR_ARCHITECTURE") + get_default("PROCESSOR_IDENTIFIER") + get_default("BUILD", win_arch + "-pc-windows-" + win_msvc) for k in os.environ.keys(): - if re.match('VS[0-9]{2,3}COMNTOOLS', k): + if re.match("VS[0-9]{2,3}COMNTOOLS", k): get_default(k) - elif re.match('VS[0-9]{4}INSTALLDIR', k): + elif re.match("VS[0-9]{4}INSTALLDIR", k): get_default(k) def unix_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - get_default('HOME', 'UNKNOWN') - get_default('PKG_CONFIG_PATH', join(prefix, 'lib', 'pkgconfig')) - get_default('CMAKE_GENERATOR', 'Unix Makefiles') - get_default('SSL_CERT_FILE') + get_default("HOME", "UNKNOWN") + get_default("PKG_CONFIG_PATH", join(prefix, "lib", "pkgconfig")) + get_default("CMAKE_GENERATOR", "Unix Makefiles") + get_default("SSL_CERT_FILE") def osx_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - if str(m.config.host_arch) == '32': - OSX_ARCH = 'i386' + if str(m.config.host_arch) == "32": + OSX_ARCH = "i386" MACOSX_DEPLOYMENT_TARGET = 10.9 - elif str(m.config.host_arch) == 'arm64': - OSX_ARCH = 'arm64' + elif str(m.config.host_arch) == "arm64": + OSX_ARCH = "arm64" MACOSX_DEPLOYMENT_TARGET = 11.0 else: - OSX_ARCH = 'x86_64' + OSX_ARCH = "x86_64" MACOSX_DEPLOYMENT_TARGET = 10.9 - if str(m.config.arch) == '32': - BUILD = 'i386-apple-darwin13.4.0' - elif str(m.config.arch) == 'arm64': - BUILD = 'arm64-apple-darwin20.0.0' + if str(m.config.arch) == "32": + BUILD = "i386-apple-darwin13.4.0" + elif str(m.config.arch) == "arm64": + BUILD = "arm64-apple-darwin20.0.0" else: - BUILD = 'x86_64-apple-darwin13.4.0' + BUILD = "x86_64-apple-darwin13.4.0" # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. # rpath = ' -Wl,-rpath,%(PREFIX)s/lib' % d # SIP workaround, DYLD_* no longer works. # d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d - get_default('OSX_ARCH', OSX_ARCH) - get_default('MACOSX_DEPLOYMENT_TARGET', MACOSX_DEPLOYMENT_TARGET) - get_default('BUILD', BUILD) + get_default("OSX_ARCH", OSX_ARCH) + get_default("MACOSX_DEPLOYMENT_TARGET", MACOSX_DEPLOYMENT_TARGET) + get_default("BUILD", BUILD) @lru_cache(maxsize=None) @@ -618,32 +735,35 @@ def linux_vars(m, get_default, prefix): build_arch = platform_machine # Python reports x86_64 when running a i686 Python binary on a 64-bit CPU # unless run through linux32. Issue a warning when we detect this. - if build_arch == 'x86_64' and platform_architecture[0] == '32bit': + if build_arch == "x86_64" and platform_architecture[0] == "32bit": print("Warning: You are running 32-bit Python on a 64-bit linux installation") print(" but have not launched it via linux32. Various qeuries *will*") print(" give unexpected results (uname -m, platform.machine() etc)") - build_arch = 'i686' + build_arch = "i686" # the GNU triplet is powerpc, not ppc. This matters. 
- if build_arch.startswith('ppc'): - build_arch = build_arch.replace('ppc', 'powerpc') - if build_arch.startswith('powerpc') or build_arch.startswith('aarch64') \ - or build_arch.startswith('s390x'): - build_distro = 'cos7' + if build_arch.startswith("ppc"): + build_arch = build_arch.replace("ppc", "powerpc") + if ( + build_arch.startswith("powerpc") + or build_arch.startswith("aarch64") + or build_arch.startswith("s390x") + ): + build_distro = "cos7" else: - build_distro = 'cos6' + build_distro = "cos6" # There is also QEMU_SET_ENV, but that needs to be # filtered so it only contains the result of `linux_vars` # which, before this change was empty, and after it only # contains other QEMU env vars. - get_default('CFLAGS') - get_default('CXXFLAGS') - get_default('LDFLAGS') - get_default('QEMU_LD_PREFIX') - get_default('QEMU_UNAME') - get_default('DEJAGNU') - get_default('DISPLAY') - get_default('LD_RUN_PATH', prefix + '/lib') - get_default('BUILD', build_arch + '-conda_' + build_distro + '-linux-gnu') + get_default("CFLAGS") + get_default("CXXFLAGS") + get_default("LDFLAGS") + get_default("QEMU_LD_PREFIX") + get_default("QEMU_UNAME") + get_default("DEJAGNU") + get_default("DISPLAY") + get_default("LD_RUN_PATH", prefix + "/lib") + get_default("BUILD", build_arch + "-conda_" + build_distro + "-linux-gnu") def set_from_os_or_variant(out_dict, key, variant, default): @@ -663,23 +783,24 @@ def system_vars(env_dict, m, prefix): return os_vars(m, prefix) -@lru_cache(maxsize=None) def os_vars(m, prefix): d = dict() # note the dictionary is passed in here - variables are set in that dict if they are non-null - get_default = lambda key, default='': set_from_os_or_variant(d, key, m.config.variant, default) + get_default = lambda key, default="": set_from_os_or_variant( + d, key, m.config.variant, default + ) - get_default('CPU_COUNT', get_cpu_count()) - get_default('LANG') - get_default('LC_ALL') - get_default('MAKEFLAGS') - d['SHLIB_EXT'] = get_shlib_ext(m.config.host_platform) - d['PATH'] = os.environ.copy()['PATH'] + get_default("CPU_COUNT", get_cpu_count()) + get_default("LANG") + get_default("LC_ALL") + get_default("MAKEFLAGS") + d["SHLIB_EXT"] = get_shlib_ext(m.config.host_platform) + d["PATH"] = os.environ.copy()["PATH"] if not m.config.activate: d = prepend_bin_path(d, m.config.host_prefix) - if sys.platform == 'win32': + if on_win: windows_vars(m, get_default, prefix) else: unix_vars(m, get_default, prefix) @@ -692,79 +813,39 @@ def os_vars(m, prefix): return d -class InvalidEnvironment(Exception): - pass - - -# Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools ) -# Vendored here to avoid the whole dependency for just this bit. -def _load_json(path): - with open(path) as fin: - x = json.load(fin) - return x - - -def _load_all_json(path): - """ - Load all json files in a directory. Return dictionary with filenames mapped to json - dictionaries. - """ - root, _, files = next(utils.walk(path)) - result = {} - for f in files: - if f.endswith('.json'): - result[f] = _load_json(join(root, f)) - return result - - -class Environment: - def __init__(self, path): - """ - Initialize an Environment object. - - To reflect changes in the underlying environment, a new Environment object should be - created. 
- """ - self.path = path - self._meta = join(path, 'conda-meta') - if os.path.isdir(path) and os.path.isdir(self._meta): - self._packages = {} - else: - raise InvalidEnvironment(f'Unable to load environment {path}') - - def _read_package_json(self): - if not self._packages: - self._packages = _load_all_json(self._meta) - - def package_specs(self): - """ - List all package specs in the environment. - """ - self._read_package_json() - json_objs = self._packages.values() - specs = [] - for i in json_objs: - p, v, b = i['name'], i['version'], i['build'] - specs.append(f'{p} {v} {b}') - return specs - - -cached_actions = {} +cached_precs: dict[ + tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord] +] = {} last_index_ts = 0 -def get_install_actions(prefix, specs, env, retries=0, subdir=None, - verbose=True, debug=False, locking=True, - bldpkgs_dirs=None, timeout=900, disable_pip=False, - max_env_retry=3, output_folder=None, channel_urls=None): - global cached_actions +# NOTE: The function has to retain the "get_install_actions" name for now since +# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build +# checks for this name in the call stack explicitly. +def get_install_actions( + prefix: str | os.PathLike | Path, + specs: Iterable[str | MatchSpec], + env, # unused + retries: int = 0, + subdir=None, + verbose: bool = True, + debug: bool = False, + locking: bool = True, + bldpkgs_dirs=None, + timeout=900, + disable_pip: bool = False, + max_env_retry: int = 3, + output_folder=None, + channel_urls=None, +) -> list[PackageRecord]: + global cached_precs global last_index_ts - actions = {} + log = utils.get_logger(__name__) conda_log_level = logging.WARN specs = list(specs) if specs: - specs.extend(create_default_packages) + specs.extend(context.create_default_packages) if verbose or debug: capture = contextlib.nullcontext if debug: @@ -773,20 +854,33 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, capture = utils.capture for feature, value in feature_list: if value: - specs.append('%s@' % feature) + specs.append(f"{feature}@") bldpkgs_dirs = ensure_list(bldpkgs_dirs) - index, index_ts, _ = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder, - channel_urls=channel_urls, debug=debug, verbose=verbose, - locking=locking, timeout=timeout) - specs = tuple(utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith('@')) + index, index_ts, _ = get_build_index( + subdir, + list(bldpkgs_dirs)[0], + output_folder=output_folder, + channel_urls=channel_urls, + debug=debug, + verbose=verbose, + locking=locking, + timeout=timeout, + ) + specs = tuple( + utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@") + ) - if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and - last_index_ts >= index_ts): - actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() - if "PREFIX" in actions: - actions['PREFIX'] = prefix + precs: list[PackageRecord] = [] + if ( + specs, + env, + subdir, + channel_urls, + disable_pip, + ) in cached_precs and last_index_ts >= index_ts: + precs = cached_precs[(specs, env, subdir, channel_urls, disable_pip)].copy() elif specs: # this is hiding output like: # Fetching package metadata ........... 
@@ -794,63 +888,105 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, with utils.LoggingContext(conda_log_level): with capture(): try: - actions = install_actions(prefix, index, specs, force=True) + precs = _install_actions(prefix, index, specs)["LINK"] except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, AssertionError, BuildLockError) as exc: - if 'lock' in str(exc): - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc) or - isinstance(exc, AssertionError)): - locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout) + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + AssertionError, + BuildLockError, + ) as exc: + if "lock" in str(exc): + log.warn( + "failed to get package records, retrying. exception was: %s", + str(exc), + ) + elif ( + "requires a minimum conda version" in str(exc) + or "link a source that does not" in str(exc) + or isinstance(exc, AssertionError) + ): + locks = utils.get_conda_operation_locks( + locking, bldpkgs_dirs, timeout + ) with utils.try_acquire_locks(locks, timeout=timeout): pkg_dir = str(exc) folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + while ( + os.path.dirname(pkg_dir) not in context.pkgs_dirs + and folder < 20 + ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) - if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir): + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) + if pkg_dir in context.pkgs_dirs and os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retries < max_env_retry: - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - actions = get_install_actions(prefix, tuple(specs), env, - retries=retries + 1, - subdir=subdir, - verbose=verbose, - debug=debug, - locking=locking, - bldpkgs_dirs=tuple(bldpkgs_dirs), - timeout=timeout, - disable_pip=disable_pip, - max_env_retry=max_env_retry, - output_folder=output_folder, - channel_urls=tuple(channel_urls)) + log.warn( + "failed to get package records, retrying. exception was: %s", + str(exc), + ) + precs = get_package_records( + prefix, + specs, + env, + retries=retries + 1, + subdir=subdir, + verbose=verbose, + debug=debug, + locking=locking, + bldpkgs_dirs=tuple(bldpkgs_dirs), + timeout=timeout, + disable_pip=disable_pip, + max_env_retry=max_env_retry, + output_folder=output_folder, + channel_urls=tuple(channel_urls), + ) else: - log.error("Failed to get install actions, max retries exceeded.") + log.error( + "Failed to get package records, max retries exceeded." + ) raise if disable_pip: - for pkg in ('pip', 'setuptools', 'wheel'): + for pkg in ("pip", "setuptools", "wheel"): # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. 
are manually specified - if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs): - actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg] - utils.trim_empty_keys(actions) - cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() + if not any( + re.match(rf"^{pkg}(?:$|[\s=].*)", str(dep)) for dep in specs + ): + precs = [prec for prec in precs if prec.name != pkg] + cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() last_index_ts = index_ts - return actions + return precs + + +get_package_records = get_install_actions +del get_install_actions -def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0, - locks=None, is_cross=False, is_conda=False): - ''' +def create_env( + prefix: str | os.PathLike | Path, + specs_or_precs: Iterable[str | MatchSpec] | Iterable[PackageRecord], + env, + config, + subdir, + clear_cache: bool = True, + retry: int = 0, + locks=None, + is_cross: bool = False, + is_conda: bool = False, +) -> None: + """ Create a conda envrionment for the given prefix and specs. - ''' + """ if config.debug: external_logger_context = utils.LoggingContext(logging.DEBUG) else: @@ -866,94 +1002,150 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, # if os.path.isdir(prefix): # utils.rm_rf(prefix) - if specs_or_actions: # Don't waste time if there is nothing to do + specs_or_precs = tuple(ensure_list(specs_or_precs)) + if specs_or_precs: # Don't waste time if there is nothing to do log.debug("Creating environment in %s", prefix) - log.debug(str(specs_or_actions)) + log.debug(str(specs_or_precs)) if not locks: locks = utils.get_conda_operation_locks(config) try: with utils.try_acquire_locks(locks, timeout=config.timeout): - # input is a list - it's specs in MatchSpec format - if not hasattr(specs_or_actions, 'keys'): - specs = list(set(specs_or_actions)) - actions = get_install_actions(prefix, tuple(specs), env, - subdir=subdir, - verbose=config.verbose, - debug=config.debug, - locking=config.locking, - bldpkgs_dirs=tuple(config.bldpkgs_dirs), - timeout=config.timeout, - disable_pip=config.disable_pip, - max_env_retry=config.max_env_retry, - output_folder=config.output_folder, - channel_urls=tuple(config.channel_urls)) + # input is a list of specs in MatchSpec format + if not isinstance(specs_or_precs[0], PackageRecord): + precs = get_package_records( + prefix, + tuple(set(specs_or_precs)), + env, + subdir=subdir, + verbose=config.verbose, + debug=config.debug, + locking=config.locking, + bldpkgs_dirs=tuple(config.bldpkgs_dirs), + timeout=config.timeout, + disable_pip=config.disable_pip, + max_env_retry=config.max_env_retry, + output_folder=config.output_folder, + channel_urls=tuple(config.channel_urls), + ) else: - actions = specs_or_actions - index, _, _ = get_build_index(subdir=subdir, - bldpkgs_dir=config.bldpkgs_dir, - output_folder=config.output_folder, - channel_urls=config.channel_urls, - debug=config.debug, - verbose=config.verbose, - locking=config.locking, - timeout=config.timeout) - utils.trim_empty_keys(actions) - display_actions(actions, index) + precs = specs_or_precs + index, _, _ = get_build_index( + subdir=subdir, + bldpkgs_dir=config.bldpkgs_dir, + output_folder=config.output_folder, + channel_urls=config.channel_urls, + debug=config.debug, + verbose=config.verbose, + locking=config.locking, + timeout=config.timeout, + ) + _display_actions(prefix, precs) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = 
str(v) - with env_var('CONDA_QUIET', not config.verbose, reset_context): - with env_var('CONDA_JSON', not config.verbose, reset_context): - execute_actions(actions, index) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, BuildLockError) as exc: - if (("too short in" in str(exc) or - re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or - isinstance(exc, PaddingError)) and - config.prefix_length > 80): + with env_var("CONDA_QUIET", not config.verbose, reset_context): + with env_var("CONDA_JSON", not config.verbose, reset_context): + _execute_actions(prefix, precs) + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + BuildLockError, + ) as exc: + if ( + "too short in" in str(exc) + or re.search( + "post-link failed for: (?:[a-zA-Z]*::)?openssl", str(exc) + ) + or isinstance(exc, PaddingError) + ) and config.prefix_length > 80: if config.prefix_length_fallback: - log.warn("Build prefix failed with prefix length %d", - config.prefix_length) + log.warn( + "Build prefix failed with prefix length %d", + config.prefix_length, + ) log.warn("Error was: ") log.warn(str(exc)) - log.warn("One or more of your package dependencies needs to be rebuilt " - "with a longer prefix length.") - log.warn("Falling back to legacy prefix length of 80 characters.") - log.warn("Your package will not install into prefixes > 80 characters.") + log.warn( + "One or more of your package dependencies needs to be rebuilt " + "with a longer prefix length." + ) + log.warn( + "Falling back to legacy prefix length of 80 characters." + ) + log.warn( + "Your package will not install into prefixes > 80 characters." + ) config.prefix_length = 80 - host = '_h_env' in prefix - # Set this here and use to create environ - # Setting this here is important because we use it below (symlink) - prefix = config.host_prefix if host else config.build_prefix - actions['PREFIX'] = prefix - - create_env(prefix, actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, is_cross=is_cross) + create_env( + ( + config.host_prefix + if "_h_env" in prefix + else config.build_prefix + ), + specs_or_precs, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + is_cross=is_cross, + ) else: raise - elif 'lock' in str(exc): + elif "lock" in str(exc): if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc)): + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_precs, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) + elif "requires a minimum conda version" in str( + exc + ) or "link a source that does not" in str(exc): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = str(exc) folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + while ( + os.path.dirname(pkg_dir) not in context.pkgs_dirs + and folder < 20 + ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. 
" + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_precs, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -961,18 +1153,37 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, raise # HACK: some of the time, conda screws up somehow and incomplete packages result. # Just retry. - except (AssertionError, OSError, ValueError, RuntimeError, LockError) as exc: + except ( + AssertionError, + OSError, + ValueError, + RuntimeError, + LockError, + ) as exc: if isinstance(exc, AssertionError): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = os.path.dirname(os.path.dirname(str(exc))) - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. exception was: %s", str(exc) + ) + create_env( + prefix, + specs_or_precs, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -982,6 +1193,29 @@ def get_pkg_dirs_locks(dirs, config): return [utils.get_lock(folder, timeout=config.timeout) for folder in dirs] +def clean_pkg_cache(dist: str, config: Config) -> None: + with utils.LoggingContext(logging.DEBUG if config.debug else logging.WARN): + locks = get_pkg_dirs_locks((config.bldpkgs_dir, *context.pkgs_dirs), config) + with utils.try_acquire_locks(locks, timeout=config.timeout): + for pkgs_dir in context.pkgs_dirs: + if any( + os.path.exists(os.path.join(pkgs_dir, f"{dist}{ext}")) + for ext in ("", *CONDA_PACKAGE_EXTENSIONS) + ): + log.debug( + "Conda caching error: %s package remains in cache after removal", + dist, + ) + log.debug("manually removing to compensate") + package_cache = PackageCacheData.first_writable([pkgs_dir]) + for cache_pkg_id in package_cache.query(dist): + package_cache.remove(cache_pkg_id) + + # Note that this call acquires the relevant locks, so this must be called + # outside the lock context above. 
+ remove_existing_packages(context.pkgs_dirs, [dist], config) + + def remove_existing_packages(dirs, fns, config): locks = get_pkg_dirs_locks(dirs, config) if config.locking else [] @@ -990,62 +1224,182 @@ def remove_existing_packages(dirs, fns, config): for fn in fns: all_files = [fn] if not os.path.isabs(fn): - all_files = glob(os.path.join(folder, fn + '*')) + all_files = glob(os.path.join(folder, fn + "*")) for entry in all_files: utils.rm_rf(entry) -def clean_pkg_cache(dist, config): - locks = [] - - conda_log_level = logging.WARN - if config.debug: - conda_log_level = logging.DEBUG - - with utils.LoggingContext(conda_log_level): - locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) - with utils.try_acquire_locks(locks, timeout=config.timeout): - rmplan = [ - 'RM_EXTRACTED {0} local::{0}'.format(dist), - 'RM_FETCHED {0} local::{0}'.format(dist), - ] - execute_plan(rmplan) - - # Conda does not seem to do a complete cleanup sometimes. This is supplemental. - # Conda's cleanup is still necessary - it keeps track of its own in-memory - # list of downloaded things. - for folder in pkgs_dirs: - if (os.path.exists(os.path.join(folder, dist)) or - os.path.exists(os.path.join(folder, dist + '.tar.bz2')) or - any(pkg_id in package_cache() for pkg_id in [dist, 'local::' + dist])): - log = utils.get_logger(__name__) - log.debug("Conda caching error: %s package remains in cache after removal", - dist) - log.debug("manually removing to compensate") - cache = package_cache() - keys = [key for key in cache.keys() if dist in key] - for pkg_id in keys: - if pkg_id in cache: - del cache[pkg_id] - - # Note that this call acquires the relevant locks, so this must be called - # outside the lock context above. - remove_existing_packages(pkgs_dirs, [dist], config) - - def get_pinned_deps(m, section): - with TemporaryDirectory(prefix='_') as tmpdir: - actions = get_install_actions(tmpdir, - tuple(m.ms_depends(section)), section, - subdir=m.config.target_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - runtime_deps = [' '.join(link.dist_name.rsplit('-', 2)) for link in actions.get('LINK', [])] - return runtime_deps + with TemporaryDirectory(prefix="_") as tmpdir: + precs = get_package_records( + tmpdir, + tuple(m.ms_depends(section)), + section, + subdir=m.config.target_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + return [package_record_to_requirement(prec) for prec in precs] + + +# NOTE: The function has to retain the "install_actions" name for now since +# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build +# checks for this name in the call stack explicitly. +def install_actions( + prefix: str | os.PathLike | Path, + index, + specs: Iterable[str | MatchSpec], +) -> InstallActionsType: + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471 + # but reduced to only the functionality actually used within conda-build. 
+ + with env_vars( + { + "CONDA_ALLOW_NON_CHANNEL_URLS": "true", + "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false", + }, + callback=reset_context, + ): + # a hack since in conda-build we don't track channel_priority_map + channels: tuple[Channel, ...] | None + subdirs: tuple[str, ...] | None + if LAST_CHANNEL_URLS: + channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) + # tuple(dict.fromkeys(...)) removes duplicates while preserving input order. + channels = tuple( + dict.fromkeys(Channel(url) for url in channel_priority_map) + ) + subdirs = ( + tuple( + dict.fromkeys( + subdir for channel in channels if (subdir := channel.subdir) + ) + ) + or context.subdirs + ) + else: + channels = subdirs = None + + mspecs = tuple(MatchSpec(spec) for spec in specs) + + PrefixData._cache_.clear() + + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend(prefix, channels, subdirs, specs_to_add=mspecs) + if index: + # Solver can modify the index (e.g., Solver._prepare adds virtual + # package) => Copy index (just outer container, not deep copy) + # to conserve it. + solver._index = index.copy() + txn = solver.solve_for_transaction(prune=False, ignore_pinned=False) + prefix_setup = txn.prefix_setups[prefix] + return { + "PREFIX": prefix, + "LINK": [prec for prec in prefix_setup.link_precs], + } + + +_install_actions = install_actions +del install_actions + + +def _execute_actions(prefix, precs): + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 + # but reduced to only the functionality actually used within conda-build. + assert prefix + + # Always link menuinst first/last on windows in case a subsequent + # package tries to import it to create/remove a shortcut + precs = [ + *(prec for prec in precs if prec.name == "menuinst"), + *(prec for prec in precs if prec.name != "menuinst"), + ] + + progressive_fetch_extract = ProgressiveFetchExtract(precs) + progressive_fetch_extract.prepare() + + stp = PrefixSetup(prefix, (), precs, (), [], ()) + unlink_link_transaction = UnlinkLinkTransaction(stp) + + log.debug(" %s(%r)", "PROGRESSIVEFETCHEXTRACT", progressive_fetch_extract) + progressive_fetch_extract.execute() + log.debug(" %s(%r)", "UNLINKLINKTRANSACTION", unlink_link_transaction) + unlink_link_transaction.execute() + + +def _display_actions(prefix, precs): + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 + # but reduced to only the functionality actually used within conda-build. 
+ + builder = ["", "## Package Plan ##\n"] + if prefix: + builder.append(f" environment location: {prefix}") + builder.append("") + print("\n".join(builder)) + + show_channel_urls = context.show_channel_urls + + def channel_str(rec): + if rec.get("schannel"): + return rec["schannel"] + if rec.get("url"): + return Channel(rec["url"]).canonical_name + if rec.get("channel"): + return Channel(rec["channel"]).canonical_name + return UNKNOWN_CHANNEL + + def channel_filt(s): + if show_channel_urls is False: + return "" + if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: + return "" + return s + + packages = defaultdict(lambda: "") + features = defaultdict(lambda: "") + channels = defaultdict(lambda: "") + + for prec in precs: + assert isinstance(prec, PackageRecord) + pkg = prec["name"] + channels[pkg] = channel_filt(channel_str(prec)) + packages[pkg] = prec["version"] + "-" + prec["build"] + features[pkg] = ",".join(prec.get("features") or ()) + + fmt = {} + if packages: + maxpkg = max(len(p) for p in packages) + 1 + maxver = max(len(p) for p in packages.values()) + maxfeatures = max(len(p) for p in features.values()) + maxchannels = max(len(p) for p in channels.values()) + for pkg in packages: + # That's right. I'm using old-style string formatting to generate a + # string with new-style string formatting. + fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" + if maxchannels: + fmt[pkg] += f" {{channel:<{maxchannels}}}" + if features[pkg]: + fmt[pkg] += f" [{{features:<{maxfeatures}}}]" + + lead = " " * 4 + + def format(s, pkg): + return lead + s.format( + pkg=pkg + ":", + vers=packages[pkg], + channel=channels[pkg], + features=features[pkg], + ) + + if packages: + print("\nThe following NEW packages will be INSTALLED:\n") + for pkg in sorted(packages): + print(format(fmt[pkg], pkg)) + print() diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index 72d62a377d..9744ca14b4 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import textwrap + SEPARATOR = "-" * 70 indent = lambda s: textwrap.fill(textwrap.dedent(s)) @@ -20,16 +21,20 @@ def __init__(self, original, *args, **kwargs): self.original = original def error_msg(self): - return "\n".join([ - SEPARATOR, - self.error_body(), - self.indented_exception(), - ]) + return "\n".join( + [ + SEPARATOR, + self.error_body(), + self.indented_exception(), + ] + ) def error_body(self): - return "\n".join([ - "Unable to parse meta.yaml file\n", - ]) + return "\n".join( + [ + "Unable to parse meta.yaml file\n", + ] + ) def indented_exception(self): orig = str(self.original) @@ -39,13 +44,17 @@ def indented_exception(self): class UnableToParseMissingJinja2(UnableToParse): def error_body(self): - return "\n".join([ - super().error_body(), - indent("""\ + return "\n".join( + [ + super().error_body(), + indent( + """\ It appears you are missing jinja2. Please install that package, then attempt to build. 
- """), - ]) + """ + ), + ] + ) class MissingDependency(CondaBuildException): @@ -61,7 +70,9 @@ def __init__(self, error, script, *args): class DependencyNeedsBuildingError(CondaBuildException): - def __init__(self, conda_exception=None, packages=None, subdir=None, *args, **kwargs): + def __init__( + self, conda_exception=None, packages=None, subdir=None, *args, **kwargs + ): self.subdir = subdir self.matchspecs = [] if packages: @@ -69,23 +80,23 @@ def __init__(self, conda_exception=None, packages=None, subdir=None, *args, **kw else: self.packages = packages or [] for line in str(conda_exception).splitlines(): - if not line.startswith(' - ') and (':' in line or ' -> ' not in line): + if not line.startswith(" - ") and (":" in line or " -> " not in line): continue - pkg = line.lstrip(' - ').split(' -> ')[-1] + pkg = line.lstrip(" - ").split(" -> ")[-1] self.matchspecs.append(pkg) - pkg = pkg.strip().split(' ')[0].split('=')[0].split('[')[0] + pkg = pkg.strip().split(" ")[0].split("=")[0].split("[")[0] self.packages.append(pkg) if not self.packages: - raise RuntimeError("failed to parse packages from exception:" - " {}".format(str(conda_exception))) + raise RuntimeError( + f"failed to parse packages from exception: {conda_exception}" + ) def __str__(self): return self.message @property def message(self): - return "Unsatisfiable dependencies for platform {}: {}".format(self.subdir, - set(self.matchspecs)) + return f"Unsatisfiable dependencies for platform {self.subdir}: {set(self.matchspecs)}" class RecipeError(CondaBuildException): @@ -93,25 +104,25 @@ class RecipeError(CondaBuildException): class BuildLockError(CondaBuildException): - """ Raised when we failed to acquire a lock. """ + """Raised when we failed to acquire a lock.""" class OverLinkingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overlinking check failed \n%s" % (error) + self.msg = f"overlinking check failed \n{error}" super().__init__(self.msg) class OverDependingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overdepending check failed \n%s" % (error) + self.msg = f"overdepending check failed \n{error}" super().__init__(self.msg) class RunPathError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "runpaths check failed \n%s" % (error) + self.msg = f"runpaths check failed \n{error}" super().__init__(self.msg) diff --git a/conda_build/features.py b/conda_build/features.py index 9fe1389e39..414b15333f 100644 --- a/conda_build/features.py +++ b/conda_build/features.py @@ -3,11 +3,10 @@ import os import sys - env_vars = [ - 'FEATURE_DEBUG', - 'FEATURE_NOMKL', - 'FEATURE_OPT', + "FEATURE_DEBUG", + "FEATURE_NOMKL", + "FEATURE_OPT", ] # list of features, where each element is a tuple(name, boolean), i.e. 
having @@ -15,7 +14,9 @@ feature_list = [] for key, value in os.environ.items(): if key in env_vars: - if value not in ('0', '1'): - sys.exit("Error: did not expect environment variable '%s' " - "being set to '%s' (not '0' or '1')" % (key, value)) + if value not in ("0", "1"): + sys.exit( + f"Error: did not expect environment variable '{key}' " + f"being set to '{value}' (not '0' or '1')" + ) feature_list.append((key[8:].lower(), bool(int(value)))) diff --git a/conda_build/gui-32.exe b/conda_build/gui-32.exe index bee7e543c4..289c77ca9c 100755 Binary files a/conda_build/gui-32.exe and b/conda_build/gui-32.exe differ diff --git a/conda_build/gui-64.exe b/conda_build/gui-64.exe index 366a721736..c6cdccd2c5 100755 Binary files a/conda_build/gui-64.exe and b/conda_build/gui-64.exe differ diff --git a/conda_build/index.py b/conda_build/index.py index 342c92ad16..3a2f9ab10b 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,123 +1,62 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -# Copyright (C) 2018 Anaconda, Inc -# SPDX-License-Identifier: Proprietary - -import bz2 -from collections import OrderedDict -import copy -from datetime import datetime -import functools -from itertools import groupby import json -from numbers import Number +import logging import os -from os.path import abspath, basename, getmtime, getsize, isdir, isfile, join, splitext, dirname -import subprocess -import sys import time -from uuid import uuid4 - -# Lots of conda internals here. Should refactor to use exports. -from conda.common.compat import ensure_binary - -import pytz -from jinja2 import Environment, PackageLoader -from tqdm import tqdm -import yaml -from yaml.constructor import ConstructorError -from yaml.parser import ParserError -from yaml.scanner import ScannerError -from yaml.reader import ReaderError - -import fnmatch from functools import partial -import logging -import conda_package_handling.api -from conda_package_handling.api import InvalidArchiveError - -from concurrent.futures import ProcessPoolExecutor -from concurrent.futures import Executor - -# BAD BAD BAD - conda internals -from conda.core.subdir_data import SubdirData -from conda.models.channel import Channel - -from conda_build import conda_interface, utils -from .conda_interface import MatchSpec, VersionOrder, human_bytes, context -from .conda_interface import CondaError, CondaHTTPError, get_index, url_path -from .conda_interface import TemporaryDirectory -from .conda_interface import Resolve -from .utils import (CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, FileNotFoundError, - JSONDecodeError, get_logger, glob) +from os.path import dirname + +from conda.base.context import context +from conda.core.index import get_index +from conda.exceptions import CondaHTTPError +from conda.utils import url_path +from conda_index.index import update_index as _update_index + +from . 
import utils +from .deprecations import deprecated +from .utils import ( + JSONDecodeError, + get_logger, +) log = get_logger(__name__) -# use this for debugging, because ProcessPoolExecutor isn't pdb/ipdb friendly -class DummyExecutor(Executor): - def map(self, func, *iterables): - for iterable in iterables: - for thing in iterable: - yield func(thing) - - -try: - from conda.base.constants import NAMESPACES_MAP, NAMESPACE_PACKAGE_NAMES -except ImportError: - NAMESPACES_MAP = { # base package name, namespace - "python": "python", - "r": "r", - "r-base": "r", - "mro-base": "r", - "mro-base_impl": "r", - "erlang": "erlang", - "java": "java", - "openjdk": "java", - "julia": "julia", - "latex": "latex", - "lua": "lua", - "nodejs": "js", - "perl": "perl", - "php": "php", - "ruby": "ruby", - "m2-base": "m2", - "msys2-conda-epoch": "m2w64", - } - NAMESPACE_PACKAGE_NAMES = frozenset(NAMESPACES_MAP) - NAMESPACES = frozenset(NAMESPACES_MAP.values()) - local_index_timestamp = 0 cached_index = None local_subdir = "" local_output_folder = "" cached_channels = [] -channel_data = {} - - -# TODO: support for libarchive seems to have broken ability to use multiple threads here. -# The new conda format is so much faster that it more than makes up for it. However, it -# would be nice to fix this at some point. -MAX_THREADS_DEFAULT = os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 -if sys.platform == 'win32': # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a - MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT) -LOCK_TIMEOUT_SECS = 3 * 3600 -LOCKFILE_NAME = ".lock" +_channel_data = {} +deprecated.constant("24.1", "24.7", "channel_data", _channel_data) # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" -def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, - omit_defaults=False, channel_urls=None, debug=False, verbose=True, - **kwargs): +def get_build_index( + subdir, + bldpkgs_dir, + output_folder=None, + clear_cache=False, + omit_defaults=False, + channel_urls=None, + debug=False, + verbose=True, + locking=None, + timeout=None, +): + """ + Used during package builds to create/get a channel including any local or + newly built packages. This function both updates and gets index data. + """ global local_index_timestamp global local_subdir global local_output_folder global cached_index global cached_channels - global channel_data + global _channel_data mtime = 0 channel_urls = list(utils.ensure_list(channel_urls)) @@ -126,17 +65,18 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, output_folder = dirname(bldpkgs_dir) # check file modification time - this is the age of our local index. 
- index_file = os.path.join(output_folder, subdir, 'repodata.json') + index_file = os.path.join(output_folder, subdir, "repodata.json") if os.path.isfile(index_file): mtime = os.path.getmtime(index_file) - if (clear_cache or - not os.path.isfile(index_file) or - local_subdir != subdir or - local_output_folder != output_folder or - mtime > local_index_timestamp or - cached_channels != channel_urls): - + if ( + clear_cache + or not os.path.isfile(index_file) + or local_subdir != subdir + or local_output_folder != output_folder + or mtime > local_index_timestamp + or cached_channels != channel_urls + ): # priority: (local as either croot or output_folder IF NOT EXPLICITLY IN CHANNEL ARGS), # then channels passed as args (if local in this, it remains in same order), # then channels from condarc. @@ -148,42 +88,50 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, elif verbose: log_context = partial(utils.LoggingContext, logging.WARN, loggers=loggers) else: - log_context = partial(utils.LoggingContext, logging.CRITICAL + 1, loggers=loggers) + log_context = partial( + utils.LoggingContext, logging.CRITICAL + 1, loggers=loggers + ) with log_context(): # this is where we add the "local" channel. It's a little smarter than conda, because # conda does not know about our output_folder when it is not the default setting. if os.path.isdir(output_folder): local_path = url_path(output_folder) # replace local with the appropriate real channel. Order is maintained. - urls = [url if url != 'local' else local_path for url in urls] + urls = [url if url != "local" else local_path for url in urls] if local_path not in urls: urls.insert(0, local_path) _ensure_valid_channel(output_folder, subdir) - update_index(output_folder, verbose=debug) + _delegated_update_index(output_folder, verbose=debug) # replace noarch with native subdir - this ends up building an index with both the # native content and the noarch content. - if subdir == 'noarch': - subdir = conda_interface.subdir + if subdir == "noarch": + subdir = context.subdir try: - cached_index = get_index(channel_urls=urls, - prepend=not omit_defaults, - use_local=False, - use_cache=context.offline, - platform=subdir) + # get_index() is like conda reading the index, not conda_index + # creating a new index. + cached_index = get_index( + channel_urls=urls, + prepend=not omit_defaults, + use_local=False, + use_cache=context.offline, + platform=subdir, + ) # HACK: defaults does not have the many subfolders we support. Omit it and # try again. 
except CondaHTTPError: - if 'defaults' in urls: - urls.remove('defaults') - cached_index = get_index(channel_urls=urls, - prepend=omit_defaults, - use_local=False, - use_cache=context.offline, - platform=subdir) - - expanded_channels = {rec.channel for rec in cached_index.values()} + if "defaults" in urls: + urls.remove("defaults") + cached_index = get_index( + channel_urls=urls, + prepend=omit_defaults, + use_local=False, + use_cache=context.offline, + platform=subdir, + ) + + expanded_channels = {rec.channel for rec in cached_index} superchannel = {} # we need channeldata.json too, as it is a more reliable source of run_exports data @@ -192,17 +140,20 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, location = channel.location if utils.on_win: location = location.lstrip("/") - elif (not os.path.isabs(channel.location) and - os.path.exists(os.path.join(os.path.sep, channel.location))): + elif not os.path.isabs(channel.location) and os.path.exists( + os.path.join(os.path.sep, channel.location) + ): location = os.path.join(os.path.sep, channel.location) - channeldata_file = os.path.join(location, channel.name, 'channeldata.json') + channeldata_file = os.path.join( + location, channel.name, "channeldata.json" + ) retry = 0 max_retries = 1 if os.path.isfile(channeldata_file): while retry < max_retries: try: with open(channeldata_file, "r+") as f: - channel_data[channel.name] = json.load(f) + _channel_data[channel.name] = json.load(f) break except (OSError, JSONDecodeError): time.sleep(0.2) @@ -211,1212 +162,69 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, # download channeldata.json for url if not context.offline: try: - channel_data[channel.name] = utils.download_channeldata(channel.base_url + '/channeldata.json') + _channel_data[channel.name] = utils.download_channeldata( + channel.base_url + "/channeldata.json" + ) except CondaHTTPError: continue # collapse defaults metachannel back into one superchannel, merging channeldata - if channel.base_url in context.default_channels and channel_data.get(channel.name): - packages = superchannel.get('packages', {}) - packages.update(channel_data[channel.name]) - superchannel['packages'] = packages - channel_data['defaults'] = superchannel + if channel.base_url in context.default_channels and _channel_data.get( + channel.name + ): + packages = superchannel.get("packages", {}) + packages.update(_channel_data[channel.name]) + superchannel["packages"] = packages + _channel_data["defaults"] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir local_output_folder = output_folder cached_channels = channel_urls - return cached_index, local_index_timestamp, channel_data + return cached_index, local_index_timestamp, _channel_data def _ensure_valid_channel(local_folder, subdir): - for folder in {subdir, 'noarch'}: + for folder in {subdir, "noarch"}: path = os.path.join(local_folder, folder) if not os.path.isdir(path): os.makedirs(path) -def update_index(dir_path, check_md5=False, channel_name=None, patch_generator=None, threads=MAX_THREADS_DEFAULT, - verbose=False, progress=False, hotfix_source_repo=None, subdirs=None, warn=True, - current_index_versions=None, debug=False, index_file=None): +def _delegated_update_index( + dir_path, + check_md5=False, + channel_name=None, + patch_generator=None, + threads=1, + verbose=False, + progress=False, + subdirs=None, + warn=True, + current_index_versions=None, + debug=False, +): """ - If dir_path contains a 
directory named 'noarch', the path tree therein is treated - as though it's a full channel, with a level of subdirs, each subdir having an update - to repodata.json. The full channel will also have a channeldata.json file. - - If dir_path does not contain a directory named 'noarch', but instead contains at least - one '*.tar.bz2' file, the directory is assumed to be a standard subdir, and only repodata.json - information will be updated. - + update_index as called by conda-build, delegating to standalone conda-index. + Needed to allow update_index calls on single subdir. """ - base_path, dirname = os.path.split(dir_path) + # conda-build calls update_index on a single subdir internally, but + # conda-index expects to index every subdir under dir_path + parent_path, dirname = os.path.split(dir_path) if dirname in utils.DEFAULT_SUBDIRS: - if warn: - log.warn("The update_index function has changed to index all subdirs at once. You're pointing it at a single subdir. " - "Please update your code to point it at the channel root, rather than a subdir.") - return update_index(base_path, check_md5=check_md5, channel_name=channel_name, - threads=threads, verbose=verbose, progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions) - return ChannelIndex(dir_path, channel_name, subdirs=subdirs, threads=threads, - deep_integrity_check=check_md5, debug=debug).index( - patch_generator=patch_generator, verbose=verbose, - progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions, - index_file=index_file) - - -def _determine_namespace(info): - if info.get('namespace'): - namespace = info['namespace'] - else: - depends_names = set() - for spec in info.get('depends', []): - try: - depends_names.add(MatchSpec(spec).name) - except CondaError: - pass - spaces = depends_names & NAMESPACE_PACKAGE_NAMES - if len(spaces) == 1: - namespace = NAMESPACES_MAP[spaces.pop()] - else: - namespace = "global" - info['namespace'] = namespace - - if not info.get('namespace_in_name') and '-' in info['name']: - namespace_prefix, reduced_name = info['name'].split('-', 1) - if namespace_prefix == namespace: - info['name_in_channel'] = info['name'] - info['name'] = reduced_name - - return namespace, info.get('name_in_channel', info['name']), info['name'] - - -def _make_seconds(timestamp): - timestamp = int(timestamp) - if timestamp > 253402300799: # 9999-12-31 - timestamp //= 1000 # convert milliseconds to seconds; see conda/conda-build#1988 - return timestamp - - -# ========================================================================== - - -REPODATA_VERSION = 1 -CHANNELDATA_VERSION = 1 -REPODATA_JSON_FN = 'repodata.json' -REPODATA_FROM_PKGS_JSON_FN = 'repodata_from_packages.json' -CHANNELDATA_FIELDS = ( - "description", - "dev_url", - "doc_url", - "doc_source_url", - "home", - "license", - "reference_package", - "source_url", - "source_git_url", - "source_git_tag", - "source_git_rev", - "summary", - "version", - "subdirs", - "icon_url", - "icon_hash", # "md5:abc123:12" - "run_exports", - "binary_prefix", - "text_prefix", - "activate.d", - "deactivate.d", - "pre_link", - "post_link", - "pre_unlink", - "tags", - "identifiers", - "keywords", - "recipe_origin", - "commits", -) - - -def _clear_newline_chars(record, field_name): - if field_name in record: - try: - record[field_name] = record[field_name].strip().replace('\n', ' ') - except AttributeError: - # sometimes description gets added as a list instead of just a string - 
record[field_name] = record[field_name][0].strip().replace('\n', ' ') - - -def _apply_instructions(subdir, repodata, instructions): - repodata.setdefault("removed", []) - utils.merge_or_update_dict(repodata.get('packages', {}), instructions.get('packages', {}), merge=False, - add_missing_keys=False) - # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume - # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes - new_pkg_fixes = { - k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get('packages', {}).items() - } - - utils.merge_or_update_dict(repodata.get('packages.conda', {}), new_pkg_fixes, merge=False, - add_missing_keys=False) - utils.merge_or_update_dict(repodata.get('packages.conda', {}), instructions.get('packages.conda', {}), merge=False, - add_missing_keys=False) - - for fn in instructions.get('revoke', ()): - for key in ('packages', 'packages.conda'): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == 'packages.conda': - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - if fn in repodata[key]: - repodata[key][fn]['revoked'] = True - repodata[key][fn]['depends'].append('package_has_been_revoked') - - for fn in instructions.get('remove', ()): - for key in ('packages', 'packages.conda'): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == 'packages.conda': - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - popped = repodata[key].pop(fn, None) - if popped: - repodata["removed"].append(fn) - repodata["removed"].sort() - - return repodata - - -def _get_jinja2_environment(): - def _filter_strftime(dt, dt_format): - if isinstance(dt, Number): - if dt > 253402300799: # 9999-12-31 - dt //= 1000 # convert milliseconds to seconds; see #1988 - dt = datetime.utcfromtimestamp(dt).replace(tzinfo=pytz.timezone("UTC")) - return dt.strftime(dt_format) - - def _filter_add_href(text, link, **kwargs): - if link: - kwargs_list = [f'href="{link}"'] - kwargs_list.append(f'alt="{text}"') - kwargs_list += [f'{k}="{v}"' for k, v in kwargs.items()] - return '{}'.format(' '.join(kwargs_list), text) - else: - return text - - environment = Environment( - loader=PackageLoader('conda_build', 'templates'), - ) - environment.filters['human_bytes'] = human_bytes - environment.filters['strftime'] = _filter_strftime - environment.filters['add_href'] = _filter_add_href - environment.trim_blocks = True - environment.lstrip_blocks = True - - return environment - - -def _maybe_write(path, content, write_newline_end=False, content_is_binary=False): - # Create the temp file next "path" so that we can use an atomic move, see - # https://github.com/conda/conda-build/issues/3833 - temp_path = f'{path}.{uuid4()}' - - if not content_is_binary: - content = ensure_binary(content) - with open(temp_path, 'wb') as fh: - fh.write(content) - if write_newline_end: - fh.write(b'\n') - if isfile(path): - if utils.md5_file(temp_path) == utils.md5_file(path): - # No need to change mtimes. The contents already match. 
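The removed _maybe_write helper that begins just above relies on a write-to-temp-then-move pattern; the following is a simplified, self-contained restatement of that idea (not the original code). It swaps the utils.md5_file/move_with_fallback helpers for plain hashlib and os.replace and drops the newline/binary handling.

import hashlib
import os
from uuid import uuid4

def write_if_changed(path, content):
    """Write content atomically next to path; skip the move when nothing changed."""
    temp_path = f"{path}.{uuid4()}"
    with open(temp_path, "wb") as fh:
        fh.write(content)
    if os.path.isfile(path):
        with open(path, "rb") as fh:
            if hashlib.md5(fh.read()).digest() == hashlib.md5(content).digest():
                os.unlink(temp_path)  # contents already match; keep the old mtime
                return False
    os.replace(temp_path, path)  # atomic move on the same filesystem
    return True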
- os.unlink(temp_path) - return False - # log.info("writing %s", path) - utils.move_with_fallback(temp_path, path) - return True - - -def _make_build_string(build, build_number): - build_number_as_string = str(build_number) - if build.endswith(build_number_as_string): - build = build[:-len(build_number_as_string)] - build = build.rstrip("_") - build_string = build - return build_string - - -def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): - """ - The following dependencies do not exist in the channel and are not declared - as external dependencies: - - dependency1: - - subdir/fn1.tar.bz2 - - subdir/fn2.tar.bz2 - dependency2: - - subdir/fn3.tar.bz2 - - subdir/fn4.tar.bz2 - - The associated packages are being removed from the index. - """ - - if missing_dependencies: - builder = [ - "WARNING: The following dependencies do not exist in the channel", - " and are not declared as external dependencies:" - ] - for dep_name in sorted(missing_dependencies): - builder.append(" %s" % dep_name) - for subdir_fn in sorted(missing_dependencies[dep_name]): - builder.append(" - %s" % subdir_fn) - subdir, fn = subdir_fn.split("/") - popped = patched_repodata["packages"].pop(fn, None) - if popped: - patched_repodata["removed"].append(fn) - - builder.append("The associated packages are being removed from the index.") - builder.append('') - log.warn("\n".join(builder)) - - -def _cache_post_install_details(paths_cache_path, post_install_cache_path): - post_install_details_json = {'binary_prefix': False, 'text_prefix': False, - 'activate.d': False, 'deactivate.d': False, - 'pre_link': False, 'post_link': False, 'pre_unlink': False} - if os.path.lexists(paths_cache_path): - with open(paths_cache_path) as f: - paths = json.load(f).get('paths', []) - - # get embedded prefix data from paths.json - for f in paths: - if f.get('prefix_placeholder'): - if f.get('file_mode') == 'binary': - post_install_details_json['binary_prefix'] = True - elif f.get('file_mode') == 'text': - post_install_details_json['text_prefix'] = True - # check for any activate.d/deactivate.d scripts - for k in ('activate.d', 'deactivate.d'): - if not post_install_details_json.get(k) and f['_path'].startswith('etc/conda/%s' % k): - post_install_details_json[k] = True - # check for any link scripts - for pat in ('pre-link', 'post-link', 'pre-unlink'): - if not post_install_details_json.get(pat) and fnmatch.fnmatch(f['_path'], '*/.*-%s.*' % pat): - post_install_details_json[pat.replace("-", "_")] = True - - with open(post_install_cache_path, 'w') as fh: - json.dump(post_install_details_json, fh) - - -def _cache_recipe(tmpdir, recipe_cache_path): - recipe_path_search_order = ( - 'info/recipe/meta.yaml.rendered', - 'info/recipe/meta.yaml', - 'info/meta.yaml', - ) - for path in recipe_path_search_order: - recipe_path = os.path.join(tmpdir, path) - if os.path.lexists(recipe_path): - break - recipe_path = None - - recipe_json = {} - if recipe_path: - with open(recipe_path) as f: - try: - recipe_json = yaml.safe_load(f) - except (ConstructorError, ParserError, ScannerError, ReaderError): - pass - try: - recipe_json_str = json.dumps(recipe_json) - except TypeError: - recipe_json.get('requirements', {}).pop('build') - recipe_json_str = json.dumps(recipe_json) - with open(recipe_cache_path, 'w') as fh: - fh.write(recipe_json_str) - return recipe_json - - -def _cache_run_exports(tmpdir, run_exports_cache_path): - run_exports = {} - try: - with open(os.path.join(tmpdir, 'info', 'run_exports.json')) as f: - run_exports = json.load(f) - 
except (OSError, FileNotFoundError): - try: - with open(os.path.join(tmpdir, 'info', 'run_exports.yaml')) as f: - run_exports = yaml.safe_load(f) - except (OSError, FileNotFoundError): - log.debug("%s has no run_exports file (this is OK)" % tmpdir) - with open(run_exports_cache_path, 'w') as fh: - json.dump(run_exports, fh) - - -def _cache_icon(tmpdir, recipe_json, icon_cache_path): - # If a conda package contains an icon, also extract and cache that in an .icon/ - # directory. The icon file name is the name of the package, plus the extension - # of the icon file as indicated by the meta.yaml `app/icon` key. - # apparently right now conda-build renames all icons to 'icon.png' - # What happens if it's an ico file, or a svg file, instead of a png? Not sure! - app_icon_path = recipe_json.get('app', {}).get('icon') - if app_icon_path: - icon_path = os.path.join(tmpdir, 'info', 'recipe', app_icon_path) - if not os.path.lexists(icon_path): - icon_path = os.path.join(tmpdir, 'info', 'icon.png') - if os.path.lexists(icon_path): - icon_cache_path += splitext(app_icon_path)[-1] - utils.move_with_fallback(icon_path, icon_cache_path) - - -def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths): - environment = _get_jinja2_environment() - template = environment.get_template('subdir-index.html.j2') - rendered_html = template.render( - title="{}/{}".format(channel_name or '', subdir), - packages=repodata_packages, - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - extra_paths=extra_paths, - ) - return rendered_html - - -def _make_channeldata_index_html(channel_name, channeldata): - environment = _get_jinja2_environment() - template = environment.get_template('channeldata-index.html.j2') - rendered_html = template.render( - title=channel_name, - packages=channeldata['packages'], - subdirs=channeldata['subdirs'], - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - ) - return rendered_html - - -def _get_source_repo_git_info(path): - is_repo = subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"], cwd=path) - if is_repo.strip().decode('utf-8') == "true": - output = subprocess.check_output(['git', 'log', - "--pretty=format:'%h|%ad|%an|%s'", - "--date=unix"], cwd=path) - commits = [] - for line in output.decode("utf-8").strip().splitlines(): - _hash, _time, _author, _desc = line.split("|") - commits.append({"hash": _hash, "timestamp": int(_time), - "author": _author, "description": _desc}) - return commits - - -def _cache_info_file(tmpdir, info_fn, cache_path): - info_path = os.path.join(tmpdir, 'info', info_fn) - if os.path.lexists(info_path): - utils.move_with_fallback(info_path, cache_path) - - -def _alternate_file_extension(fn): - cache_fn = fn - for ext in CONDA_PACKAGE_EXTENSIONS: - cache_fn = cache_fn.replace(ext, '') - other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, '')} - return cache_fn + next(iter(other_ext)) - - -def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): - packages = {} - conda_packages = {} - if file_path: - with open(file_path) as fi: - packages = json.load(fi) - recs = json.load(fi) - for k, v in recs.items(): - if k.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[k] = v - elif k.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[k] = v - if not repodata: - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - "packages.conda": conda_packages, - } - - channel = 
Channel('https://conda.anaconda.org/dummy-channel/%s' % subdir) - sd = SubdirData(channel) - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in precs or sd._package_records} - r = Resolve(index, channels=(channel,)) - return r - - -def _get_newest_versions(r, pins={}): - groups = {} - for g_name, g_recs in r.groups.items(): - if g_name in pins: - matches = [] - for pin in pins[g_name]: - version = r.find_matches(MatchSpec(f'{g_name}={pin}'))[0].version - matches.extend(r.find_matches(MatchSpec(f'{g_name}={version}'))) - else: - version = r.groups[g_name][0].version - matches = r.find_matches(MatchSpec(f'{g_name}={version}')) - groups[g_name] = matches - return [pkg for group in groups.values() for pkg in group] - - -def _add_missing_deps(new_r, original_r): - """For each package in new_r, if any deps are not satisfiable, backfill them from original_r.""" - - expanded_groups = copy.deepcopy(new_r.groups) - seen_specs = set() - for g_name, g_recs in new_r.groups.items(): - for g_rec in g_recs: - for dep_spec in g_rec.depends: - if dep_spec in seen_specs: - continue - ms = MatchSpec(dep_spec) - if not new_r.find_matches(ms): - matches = original_r.find_matches(ms) - if matches: - version = matches[0].version - expanded_groups[ms.name] = ( - set(expanded_groups.get(ms.name, [])) | - set(original_r.find_matches(MatchSpec(f'{ms.name}={version}')))) - seen_specs.add(dep_spec) - return [pkg for group in expanded_groups.values() for pkg in group] - - -def _add_prev_ver_for_features(new_r, orig_r): - expanded_groups = copy.deepcopy(new_r.groups) - for g_name in new_r.groups: - if not any(m.track_features or m.features for m in new_r.groups[g_name]): - # no features so skip - continue - - # versions are sorted here so this is the latest - latest_version = VersionOrder(str(new_r.groups[g_name][0].version)) - if g_name in orig_r.groups: - # now we iterate through the list to find the next to latest - # without a feature - keep_m = None - for i in range(len(orig_r.groups[g_name])): - _m = orig_r.groups[g_name][i] - if ( - VersionOrder(str(_m.version)) <= latest_version and - not (_m.track_features or _m.features) - ): - keep_m = _m - break - if keep_m is not None: - expanded_groups[g_name] = {keep_m} | set(expanded_groups.get(g_name, [])) - - return [pkg for group in expanded_groups.values() for pkg in group] - - -def _shard_newest_packages(subdir, r, pins=None): - """Captures only the newest versions of software in the resolve object. - - For things where more than one version is supported simultaneously (like Python), - pass pins as a dictionary, with the key being the package name, and the value being - a list of supported versions. 
For example: - - {'python': ["2.7", "3.6"]} - """ - groups = {} - pins = pins or {} - for g_name, g_recs in r.groups.items(): - # always do the latest implicitly - version = r.groups[g_name][0].version - matches = set(r.find_matches(MatchSpec(f'{g_name}={version}'))) - if g_name in pins: - for pin_value in pins[g_name]: - version = r.find_matches(MatchSpec(f'{g_name}={pin_value}'))[0].version - matches.update(r.find_matches(MatchSpec(f'{g_name}={version}'))) - groups[g_name] = matches - - # add the deps of the stuff in the index - new_r = _get_resolve_object(subdir, precs=[pkg for group in groups.values() for pkg in group]) - new_r = _get_resolve_object(subdir, precs=_add_missing_deps(new_r, r)) - - # now for any pkg with features, add at least one previous version - # also return - return set(_add_prev_ver_for_features(new_r, r)) - - -def _build_current_repodata(subdir, repodata, pins): - r = _get_resolve_object(subdir, repodata=repodata) - keep_pkgs = _shard_newest_packages(subdir, r, pins) - new_repodata = {k: repodata[k] for k in set(repodata.keys()) - {'packages', 'packages.conda'}} - packages = {} - conda_packages = {} - for keep_pkg in keep_pkgs: - if keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[keep_pkg.fn] = repodata['packages.conda'][keep_pkg.fn] - # in order to prevent package churn we consider the md5 for the .tar.bz2 that matches the .conda file - # This holds when .conda files contain the same files as .tar.bz2, which is an assumption we'll make - # until it becomes more prevalent that people provide only .conda files and just skip .tar.bz2 - counterpart = keep_pkg.fn.replace(CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) - conda_packages[keep_pkg.fn]['legacy_bz2_md5'] = repodata['packages'].get(counterpart, {}).get('md5') - elif keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[keep_pkg.fn] = repodata['packages'][keep_pkg.fn] - new_repodata['packages'] = packages - new_repodata['packages.conda'] = conda_packages - return new_repodata - - -class ChannelIndex: - - def __init__(self, channel_root, channel_name, subdirs=None, threads=MAX_THREADS_DEFAULT, - deep_integrity_check=False, debug=False): - self.channel_root = abspath(channel_root) - self.channel_name = channel_name or basename(channel_root.rstrip('/')) - self._subdirs = subdirs - self.thread_executor = ( - DummyExecutor() - if debug or sys.version_info.major == 2 or threads == 1 - else ProcessPoolExecutor(threads) - ) - self.deep_integrity_check = deep_integrity_check - - def index(self, patch_generator, hotfix_source_repo=None, verbose=False, progress=False, - current_index_versions=None, index_file=None): - if verbose: - level = logging.DEBUG - else: - level = logging.ERROR - - with utils.LoggingContext(level, loggers=[__name__]): - if not self._subdirs: - detected_subdirs = { - subdir.name - for subdir in os.scandir(self.channel_root) - if subdir.name in utils.DEFAULT_SUBDIRS and subdir.is_dir() - } - log.debug("found subdirs %s" % detected_subdirs) - self.subdirs = subdirs = sorted(detected_subdirs | {'noarch'}) - else: - self.subdirs = subdirs = sorted(set(self._subdirs) | {'noarch'}) - - # Step 1. Lock local channel. - with utils.try_acquire_locks([utils.get_lock(self.channel_root)], timeout=900): - channel_data = {} - channeldata_file = os.path.join(self.channel_root, 'channeldata.json') - if os.path.isfile(channeldata_file): - with open(channeldata_file) as f: - channel_data = json.load(f) - # Step 2. 
Collect repodata from packages, save to pkg_repodata.json file - with tqdm(total=len(subdirs), disable=(verbose or not progress), leave=False) as t: - for subdir in subdirs: - t.set_description("Subdir: %s" % subdir) - t.update() - with tqdm(total=8, disable=(verbose or not progress), leave=False) as t2: - t2.set_description("Gathering repodata") - t2.update() - _ensure_valid_channel(self.channel_root, subdir) - repodata_from_packages = self.index_subdir( - subdir, verbose=verbose, progress=progress, - index_file=index_file) - - t2.set_description("Writing pre-patch repodata") - t2.update() - self._write_repodata(subdir, repodata_from_packages, - REPODATA_FROM_PKGS_JSON_FN) - - # Step 3. Apply patch instructions. - t2.set_description("Applying patch instructions") - t2.update() - patched_repodata, patch_instructions = self._patch_repodata( - subdir, repodata_from_packages, patch_generator) - - # Step 4. Save patched and augmented repodata. - # If the contents of repodata have changed, write a new repodata.json file. - # Also create associated index.html. - - t2.set_description("Writing patched repodata") - t2.update() - self._write_repodata(subdir, patched_repodata, REPODATA_JSON_FN) - t2.set_description("Building current_repodata subset") - t2.update() - current_repodata = _build_current_repodata(subdir, patched_repodata, - pins=current_index_versions) - t2.set_description("Writing current_repodata subset") - t2.update() - self._write_repodata(subdir, current_repodata, json_filename="current_repodata.json") - - t2.set_description("Writing subdir index HTML") - t2.update() - self._write_subdir_index_html(subdir, patched_repodata) - - t2.set_description("Updating channeldata") - t2.update() - self._update_channeldata(channel_data, patched_repodata, subdir) - - # Step 7. Create and write channeldata. - self._write_channeldata_index_html(channel_data) - self._write_channeldata(channel_data) - - def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): - subdir_path = join(self.channel_root, subdir) - self._ensure_dirs(subdir) - repodata_json_path = join(subdir_path, REPODATA_FROM_PKGS_JSON_FN) - - if verbose: - log.info("Building repodata for %s" % subdir_path) - - # gather conda package filenames in subdir - # we'll process these first, because reading their metadata is much faster - fns_in_subdir = {fn for fn in os.listdir(subdir_path) if fn.endswith('.conda') or fn.endswith('.tar.bz2')} - - # load current/old repodata - try: - with open(repodata_json_path) as fh: - old_repodata = json.load(fh) or {} - except (OSError, JSONDecodeError): - # log.info("no repodata found at %s", repodata_json_path) - old_repodata = {} - - old_repodata_packages = old_repodata.get("packages", {}) - old_repodata_conda_packages = old_repodata.get("packages.conda", {}) - old_repodata_fns = set(old_repodata_packages) | set(old_repodata_conda_packages) - - # Load stat cache. 
The stat cache has the form - # { - # 'package_name.tar.bz2': { - # 'mtime': 123456, - # 'md5': 'abd123', - # }, - # } - stat_cache_path = join(subdir_path, '.cache', 'stat.json') - try: - with open(stat_cache_path) as fh: - stat_cache = json.load(fh) or {} - except: - stat_cache = {} - - stat_cache_original = stat_cache.copy() - - remove_set = old_repodata_fns - fns_in_subdir - ignore_set = set(old_repodata.get('removed', [])) - try: - # calculate all the paths and figure out what we're going to do with them - # add_set: filenames that aren't in the current/old repodata, but exist in the subdir - if index_file: - with open(index_file) as fin: - add_set = set() - for line in fin: - fn_subdir, fn = line.strip().split('/') - if fn_subdir != subdir: - continue - if fn.endswith('.conda') or fn.endswith('.tar.bz2'): - add_set.add(fn) - else: - add_set = fns_in_subdir - old_repodata_fns - - add_set -= ignore_set - - # update_set: Filenames that are in both old repodata and new repodata, - # and whose contents have changed based on file size or mtime. We're - # not using md5 here because it takes too long. If needing to do full md5 checks, - # use the --deep-integrity-check flag / self.deep_integrity_check option. - update_set = self._calculate_update_set( - subdir, fns_in_subdir, old_repodata_fns, stat_cache, - verbose=verbose, progress=progress - ) - # unchanged_set: packages in old repodata whose information can carry straight - # across to new repodata - unchanged_set = set(old_repodata_fns - update_set - remove_set - ignore_set) - - assert isinstance(unchanged_set, set) # faster `in` queries - - # clean up removed files - removed_set = (old_repodata_fns - fns_in_subdir) - for fn in removed_set: - if fn in stat_cache: - del stat_cache[fn] - - new_repodata_packages = {k: v for k, v in old_repodata.get('packages', {}).items() if k in unchanged_set} - new_repodata_conda_packages = {k: v for k, v in old_repodata.get('packages.conda', {}).items() if k in unchanged_set} - - for k in sorted(unchanged_set): - if not (k in new_repodata_packages or k in new_repodata_conda_packages): - fn, rec = ChannelIndex._load_index_from_cache(self.channel_root, subdir, fn, stat_cache) - # this is how we pass an exception through. When fn == rec, there's been a problem, - # and we need to reload this file - if fn == rec: - update_set.add(fn) - else: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - new_repodata_packages[fn] = rec - else: - new_repodata_conda_packages[fn] = rec - - # Invalidate cached files for update_set. - # Extract and cache update_set and add_set, then add to new_repodata_packages. - # This is also where we update the contents of the stat_cache for successfully - # extracted packages. - # Sorting here prioritizes .conda files ('c') over .tar.bz2 files ('b') - hash_extract_set = (*add_set, *update_set) - - extract_func = functools.partial(ChannelIndex._extract_to_cache, - self.channel_root, subdir) - # split up the set by .conda packages first, then .tar.bz2. This avoids race conditions - # with execution in parallel that would end up in the same place. 
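A standalone restatement (not from the patch) of the mtime/size test described in the update_set comment above and implemented by _calculate_update_set further below; the cache entry shown is invented.

import os

def needs_update(fn, subdir_path, stat_cache):
    """True when fn is not cached yet or its mtime/size no longer match the cache."""
    cached = stat_cache.get(fn)
    if cached is None:
        return True
    st = os.stat(os.path.join(subdir_path, fn))
    return int(st.st_mtime) != int(cached["mtime"]) or st.st_size != cached["size"]

# Invented cache entry in the shape the code stores (mtime and size, no md5):
stat_cache = {"foo-1.0-0.tar.bz2": {"mtime": 123456, "size": 4242}}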
- for conda_format in tqdm(CONDA_PACKAGE_EXTENSIONS, desc="File format", - disable=(verbose or not progress), leave=False): - for fn, mtime, size, index_json in tqdm( - self.thread_executor.map( - extract_func, - (fn for fn in hash_extract_set if fn.endswith(conda_format))), - desc="hash & extract packages for %s" % subdir, - disable=(verbose or not progress), leave=False): - - # fn can be None if the file was corrupt or no longer there - if fn and mtime: - stat_cache[fn] = {'mtime': int(mtime), 'size': size} - if index_json: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - new_repodata_conda_packages[fn] = index_json - else: - new_repodata_packages[fn] = index_json - else: - log.error("Package at %s did not contain valid index.json data. Please" - " check the file and remove/redownload if necessary to obtain " - "a valid package." % os.path.join(subdir_path, fn)) - - new_repodata = { - 'packages': new_repodata_packages, - 'packages.conda': new_repodata_conda_packages, - 'info': { - 'subdir': subdir, - }, - 'repodata_version': REPODATA_VERSION, - 'removed': sorted(list(ignore_set)) - } - finally: - if stat_cache != stat_cache_original: - # log.info("writing stat cache to %s", stat_cache_path) - with open(stat_cache_path, 'w') as fh: - json.dump(stat_cache, fh) - return new_repodata - - def _ensure_dirs(self, subdir): - # Create all cache directories in the subdir. - ensure = lambda path: isdir(path) or os.makedirs(path) - cache_path = join(self.channel_root, subdir, '.cache') - ensure(cache_path) - ensure(join(cache_path, 'index')) - ensure(join(cache_path, 'about')) - ensure(join(cache_path, 'paths')) - ensure(join(cache_path, 'recipe')) - ensure(join(cache_path, 'run_exports')) - ensure(join(cache_path, 'post_install')) - ensure(join(cache_path, 'icon')) - ensure(join(self.channel_root, 'icons')) - ensure(join(cache_path, 'recipe_log')) - - def _calculate_update_set(self, subdir, fns_in_subdir, old_repodata_fns, stat_cache, - verbose=False, progress=True): - # Determine the packages that already exist in repodata, but need to be updated. - # We're not using md5 here because it takes too long. - candidate_fns = fns_in_subdir & old_repodata_fns - subdir_path = join(self.channel_root, subdir) - - update_set = set() - for fn in tqdm(iter(candidate_fns), desc="Finding updated files", - disable=(verbose or not progress), leave=False): - if fn not in stat_cache: - update_set.add(fn) - else: - stat_result = os.stat(join(subdir_path, fn)) - if (int(stat_result.st_mtime) != int(stat_cache[fn]['mtime']) or - stat_result.st_size != stat_cache[fn]['size']): - update_set.add(fn) - return update_set - - @staticmethod - def _extract_to_cache(channel_root, subdir, fn, second_try=False): - # This method WILL reread the tarball. Probably need another one to exit early if - # there are cases where it's fine not to reread. Like if we just rebuild repodata - # from the cached files, but don't use the existing repodata.json as a starting point. - subdir_path = join(channel_root, subdir) - - # allow .conda files to reuse cache from .tar.bz2 and vice-versa. - # Assumes that .tar.bz2 and .conda files have exactly the same - # contents. This is convention, but not guaranteed, nor checked. 
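The cache sharing mentioned in the comment above hinges on flipping the package extension; this standalone sketch (not from the patch) assumes the extension pair is exactly ".tar.bz2" and ".conda" and uses an invented filename.

def alternate_extension(fn):
    """Return the sibling filename in the other conda package format."""
    if fn.endswith(".tar.bz2"):
        return fn[: -len(".tar.bz2")] + ".conda"
    if fn.endswith(".conda"):
        return fn[: -len(".conda")] + ".tar.bz2"
    return fn

print(alternate_extension("numpy-1.24.0-py311_0.conda"))
# numpy-1.24.0-py311_0.tar.bz2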
- alternate_cache_fn = _alternate_file_extension(fn) - cache_fn = fn - - abs_fn = os.path.join(subdir_path, fn) - - stat_result = os.stat(abs_fn) - size = stat_result.st_size - mtime = stat_result.st_mtime - retval = fn, mtime, size, None - - index_cache_path = join(subdir_path, '.cache', 'index', cache_fn + '.json') - about_cache_path = join(subdir_path, '.cache', 'about', cache_fn + '.json') - paths_cache_path = join(subdir_path, '.cache', 'paths', cache_fn + '.json') - recipe_cache_path = join(subdir_path, '.cache', 'recipe', cache_fn + '.json') - run_exports_cache_path = join(subdir_path, '.cache', 'run_exports', cache_fn + '.json') - post_install_cache_path = join(subdir_path, '.cache', 'post_install', cache_fn + '.json') - icon_cache_path = join(subdir_path, '.cache', 'icon', cache_fn) - - log.debug("hashing, extracting, and caching %s" % fn) - - alternate_cache = False - if (not os.path.exists(index_cache_path) and - os.path.exists(index_cache_path.replace(fn, alternate_cache_fn))): - alternate_cache = True - - try: - # allow .tar.bz2 files to use the .conda cache, but not vice-versa. - # .conda readup is very fast (essentially free), but .conda files come from - # converting .tar.bz2 files, which can go wrong. Forcing extraction for - # .conda files gives us a check on the validity of that conversion. - if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile(index_cache_path): - with open(index_cache_path) as f: - index_json = json.load(f) - elif not alternate_cache and (second_try or not os.path.exists(index_cache_path)): - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(abs_fn, dest_dir=tmpdir, components="info") - index_file = os.path.join(tmpdir, 'info', 'index.json') - if not os.path.exists(index_file): - return retval - with open(index_file) as f: - index_json = json.load(f) - - _cache_info_file(tmpdir, 'about.json', about_cache_path) - _cache_info_file(tmpdir, 'paths.json', paths_cache_path) - _cache_info_file(tmpdir, 'recipe_log.json', paths_cache_path) - _cache_run_exports(tmpdir, run_exports_cache_path) - _cache_post_install_details(paths_cache_path, post_install_cache_path) - recipe_json = _cache_recipe(tmpdir, recipe_cache_path) - _cache_icon(tmpdir, recipe_json, icon_cache_path) - - # decide what fields to filter out, like has_prefix - filter_fields = { - 'arch', - 'has_prefix', - 'mtime', - 'platform', - 'ucs', - 'requires_features', - 'binstar', - 'target-triplet', - 'machine', - 'operatingsystem', - } - for field_name in filter_fields & set(index_json): - del index_json[field_name] - elif alternate_cache: - # we hit the cache of the other file type. Copy files to this name, and replace - # the size, md5, and sha256 values - paths = [index_cache_path, about_cache_path, paths_cache_path, recipe_cache_path, - run_exports_cache_path, post_install_cache_path, icon_cache_path] - bizarro_paths = [_.replace(fn, alternate_cache_fn) for _ in paths] - for src, dest in zip(bizarro_paths, paths): - if os.path.exists(src): - try: - os.makedirs(os.path.dirname(dest)) - except: - pass - utils.copy_into(src, dest) - - with open(index_cache_path) as f: - index_json = json.load(f) - else: - with open(index_cache_path) as f: - index_json = json.load(f) - - # calculate extra stuff to add to index.json cache, size, md5, sha256 - # This is done always for all files, whether the cache is loaded or not, - # because the cache may be from the other file type. We don't store this - # info in the cache to avoid confusion. 
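The removed line that follows merges conda_package_handling.api.get_pkg_details(abs_fn) into the cached index.json; per the comment above, that amounts to size, md5, and sha256. The sketch below is a plain-hashlib approximation of those three fields, not the library's actual implementation.

import hashlib
import os

def pkg_details(path):
    """Recompute the three fields named in the comment above (size, md5, sha256)."""
    with open(path, "rb") as fh:
        data = fh.read()
    return {
        "size": os.path.getsize(path),
        "md5": hashlib.md5(data).hexdigest(),
        "sha256": hashlib.sha256(data).hexdigest(),
    }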
- index_json.update(conda_package_handling.api.get_pkg_details(abs_fn)) - - with open(index_cache_path, 'w') as fh: - json.dump(index_json, fh) - retval = fn, mtime, size, index_json - except (InvalidArchiveError, KeyError, EOFError, JSONDecodeError): - if not second_try: - return ChannelIndex._extract_to_cache(channel_root, subdir, fn, second_try=True) - return retval - - @staticmethod - def _load_index_from_cache(channel_root, subdir, fn, stat_cache): - index_cache_path = join(channel_root, subdir, '.cache', 'index', fn + '.json') - try: - with open(index_cache_path) as fh: - index_json = json.load(fh) - except (OSError, JSONDecodeError): - index_json = fn - - return fn, index_json - - @staticmethod - def _load_all_from_cache(channel_root, subdir, fn): - subdir_path = join(channel_root, subdir) - try: - mtime = getmtime(join(subdir_path, fn)) - except FileNotFoundError: - return {} - # In contrast to self._load_index_from_cache(), this method reads up pretty much - # all of the cached metadata, except for paths. It all gets dumped into a single map. - index_cache_path = join(subdir_path, '.cache', 'index', fn + '.json') - about_cache_path = join(subdir_path, '.cache', 'about', fn + '.json') - recipe_cache_path = join(subdir_path, '.cache', 'recipe', fn + '.json') - run_exports_cache_path = join(subdir_path, '.cache', 'run_exports', fn + '.json') - post_install_cache_path = join(subdir_path, '.cache', 'post_install', fn + '.json') - icon_cache_path_glob = join(subdir_path, '.cache', 'icon', fn + ".*") - recipe_log_path = join(subdir_path, '.cache', 'recipe_log', fn + '.json') - - data = {} - for path in (recipe_cache_path, about_cache_path, index_cache_path, post_install_cache_path, recipe_log_path): - try: - if os.path.getsize(path) != 0: - with open(path) as fh: - data.update(json.load(fh)) - except (OSError, EOFError): - pass - - try: - icon_cache_paths = glob(icon_cache_path_glob) - if icon_cache_paths: - icon_cache_path = sorted(icon_cache_paths)[-1] - icon_ext = icon_cache_path.rsplit('.', 1)[-1] - channel_icon_fn = "{}.{}".format(data['name'], icon_ext) - icon_url = "icons/" + channel_icon_fn - icon_channel_path = join(channel_root, 'icons', channel_icon_fn) - icon_md5 = utils.md5_file(icon_cache_path) - icon_hash = f"md5:{icon_md5}:{getsize(icon_cache_path)}" - data.update(icon_hash=icon_hash, icon_url=icon_url) - # log.info("writing icon from %s to %s", icon_cache_path, icon_channel_path) - utils.move_with_fallback(icon_cache_path, icon_channel_path) - except: - pass - - # have to stat again, because we don't have access to the stat cache here - data['mtime'] = mtime - - source = data.get("source", {}) - try: - data.update({"source_" + k: v for k, v in source.items()}) - except AttributeError: - # sometimes source is a list instead of a dict - pass - _clear_newline_chars(data, 'description') - _clear_newline_chars(data, 'summary') - try: - with open(run_exports_cache_path) as fh: - data["run_exports"] = json.load(fh) - except (OSError, EOFError): - data["run_exports"] = {} - return data - - def _write_repodata(self, subdir, repodata, json_filename): - repodata_json_path = join(self.channel_root, subdir, json_filename) - new_repodata_binary = json.dumps(repodata, indent=2, sort_keys=True,).replace("':'", "': '").encode("utf-8") - write_result = _maybe_write(repodata_json_path, new_repodata_binary, write_newline_end=True) - if write_result: - repodata_bz2_path = repodata_json_path + ".bz2" - bz2_content = bz2.compress(new_repodata_binary) - _maybe_write(repodata_bz2_path, 
bz2_content, content_is_binary=True) - return write_result - - def _write_subdir_index_html(self, subdir, repodata): - repodata_packages = repodata["packages"] - subdir_path = join(self.channel_root, subdir) - - def _add_extra_path(extra_paths, path): - if isfile(join(self.channel_root, path)): - extra_paths[basename(path)] = { - 'size': getsize(path), - 'timestamp': int(getmtime(path)), - 'sha256': utils.sha256_checksum(path), - 'md5': utils.md5_file(path), - } - - extra_paths = OrderedDict() - _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN + '.bz2')) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN + '.bz2')) - # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json")) - _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json")) - rendered_html = _make_subdir_index_html( - self.channel_name, subdir, repodata_packages, extra_paths - ) - index_path = join(subdir_path, 'index.html') - return _maybe_write(index_path, rendered_html) - - def _write_channeldata_index_html(self, channeldata): - rendered_html = _make_channeldata_index_html( - self.channel_name, channeldata + dir_path = parent_path + subdirs = [dirname] + + log_level = logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING + with utils.LoggingContext(log_level): + return _update_index( + dir_path, + check_md5=check_md5, + channel_name=channel_name, + patch_generator=patch_generator, + threads=threads, + verbose=verbose, + progress=progress, + subdirs=subdirs, + warn=warn, + current_index_versions=current_index_versions, + debug=debug, ) - index_path = join(self.channel_root, 'index.html') - _maybe_write(index_path, rendered_html) - - def _update_channeldata(self, channel_data, repodata, subdir): - legacy_packages = repodata["packages"] - conda_packages = repodata["packages.conda"] - - use_these_legacy_keys = set(legacy_packages.keys()) - {k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys()} - all_packages = conda_packages.copy() - all_packages.update({k: legacy_packages[k] for k in use_these_legacy_keys}) - package_data = channel_data.get('packages', {}) - - def _append_group(groups, candidates): - candidate = sorted(candidates, key=lambda x: x[1].get("timestamp", 0))[-1] - pkg_dict = candidate[1] - pkg_name = pkg_dict['name'] - - run_exports = package_data.get(pkg_name, {}).get('run_exports', {}) - if (pkg_name not in package_data or - subdir not in package_data.get(pkg_name, {}).get('subdirs', []) or - package_data.get(pkg_name, {}).get('timestamp', 0) < - _make_seconds(pkg_dict.get('timestamp', 0)) or - run_exports and pkg_dict['version'] not in run_exports): - groups.append(candidate) - - groups = [] - for name, group in groupby(all_packages.items(), lambda x: x[1]["name"]): - if name not in package_data or package_data[name].get("run_exports"): - # pay special attention to groups that have run_exports - we need to process each version - # group by version; take newest per version group. 
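The "+" lines in this hunk finish the new _delegated_update_index body; the essential move is remapping a single-subdir path onto the channel root plus a subdirs list before handing off to conda-index. A standalone sketch (not from the patch), with an invented path and an abbreviated, assumed subdir set:

import os

DEFAULT_SUBDIRS = {"noarch", "linux-64", "osx-64", "osx-arm64", "win-64"}  # abbreviated

def remap_single_subdir(dir_path):
    """Mimic the remapping: a path ending in a known subdir becomes (channel_root, [subdir])."""
    parent_path, dirname = os.path.split(dir_path)
    if dirname in DEFAULT_SUBDIRS:
        return parent_path, [dirname]
    return dir_path, None  # already a channel root; let conda-index discover subdirs

print(remap_single_subdir("/home/user/conda-bld/linux-64"))
# ('/home/user/conda-bld', ['linux-64'])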
We handle groups that are not - # in the index t all yet similarly, because we can't check if they have any run_exports - for _, vgroup in groupby(group, lambda x: x[1]["version"]): - _append_group(groups, vgroup) - else: - # take newest per group - _append_group(groups, group) - - def _replace_if_newer_and_present(pd, data, erec, data_newer, k): - if data.get(k) and (data_newer or not erec.get(k)): - pd[k] = data[k] - else: - pd[k] = erec.get(k) - - # unzipping - fns, fn_dicts = [], [] - if groups: - fns, fn_dicts = zip(*groups) - - load_func = functools.partial(ChannelIndex._load_all_from_cache, - self.channel_root, subdir,) - for fn_dict, data in zip(fn_dicts, self.thread_executor.map(load_func, fns)): - if data: - data.update(fn_dict) - name = data['name'] - # existing record - erec = package_data.get(name, {}) - data_v = data.get('version', '0') - erec_v = erec.get('version', '0') - data_newer = VersionOrder(data_v) > VersionOrder(erec_v) - - package_data[name] = package_data.get(name, {}) - # keep newer value for these - for k in ('description', 'dev_url', 'doc_url', 'doc_source_url', 'home', 'license', - 'source_url', 'source_git_url', 'summary', 'icon_url', 'icon_hash', 'tags', - 'identifiers', 'keywords', 'recipe_origin', 'version'): - _replace_if_newer_and_present(package_data[name], data, erec, data_newer, k) - - # keep any true value for these, since we don't distinguish subdirs - for k in ("binary_prefix", "text_prefix", "activate.d", "deactivate.d", "pre_link", - "post_link", "pre_unlink"): - package_data[name][k] = any((data.get(k), erec.get(k))) - - package_data[name]['subdirs'] = sorted(list(set(erec.get('subdirs', []) + [subdir]))) - # keep one run_exports entry per version of the package, since these vary by version - run_exports = erec.get('run_exports', {}) - exports_from_this_version = data.get('run_exports') - if exports_from_this_version: - run_exports[data_v] = data.get('run_exports') - package_data[name]['run_exports'] = run_exports - package_data[name]['timestamp'] = _make_seconds(max( - data.get('timestamp', 0), channel_data.get(name, {}).get('timestamp', 0))) - - channel_data.update({ - 'channeldata_version': CHANNELDATA_VERSION, - 'subdirs': sorted(list(set(channel_data.get('subdirs', []) + [subdir]))), - 'packages': package_data, - }) - - def _write_channeldata(self, channeldata): - # trim out commits, as they can take up a ton of space. They're really only for the RSS feed. 
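As a reading aid for the removed _update_channeldata logic above, this is the rough shape of one channeldata "packages" entry it maintains, with run_exports keyed by version. The package data and the run_exports specs are invented; only the key names follow the code.

# Rough shape of one channeldata "packages" entry (all values invented):
entry = {
    "version": "1.1",                      # newest version seen wins for most fields
    "subdirs": ["linux-64", "noarch"],     # sorted union across indexed subdirs
    "timestamp": 1650000000,               # newest timestamp, normalized to seconds
    "run_exports": {                       # kept per version, unlike the other fields
        "1.0": {"weak": ["libfoo >=1.0,<2.0a0"]},
        "1.1": {"weak": ["libfoo >=1.1,<2.0a0"]},
    },
}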
- for _pkg, pkg_dict in channeldata.get('packages', {}).items(): - if "commits" in pkg_dict: - del pkg_dict['commits'] - channeldata_path = join(self.channel_root, 'channeldata.json') - content = json.dumps(channeldata, indent=2, sort_keys=True).replace("':'", "': '") - _maybe_write(channeldata_path, content, True) - - def _load_patch_instructions_tarball(self, subdir, patch_generator): - instructions = {} - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(patch_generator, dest_dir=tmpdir) - instructions_file = os.path.join(tmpdir, subdir, "patch_instructions.json") - if os.path.isfile(instructions_file): - with open(instructions_file) as f: - instructions = json.load(f) - return instructions - - def _create_patch_instructions(self, subdir, repodata, patch_generator=None): - gen_patch_path = patch_generator or join(self.channel_root, 'gen_patch.py') - if isfile(gen_patch_path): - log.debug(f"using patch generator {gen_patch_path} for {subdir}") - - # https://stackoverflow.com/a/41595552/2127762 - try: - from importlib.util import spec_from_file_location, module_from_spec - spec = spec_from_file_location('a_b', gen_patch_path) - mod = module_from_spec(spec) - - spec.loader.exec_module(mod) - # older pythons - except ImportError: - import imp - mod = imp.load_source('a_b', gen_patch_path) - - instructions = mod._patch_repodata(repodata, subdir) - - if instructions.get('patch_instructions_version', 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return instructions - else: - if patch_generator: - raise ValueError("Specified metadata patch file '{}' does not exist. Please try an absolute " - "path, or examine your relative path carefully with respect to your cwd." - .format(patch_generator)) - return {} - - def _write_patch_instructions(self, subdir, instructions): - new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace("':'", "': '") - patch_instructions_path = join(self.channel_root, subdir, 'patch_instructions.json') - _maybe_write(patch_instructions_path, new_patch, True) - - def _load_instructions(self, subdir): - patch_instructions_path = join(self.channel_root, subdir, 'patch_instructions.json') - if isfile(patch_instructions_path): - log.debug("using patch instructions %s" % patch_instructions_path) - with open(patch_instructions_path) as fh: - instructions = json.load(fh) - if instructions.get('patch_instructions_version', 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - return instructions - return {} - - def _patch_repodata(self, subdir, repodata, patch_generator=None): - if patch_generator and any(patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS): - instructions = self._load_patch_instructions_tarball(subdir, patch_generator) - else: - instructions = self._create_patch_instructions(subdir, repodata, patch_generator) - if instructions: - self._write_patch_instructions(subdir, instructions) - else: - instructions = self._load_instructions(subdir) - if instructions.get('patch_instructions_version', 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return _apply_instructions(subdir, repodata, instructions), instructions diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index ea7be064f8..43fc401551 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -1,60 +1,71 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import defaultdict -from itertools import groupby 
-from functools import lru_cache +from __future__ import annotations + import json -from operator import itemgetter -from os.path import abspath, join, dirname, exists, basename, normcase import os -import re import sys -import tempfile - -from conda_build.os_utils.ldd import get_linkages, get_package_obj_files, get_untracked_obj_files -from conda_build.os_utils.liefldd import codefile_type -from conda_build.os_utils.macho import get_rpaths, human_filetype -from conda_build.utils import ( +from collections import defaultdict +from itertools import groupby +from operator import itemgetter +from os.path import abspath, basename, dirname, exists, join, normcase +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING + +from conda.api import Solver +from conda.base.context import context +from conda.cli.common import specs_from_args +from conda.core.index import get_index +from conda.core.prefix_data import PrefixData +from conda.models.records import PrefixRecord + +from .os_utils.ldd import ( + get_linkages, + get_package_obj_files, + get_untracked_obj_files, +) +from .os_utils.liefldd import codefile_class, machofile +from .os_utils.macho import get_rpaths, human_filetype +from .utils import ( comma_join, - rm_rf, - package_has_file, - get_logger, ensure_list, + get_logger, + on_linux, + on_mac, + on_win, + package_has_file, ) -from conda_build.conda_interface import ( - specs_from_args, - is_linked, - linked_data, - get_index, -) -from conda_build.conda_interface import display_actions, install_actions +if TYPE_CHECKING: + from typing import Iterable, Literal +log = get_logger(__name__) -@lru_cache(maxsize=None) -def dist_files(prefix, dist): - meta = is_linked(prefix, dist) - return set(meta['files']) if meta else set() +def which_package( + path: str | os.PathLike | Path, + prefix: str | os.PathLike | Path, +) -> Iterable[PrefixRecord]: + """Detect which package(s) a path belongs to. -def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False): - """ - given the path of a conda installed file iterate over + Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. """ - norm_ipp = normcase(in_prefix_path.replace(os.sep, '/')) - from conda_build.utils import linked_data_no_multichannels - if avoid_canonical_channel_name: - fn = linked_data_no_multichannels - else: - fn = linked_data - for dist in fn(prefix): - # dfiles = set(dist.get('files', [])) - dfiles = dist_files(prefix, dist) - # TODO :: This is completely wrong when the env is on a case-sensitive FS! - if any(norm_ipp == normcase(w) for w in dfiles): - yield dist + try: + path = Path(path).relative_to(prefix) + except ValueError: + # ValueError: path is already relative to prefix + pass + # On Windows, be lenient and allow case-insensitive path comparisons. + # NOTE: On macOS, although case-insensitive filesystem is default, still + # require case-sensitive matches (i.e., normcase on macOS is a no-op). 
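A hypothetical call against the rewritten which_package() in conda_build/inspect_pkg.py; the environment prefix and library path are invented, while the (path, prefix) signature and the PrefixRecord yield come from the hunk.

from pathlib import Path

from conda_build.inspect_pkg import which_package

prefix = Path("/opt/conda/envs/scratch")   # invented environment prefix
target = prefix / "lib" / "libz.so.1"      # invented in-prefix file

for prec in which_package(target, prefix):
    # usually a single owner; several records would indicate clobbering packages
    print(prec.name, prec.version, prec.build)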
+ normcase_path = normcase(path) + + for prec in PrefixData(str(prefix)).iter_records(): + if normcase_path in (normcase(file) for file in prec["files"]): + yield prec def print_object_info(info, key): @@ -67,10 +78,10 @@ def print_object_info(info, key): continue if f_info[data] is None: continue - output_string += f' {data}: {f_info[data]}\n' + output_string += f" {data}: {f_info[data]}\n" if len([i for i in f_info if f_info[i] is not None and i != key]) > 1: - output_string += '\n' - output_string += '\n' + output_string += "\n" + output_string += "\n" return output_string @@ -82,199 +93,195 @@ def __str__(self): untracked_package = _untracked_package() -def check_install(packages, platform=None, channel_urls=(), prepend=True, - minimal_hint=False): - prefix = tempfile.mkdtemp('conda') - try: - specs = specs_from_args(packages) - index = get_index(channel_urls=channel_urls, prepend=prepend, - platform=platform, prefix=prefix) - actions = install_actions(prefix, index, specs, pinned=False, - minimal_hint=minimal_hint) - display_actions(actions, index) - return actions - finally: - rm_rf(prefix) - return None - - -def print_linkages(depmap, show_files=False): - # Print system and not found last - dist_depmap = {} - for k, v in depmap.items(): - if hasattr(k, 'dist_name'): - k = k.dist_name - dist_depmap[k] = v - - depmap = dist_depmap - k = sorted(set(depmap.keys()) - {'system', 'not found'}) - all_deps = k if 'not found' not in depmap.keys() else k + ['system', 'not found'] +def check_install( + packages: Iterable[str], + subdir: str | None = None, + channel_urls: Iterable[str] = (), +) -> None: + with TemporaryDirectory() as prefix: + Solver( + prefix, + channel_urls, + [subdir or context.subdir], + specs_from_args(packages), + ).solve_for_transaction(ignore_pinned=True).print_transaction_summary() + + +def print_linkages( + depmap: dict[ + PrefixRecord | Literal["not found", "system", "untracked"], + list[tuple[str, str, str]], + ], + show_files: bool = False, +) -> str: + # print system, not found, and untracked last + sort_order = { + # PrefixRecord: (0, PrefixRecord.name), + "system": (1, "system"), + "not found": (2, "not found"), + "untracked": (3, "untracked"), + # str: (4, str), + } + output_string = "" - for dep in all_deps: - output_string += "%s:\n" % dep + for prec, links in sorted( + depmap.items(), + key=( + lambda key: (0, key[0].name) + if isinstance(key[0], PrefixRecord) + else sort_order.get(key[0], (4, key[0])) + ), + ): + output_string += f"{prec}:\n" if show_files: - for lib, path, binary in sorted(depmap[dep]): + for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" else: - for lib, path in sorted(set(map(itemgetter(0, 1), depmap[dep]))): + for lib, path in sorted(set(map(itemgetter(0, 1), links))): output_string += f" {lib} ({path})\n" output_string += "\n" return output_string def replace_path(binary, path, prefix): - if sys.platform.startswith('linux'): + if on_linux: return abspath(path) - elif sys.platform.startswith('darwin'): + elif on_mac: if path == basename(binary): return abspath(join(prefix, binary)) - if '@rpath' in path: + if "@rpath" in path: rpaths = get_rpaths(join(prefix, binary)) if not rpaths: return "NO LC_RPATH FOUND" else: for rpath in rpaths: path1 = path.replace("@rpath", rpath) - path1 = path1.replace('@loader_path', join(prefix, dirname(binary))) + path1 = path1.replace("@loader_path", join(prefix, dirname(binary))) if exists(abspath(join(prefix, path1))): path = path1 break else: - return 'not found' 
- path = path.replace('@loader_path', join(prefix, dirname(binary))) - if path.startswith('/'): + return "not found" + path = path.replace("@loader_path", join(prefix, dirname(binary))) + if path.startswith("/"): return abspath(path) - return 'not found' + return "not found" -def test_installable(channel='defaults'): +def test_installable(channel: str = "defaults") -> bool: success = True - log = get_logger(__name__) - has_py = re.compile(r'py(\d)(\d)') - for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']: - log.info("######## Testing platform %s ########", platform) - channels = [channel] - index = get_index(channel_urls=channels, prepend=False, platform=platform) - for _, rec in index.items(): - # If we give channels at the command line, only look at - # packages from those channels (not defaults). - if channel != 'defaults' and rec.get('schannel', 'defaults') == 'defaults': - continue - name = rec['name'] - if name in {'conda', 'conda-build'}: + for subdir in ["osx-64", "linux-32", "linux-64", "win-32", "win-64"]: + log.info("######## Testing subdir %s ########", subdir) + for prec in get_index(channel_urls=[channel], prepend=False, platform=subdir): + name = prec["name"] + if name in {"conda", "conda-build"}: # conda can only be installed in the root environment continue - if name.endswith('@'): + elif name.endswith("@"): # this is a 'virtual' feature record that conda adds to the index for the solver # and should be ignored here continue - # Don't fail just because the package is a different version of Python - # than the default. We should probably check depends rather than the - # build string. - build = rec['build'] - match = has_py.search(build) - assert match if 'py' in build else True, build - if match: - additional_packages = [f'python={match.group(1)}.{match.group(2)}'] - else: - additional_packages = [] - version = rec['version'] - log.info('Testing %s=%s', name, version) + version = prec["version"] + log.info("Testing %s=%s", name, version) try: - install_steps = check_install([name + '=' + version] + additional_packages, - channel_urls=channels, prepend=False, - platform=platform) - success &= bool(install_steps) - except KeyboardInterrupt: - raise - # sys.exit raises an exception that doesn't subclass from Exception - except BaseException as e: + check_install( + [f"{name}={version}"], + channel_urls=[channel], + prepend=False, + subdir=subdir, + ) + except Exception as err: success = False - log.error("FAIL: %s %s on %s with %s (%s)", name, version, - platform, additional_packages, e) + log.error( + "[%s/%s::%s=%s] %s", + channel, + subdir, + name, + version, + repr(err), + ) return success -def _installed(prefix): - installed = linked_data(prefix) - installed = {rec['name']: dist for dist, rec in installed.items()} - return installed - - def _underlined_text(text): - return str(text) + '\n' + '-' * len(str(text)) + '\n\n' - - -def inspect_linkages(packages, prefix=sys.prefix, untracked=False, - all_packages=False, show_files=False, groupby="package", sysroot=""): - pkgmap = {} - - installed = _installed(prefix) - + return str(text) + "\n" + "-" * len(str(text)) + "\n\n" + + +def inspect_linkages( + packages: Iterable[str | _untracked_package], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package", "dependency"] = "package", + sysroot: str = "", +) -> str: if not packages and not untracked and not all_packages: - raise ValueError("At least one 
package or --untracked or --all must be provided") + sys.exit("At least one package or --untracked or --all must be provided") + elif on_win: + sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") + + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} if all_packages: packages = sorted(installed.keys()) - + packages = ensure_list(packages) if untracked: packages.append(untracked_package) - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - sys.exit(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - if not sys.platform.startswith(('linux', 'darwin')): - sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") - - if dist == untracked_package: + pkgmap: dict[str | _untracked_package, dict[str, list]] = {} + for name in packages: + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + sys.exit(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + linkages = get_linkages(obj_files, prefix, sysroot) - depmap = defaultdict(list) - pkgmap[pkg] = depmap - depmap['not found'] = [] - depmap['system'] = [] - for binary in linkages: - for lib, path in linkages[binary]: - path = replace_path(binary, path, prefix) if path not in {'', - 'not found'} else path - if path.startswith(prefix): - in_prefix_path = re.sub('^' + prefix + '/', '', path) - deps = list(which_package(in_prefix_path, prefix)) - if len(deps) > 1: - deps_str = [str(dep) for dep in deps] - get_logger(__name__).warn("Warning: %s comes from multiple " - "packages: %s", path, comma_join(deps_str)) - if not deps: + pkgmap[name] = depmap = defaultdict(list) + for binary, paths in linkages.items(): + for lib, path in paths: + path = ( + replace_path(binary, path, prefix) + if path not in {"", "not found"} + else path + ) + try: + relative = str(Path(path).relative_to(prefix)) + except ValueError: + # ValueError: path is not relative to prefix + relative = None + if relative: + precs = list(which_package(relative, prefix)) + if len(precs) > 1: + get_logger(__name__).warn( + "Warning: %s comes from multiple packages: %s", + path, + comma_join(map(str, precs)), + ) + elif not precs: if exists(path): - depmap['untracked'].append((lib, path.split(prefix + - '/', 1)[-1], binary)) + depmap["untracked"].append((lib, relative, binary)) else: - depmap['not found'].append((lib, path.split(prefix + - '/', 1)[-1], binary)) - for d in deps: - depmap[d].append((lib, path.split(prefix + '/', - 1)[-1], binary)) - elif path == 'not found': - depmap['not found'].append((lib, path, binary)) + depmap["not found"].append((lib, relative, binary)) + for prec in precs: + depmap[prec].append((lib, relative, binary)) + elif path == "not found": + depmap["not found"].append((lib, path, binary)) else: - depmap['system'].append((lib, path, binary)) + depmap["system"].append((lib, path, binary)) output_string = "" - if groupby == 'package': + if groupby == "package": for pkg in packages: output_string += _underlined_text(pkg) output_string += print_linkages(pkgmap[pkg], show_files=show_files) - elif groupby == 'dependency': + elif groupby == "dependency": # {pkg: {dep: [files]}} -> {dep: {pkg: [files]}} inverted_map = defaultdict(lambda: defaultdict(list)) for pkg in pkgmap: @@ -283,65 +290,67 @@ def 
inspect_linkages(packages, prefix=sys.prefix, untracked=False, inverted_map[dep][pkg] = pkgmap[pkg][dep] # print system and not found last - k = sorted(set(inverted_map.keys()) - {'system', 'not found'}) - for dep in k + ['system', 'not found']: + k = sorted(set(inverted_map.keys()) - {"system", "not found"}) + for dep in k + ["system", "not found"]: output_string += _underlined_text(dep) output_string += print_linkages(inverted_map[dep], show_files=show_files) else: - raise ValueError("Unrecognized groupby: %s" % groupby) - if hasattr(output_string, 'decode'): - output_string = output_string.decode('utf-8') + raise ValueError(f"Unrecognized groupby: {groupby}") + if hasattr(output_string, "decode"): + output_string = output_string.decode("utf-8") return output_string -def inspect_objects(packages, prefix=sys.prefix, groupby='package'): - installed = _installed(prefix) - - output_string = "" - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - raise ValueError(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - output_string += _underlined_text(pkg) +def inspect_objects( + packages: Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + groupby: str = "package", +): + if not on_mac: + sys.exit("Error: conda inspect objects is only implemented in OS X") - if not sys.platform.startswith('darwin'): - sys.exit("Error: conda inspect objects is only implemented in OS X") + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} - if dist == untracked_package: + output_string = "" + for name in ensure_list(packages): + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + raise ValueError(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + + output_string += _underlined_text(name) info = [] for f in obj_files: - f_info = {} path = join(prefix, f) - filetype = codefile_type(path) - if filetype == 'machofile': - f_info['filetype'] = human_filetype(path, None) - f_info['rpath'] = ':'.join(get_rpaths(path)) - f_info['filename'] = f - info.append(f_info) + codefile = codefile_class(path, skip_symlinks=True) + if codefile == machofile: + info.append( + { + "filetype": human_filetype(path, None), + "rpath": ":".join(get_rpaths(path)), + "filename": f, + } + ) output_string += print_object_info(info, groupby) - if hasattr(output_string, 'decode'): - output_string = output_string.decode('utf-8') + if hasattr(output_string, "decode"): + output_string = output_string.decode("utf-8") return output_string def get_hash_input(packages): hash_inputs = {} for pkg in ensure_list(packages): - pkgname = os.path.basename(pkg)[:-8] + pkgname = os.path.basename(pkg) hash_inputs[pkgname] = {} - hash_input = package_has_file(pkg, 'info/hash_input.json') + hash_input = package_has_file(pkg, "info/hash_input.json") if hash_input: - hash_inputs[pkgname]['recipe'] = json.loads(hash_input) + hash_inputs[pkgname]["recipe"] = json.loads(hash_input) else: hash_inputs[pkgname] = "" diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 585aece94e..6ec2195eb0 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -1,28 +1,44 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from functools import partial -from io import StringIO 
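# [Editor's illustrative sketch, not part of the patch.] The groupby="dependency"
# branch of inspect_linkages above inverts the per-package map as noted in the
# "{pkg: {dep: [files]}} -> {dep: {pkg: [files]}}" comment. The same inversion in
# isolation, with made-up package and library names:
from collections import defaultdict

pkgmap = {
    "numpy": {"libopenblas": ["libnpymath.a"], "system": ["libm.so.6"]},
    "scipy": {"libopenblas": ["_fblas.so"]},
}
inverted = defaultdict(lambda: defaultdict(list))
for pkg, deps in pkgmap.items():
    for dep, files in deps.items():
        inverted[dep][pkg] = files
print({dep: dict(pkgs) for dep, pkgs in inverted.items()})
# -> {'libopenblas': {'numpy': [...], 'scipy': [...]}, 'system': {'numpy': [...]}}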
+from __future__ import annotations + +import datetime import json import os import pathlib import re import time -import datetime -from typing import IO, Any, Optional +from functools import partial +from io import StringIO, TextIOBase +from typing import TYPE_CHECKING from warnings import warn import jinja2 -import toml import yaml +from frozendict import deepfreeze +from . import _load_setup_py_data from .environ import get_dict as get_environ -from .utils import get_installed_packages, apply_pin_expressions, get_logger, HashableDict +from .exceptions import CondaBuildException from .render import get_env_dependencies -from .utils import copy_into, check_call_env, rm_rf, ensure_valid_spec +from .utils import ( + apply_pin_expressions, + check_call_env, + copy_into, + ensure_valid_spec, + get_installed_packages, + get_logger, + rm_rf, +) from .variants import DEFAULT_COMPILERS -from .exceptions import CondaBuildException -from . import _load_setup_py_data +try: + import tomllib # Python 3.11 +except: + import tomli as tomllib + +if TYPE_CHECKING: + from typing import IO, Any log = get_logger(__name__) @@ -42,20 +58,27 @@ class UndefinedNeverFail(jinja2.Undefined): you can detect which undefined names were used by inspecting that list. Be sure to clear the all_undefined_names list before calling template.render(). """ + all_undefined_names = [] - def __init__(self, hint=None, obj=jinja2.runtime.missing, name=None, - exc=jinja2.exceptions.UndefinedError): + def __init__( + self, + hint=None, + obj=jinja2.runtime.missing, + name=None, + exc=jinja2.exceptions.UndefinedError, + ): jinja2.Undefined.__init__(self, hint, obj, name, exc) # Using any of these methods on an Undefined variable # results in another Undefined variable. - __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ - __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ - __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ - __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = \ - __complex__ = __pow__ = __rpow__ = \ - lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) + __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = __truediv__ = ( + __rtruediv__ + ) = __floordiv__ = __rfloordiv__ = __mod__ = __rmod__ = __pos__ = __neg__ = ( + __call__ + ) = __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __complex__ = __pow__ = ( + __rpow__ + ) = lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) # Accessing an attribute of an Undefined variable # results in another Undefined variable. @@ -63,12 +86,12 @@ def __getattr__(self, k): try: return object.__getattr__(self, k) except AttributeError: - self._return_undefined(self._undefined_name + '.' + k) + self._return_undefined(self._undefined_name + "." + k) # Unlike the methods above, Python requires that these # few methods must always return the correct type - __str__ = __repr__ = lambda self: self._return_value('') - __unicode__ = lambda self: self._return_value('') + __str__ = __repr__ = lambda self: self._return_value("") + __unicode__ = lambda self: self._return_value("") __int__ = lambda self: self._return_value(0) __float__ = lambda self: self._return_value(0.0) __nonzero__ = lambda self: self._return_value(False) @@ -76,10 +99,12 @@ def __getattr__(self, k): def _return_undefined(self, result_name): # Record that this undefined variable was actually used. 
UndefinedNeverFail.all_undefined_names.append(self._undefined_name) - return UndefinedNeverFail(hint=self._undefined_hint, - obj=self._undefined_obj, - name=result_name, - exc=self._undefined_exception) + return UndefinedNeverFail( + hint=self._undefined_hint, + obj=self._undefined_obj, + name=result_name, + exc=self._undefined_exception, + ) def _return_value(self, value=None): # Record that this undefined variable was actually used. @@ -100,76 +125,110 @@ def __init__(self, unfiltered_loader, config): def get_source(self, environment, template): # we have circular imports here. Do a local import - from .metadata import select_lines, ns_cfg - contents, filename, uptodate = self._unfiltered_loader.get_source(environment, - template) - return (select_lines(contents, ns_cfg(self.config), - variants_in_place=bool(self.config.variant)), filename, uptodate) + from .metadata import get_selectors, select_lines + + contents, filename, uptodate = self._unfiltered_loader.get_source( + environment, template + ) + return ( + select_lines( + contents, + get_selectors(self.config), + variants_in_place=bool(self.config.variant), + ), + filename, + uptodate, + ) -def load_setup_py_data(m, setup_file='setup.py', from_recipe_dir=False, recipe_dir=None, - permit_undefined_jinja=True): +def load_setup_py_data( + m, + setup_file="setup.py", + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): _setuptools_data = None # we must copy the script into the work folder to avoid incompatible pyc files - origin_setup_script = os.path.join(os.path.dirname(__file__), '_load_setup_py_data.py') - dest_setup_script = os.path.join(m.config.work_dir, '_load_setup_py_data.py') + origin_setup_script = os.path.join( + os.path.dirname(__file__), "_load_setup_py_data.py" + ) + dest_setup_script = os.path.join(m.config.work_dir, "_load_setup_py_data.py") copy_into(origin_setup_script, dest_setup_script) env = get_environ(m) env["CONDA_BUILD_STATE"] = "RENDER" if os.path.isfile(m.config.build_python): args = [m.config.build_python, dest_setup_script, m.config.work_dir, setup_file] if from_recipe_dir: - assert recipe_dir, 'recipe_dir must be set if from_recipe_dir is True' - args.append('--from-recipe-dir') - args.extend(['--recipe-dir', recipe_dir]) + assert recipe_dir, "recipe_dir must be set if from_recipe_dir is True" + args.append("--from-recipe-dir") + args.extend(["--recipe-dir", recipe_dir]) if permit_undefined_jinja: - args.append('--permit-undefined-jinja') + args.append("--permit-undefined-jinja") check_call_env(args, env=env) # this is a file that the subprocess will have written - with open(os.path.join(m.config.work_dir, 'conda_build_loaded_setup_py.json')) as f: + with open( + os.path.join(m.config.work_dir, "conda_build_loaded_setup_py.json") + ) as f: _setuptools_data = json.load(f) else: try: - _setuptools_data = _load_setup_py_data.load_setup_py_data(setup_file, - from_recipe_dir=from_recipe_dir, - recipe_dir=recipe_dir, - work_dir=m.config.work_dir, - permit_undefined_jinja=permit_undefined_jinja) + _setuptools_data = _load_setup_py_data.load_setup_py_data( + setup_file, + from_recipe_dir=from_recipe_dir, + recipe_dir=recipe_dir, + work_dir=m.config.work_dir, + permit_undefined_jinja=permit_undefined_jinja, + ) except (TypeError, OSError): # setup.py file doesn't yet exist. Will get picked up in future parsings pass except ImportError as e: if permit_undefined_jinja: - log.debug("Reading setup.py failed due to missing modules. 
This is probably OK, " - "since it may succeed in later passes. Watch for incomplete recipe " - "info, though.") + log.debug( + "Reading setup.py failed due to missing modules. This is probably OK, " + "since it may succeed in later passes. Watch for incomplete recipe " + "info, though." + ) else: - raise CondaBuildException("Could not render recipe - need modules " - "installed in root env. Import error was \"{}\"".format(e)) + raise CondaBuildException( + "Could not render recipe - need modules " + f'installed in root env. Import error was "{e}"' + ) # cleanup: we must leave the source tree empty unless the source code is already present - rm_rf(os.path.join(m.config.work_dir, '_load_setup_py_data.py')) + rm_rf(os.path.join(m.config.work_dir, "_load_setup_py_data.py")) return _setuptools_data if _setuptools_data else {} -def load_setuptools(m, setup_file='setup.py', from_recipe_dir=False, recipe_dir=None, - permit_undefined_jinja=True): +def load_setuptools( + m, + setup_file="setup.py", + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): warn( "conda_build.jinja_context.load_setuptools is pending deprecation in a future release. " "Use conda_build.jinja_context.load_setup_py_data instead.", PendingDeprecationWarning, ) - return load_setup_py_data(m, setup_file=setup_file, from_recipe_dir=from_recipe_dir, - recipe_dir=recipe_dir, permit_undefined_jinja=permit_undefined_jinja) + return load_setup_py_data( + m, + setup_file=setup_file, + from_recipe_dir=from_recipe_dir, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ) def load_npm(): - mode_dict = {'mode': 'r', 'encoding': 'utf-8'} - with open('package.json', **mode_dict) as pkg: + mode_dict = {"mode": "r", "encoding": "utf-8"} + with open("package.json", **mode_dict) as pkg: return json.load(pkg) def _find_file(file_name: str, from_recipe_dir: bool, recipe_dir: str, config) -> str: - """ Get the path to the given file which may be in the work_dir + """Get the path to the given file which may be in the work_dir or in the recipe_dir. Note, the returned file name may not exist. @@ -186,8 +245,14 @@ def _find_file(file_name: str, from_recipe_dir: bool, recipe_dir: str, config) - return path -def load_file_regex(config, load_file, regex_pattern, from_recipe_dir=False, - recipe_dir=None, permit_undefined_jinja=True): +def load_file_regex( + config, + load_file, + regex_pattern, + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): try: load_file = _find_file(load_file, from_recipe_dir, recipe_dir, config) except FileNotFoundError as e: @@ -203,8 +268,17 @@ def load_file_regex(config, load_file, regex_pattern, from_recipe_dir=False, cached_env_dependencies = {} -def pin_compatible(m, package_name, lower_bound=None, upper_bound=None, min_pin='x.x.x.x.x.x', - max_pin='x', permit_undefined_jinja=False, exact=False, bypass_env_check=False): +def pin_compatible( + m, + package_name, + lower_bound=None, + upper_bound=None, + min_pin="x.x.x.x.x.x", + max_pin="x", + permit_undefined_jinja=False, + exact=False, + bypass_env_check=False, +): """dynamically pin based on currently installed version. only mandatory input is package_name. @@ -223,46 +297,60 @@ def pin_compatible(m, package_name, lower_bound=None, upper_bound=None, min_pin= # There are two cases considered here (so far): # 1. Good packages that follow semver style (if not philosophy). For example, 1.2.3 # 2. Evil packages that cram everything alongside a single major version. 
For example, 9b - key = (m.name(), HashableDict(m.config.variant)) + key = (m.name(), deepfreeze(m.config.variant)) if key in cached_env_dependencies: pins = cached_env_dependencies[key] else: if m.is_cross and not m.build_is_host: - pins, _, _ = get_env_dependencies(m, 'host', m.config.variant) + pins, _, _ = get_env_dependencies(m, "host", m.config.variant) else: - pins, _, _ = get_env_dependencies(m, 'build', m.config.variant) + pins, _, _ = get_env_dependencies(m, "build", m.config.variant) if m.build_is_host: - host_pins, _, _ = get_env_dependencies(m, 'host', m.config.variant) + host_pins, _, _ = get_env_dependencies(m, "host", m.config.variant) pins.extend(host_pins) cached_env_dependencies[key] = pins - versions = {p.split(' ')[0]: p.split(' ')[1:] for p in pins} + versions = {p.split(" ")[0]: p.split(" ")[1:] for p in pins} if versions: if exact and versions.get(package_name): - compatibility = ' '.join(versions[package_name]) + compatibility = " ".join(versions[package_name]) else: version = lower_bound or versions.get(package_name) if version: - if hasattr(version, '__iter__') and not isinstance(version, str): + if hasattr(version, "__iter__") and not isinstance(version, str): version = version[0] else: version = str(version) if upper_bound: if min_pin or lower_bound: compatibility = ">=" + str(version) + "," - compatibility += f'<{upper_bound}' + compatibility += f"<{upper_bound}" else: compatibility = apply_pin_expressions(version, min_pin, max_pin) - if (not compatibility and not permit_undefined_jinja and not bypass_env_check): - check = re.compile(r'pin_compatible\s*\(\s*[''"]{}[''"]'.format(package_name)) + if not compatibility and not permit_undefined_jinja and not bypass_env_check: + check = re.compile(rf'pin_compatible\s*\(\s*["]{package_name}["]') if check.search(m.extract_requirements_text()): - raise RuntimeError("Could not get compatibility information for {} package. " - "Is it one of your host dependencies?".format(package_name)) - return " ".join((package_name, compatibility)) if compatibility is not None else package_name + raise RuntimeError( + f"Could not get compatibility information for {package_name} package. " + "Is it one of your host dependencies?" + ) + return ( + " ".join((package_name, compatibility)) + if compatibility is not None + else package_name + ) -def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min_pin, max_pin, - exact, permit_undefined_jinja, skip_build_id=False): +def pin_subpackage_against_outputs( + metadata, + matching_package_keys, + outputs, + min_pin, + max_pin, + exact, + permit_undefined_jinja, + skip_build_id=False, +): pin = None if matching_package_keys: # two ways to match: @@ -278,8 +366,9 @@ def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min # name, used vars+values). It used to be (package name, variant) - # but that was really big and hard to look at. 
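# [Editor's illustrative sketch, not part of the patch.] pin_compatible above
# delegates to conda_build.utils.apply_pin_expressions to turn an installed version
# plus min_pin/max_pin expressions into a constraint string. A simplified
# approximation of that behaviour (the real helper also pads the upper bound with
# an alpha suffix such as ".0a0" and copes with non-numeric version segments):
def approx_pin(version, min_pin="x.x.x.x.x.x", max_pin="x"):
    parts = version.split(".")
    lower = ".".join(parts[: min(len(parts), min_pin.count("x"))])
    upper = parts[: max_pin.count("x")]
    upper[-1] = str(int(upper[-1]) + 1)
    return f">={lower},<{'.'.join(upper)}"

print(approx_pin("1.2.3", min_pin="x.x", max_pin="x"))      # >=1.2,<2
print(approx_pin("1.2.3", min_pin="x.x.x", max_pin="x.x"))  # >=1.2.3,<1.3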
shared_vars = set(variant.keys()) & set(metadata.config.variant.keys()) - if not shared_vars or all(variant[sv] == metadata.config.variant[sv] - for sv in shared_vars): + if not shared_vars or all( + variant[sv] == metadata.config.variant[sv] for sv in shared_vars + ): key = (pkg_name, variant) break @@ -289,20 +378,32 @@ def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min pin = None else: if exact: - pin = " ".join([sp_m.name(), sp_m.version(), - sp_m.build_id() if not skip_build_id else str(sp_m.build_number())]) + pin = " ".join( + [ + sp_m.name(), + sp_m.version(), + sp_m.build_id() + if not skip_build_id + else str(sp_m.build_number()), + ] + ) else: - pin = "{} {}".format(sp_m.name(), - apply_pin_expressions(sp_m.version(), min_pin, - max_pin)) + pin = f"{sp_m.name()} {apply_pin_expressions(sp_m.version(), min_pin, max_pin)}" else: pin = matching_package_keys[0][0] return pin -def pin_subpackage(metadata, subpackage_name, min_pin='x.x.x.x.x.x', max_pin='x', - exact=False, permit_undefined_jinja=False, allow_no_other_outputs=False, - skip_build_id=False): +def pin_subpackage( + metadata, + subpackage_name, + min_pin="x.x.x.x.x.x", + max_pin="x", + exact=False, + permit_undefined_jinja=False, + allow_no_other_outputs=False, + skip_build_id=False, +): """allow people to specify pinnings based on subpackages that are defined in the recipe. For example, given a compiler package, allow it to specify either a compatible or exact @@ -310,78 +411,96 @@ def pin_subpackage(metadata, subpackage_name, min_pin='x.x.x.x.x.x', max_pin='x' """ pin = None - if not hasattr(metadata, 'other_outputs'): + if not hasattr(metadata, "other_outputs"): if allow_no_other_outputs: pin = subpackage_name else: - raise ValueError("Bug in conda-build: we need to have info about other outputs in " - "order to allow pinning to them. It's not here.") + raise ValueError( + "Bug in conda-build: we need to have info about other outputs in " + "order to allow pinning to them. It's not here." + ) else: # two ways to match: # 1. only one other output named the same as the subpackage_name from the key # 2. whole key matches (both subpackage name and variant) keys = list(metadata.other_outputs.keys()) matching_package_keys = [k for k in keys if k[0] == subpackage_name] - pin = pin_subpackage_against_outputs(metadata, matching_package_keys, - metadata.other_outputs, min_pin, max_pin, - exact, permit_undefined_jinja, - skip_build_id=skip_build_id) + pin = pin_subpackage_against_outputs( + metadata, + matching_package_keys, + metadata.other_outputs, + min_pin, + max_pin, + exact, + permit_undefined_jinja, + skip_build_id=skip_build_id, + ) if not pin: pin = subpackage_name if not permit_undefined_jinja and not allow_no_other_outputs: - raise ValueError("Didn't find subpackage version info for '{}', which is used in a" - " pin_subpackage expression. Is it actually a subpackage? If not, " - "you want pin_compatible instead.".format(subpackage_name)) + raise ValueError( + f"Didn't find subpackage version info for '{subpackage_name}', which is used in a" + " pin_subpackage expression. Is it actually a subpackage? If not, " + "you want pin_compatible instead." 
+ ) return pin def native_compiler(language, config): compiler = language - for platform in [config.platform, config.platform.split('-')[0]]: + for platform in [config.platform, config.platform.split("-")[0]]: try: compiler = DEFAULT_COMPILERS[platform][language] break except KeyError: continue - if hasattr(compiler, 'keys'): - compiler = compiler.get(config.variant.get('python', 'nope'), 'vs2017') + if hasattr(compiler, "keys"): + compiler = compiler.get(config.variant.get("python", "nope"), "vs2017") return compiler -def compiler(language, config, permit_undefined_jinja=False): - """Support configuration of compilers. This is somewhat platform specific. +def _target(language, config, permit_undefined_jinja=False, component="compiler"): + """Support configuration of compilers/stdlib. This is somewhat platform specific. - Native compilers never list their host - it is always implied. Generally, they are + Native compilers/stdlib never list their host - it is always implied. Generally, they are metapackages, pointing at a package that does specify the host. These in turn may be metapackages, pointing at a package where the host is the same as the target (both being the native architecture). """ - compiler = native_compiler(language, config) + if component == "compiler": + package_prefix = native_compiler(language, config) + else: + package_prefix = language + version = None if config.variant: - target_platform = config.variant.get('target_platform', config.subdir) - language_compiler_key = f'{language}_compiler' - # fall back to native if language-compiler is not explicitly set in variant - compiler = config.variant.get(language_compiler_key, compiler) - version = config.variant.get(language_compiler_key + '_version') + target_platform = config.variant.get("target_platform", config.subdir) + language_key = f"{language}_{component}" + # fall back to native if language-key is not explicitly set in variant + package_prefix = config.variant.get(language_key, package_prefix) + version = config.variant.get(language_key + "_version") else: target_platform = config.subdir - # support cross compilers. A cross-compiler package will have a name such as + # support cross components. 
A cross package will have a name such as # gcc_target # gcc_linux-cos6-64 - compiler = '_'.join([compiler, target_platform]) + package = f"{package_prefix}_{target_platform}" if version: - compiler = ' '.join((compiler, version)) - compiler = ensure_valid_spec(compiler, warn=False) - return compiler + package = f"{package} {version}" + package = ensure_valid_spec(package, warn=False) + return package + + +# ensure we have compiler in namespace +compiler = partial(_target, component="compiler") def ccache(method, config, permit_undefined_jinja=False): config.ccache_method = method - return 'ccache' + return "ccache" def cdt(package_name, config, permit_undefined_jinja=False): @@ -431,27 +550,26 @@ def cdt(package_name, config, permit_undefined_jinja=False): } """ # NOQA - cdt_name = 'cos6' + cdt_name = "cos6" arch = config.host_arch or config.arch - if arch == 'ppc64le' or arch == 'aarch64' or arch == 'ppc64' or arch == 's390x': - cdt_name = 'cos7' + if arch == "ppc64le" or arch == "aarch64" or arch == "ppc64" or arch == "s390x": + cdt_name = "cos7" cdt_arch = arch else: - cdt_arch = 'x86_64' if arch == '64' else 'i686' + cdt_arch = "x86_64" if arch == "64" else "i686" if config.variant: - cdt_name = config.variant.get('cdt_name', cdt_name) - cdt_arch = config.variant.get('cdt_arch', cdt_arch) - if ' ' in package_name: - name = package_name.split(' ')[0] - ver_build = package_name.split(' ')[1:] - result = (name + '-' + cdt_name + '-' + cdt_arch + ' ' + ' '.join(ver_build)) + cdt_name = config.variant.get("cdt_name", cdt_name) + cdt_arch = config.variant.get("cdt_arch", cdt_arch) + if " " in package_name: + name = package_name.split(" ")[0] + ver_build = package_name.split(" ")[1:] + result = name + "-" + cdt_name + "-" + cdt_arch + " " + " ".join(ver_build) else: - result = (package_name + '-' + cdt_name + '-' + cdt_arch) + result = package_name + "-" + cdt_name + "-" + cdt_arch return result -def resolved_packages(m, env, permit_undefined_jinja=False, - bypass_env_check=False): +def resolved_packages(m, env, permit_undefined_jinja=False, bypass_env_check=False): """Returns the final list of packages that are listed in host or build. This include all packages (including the indirect dependencies) that will be installed in the host or build environment. An example usage of this @@ -483,8 +601,8 @@ def resolved_packages(m, env, permit_undefined_jinja=False, - openssl 1.0.2n hb7f436b_0 - zlib 1.2.11 ha838bed_2 """ - if env not in ('host', 'build'): - raise ValueError('Only host and build dependencies are supported.') + if env not in ("host", "build"): + raise ValueError("Only host and build dependencies are supported.") package_names = [] @@ -496,11 +614,26 @@ def resolved_packages(m, env, permit_undefined_jinja=False, return package_names +def _toml_load(stream): + """ + Load .toml from a pathname. 
+ """ + if isinstance(stream, (TextIOBase, str)): + if isinstance(stream, TextIOBase): + data = stream.read() + else: + data = stream + return tomllib.loads(data) + + # tomllib prefers binary files + return tomllib.load(stream) + + _file_parsers = { "json": json.load, "yaml": yaml.safe_load, "yml": yaml.safe_load, - "toml": toml.load, + "toml": _toml_load, } @@ -515,9 +648,16 @@ def _load_data(stream: IO, fmt: str, *args, **kwargs) -> Any: return load(stream, *args, **kwargs) -def load_file_data(filename: str, fmt: Optional[str] = None, *args, config=None, - from_recipe_dir=False, recipe_dir=None, permit_undefined_jinja=True, - **kwargs): +def load_file_data( + filename: str, + fmt: str | None = None, + *args, + config=None, + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, + **kwargs, +): """Loads a file and returns the parsed data. For example to load file data from a JSON file, you can use any of: @@ -534,7 +674,9 @@ def load_file_data(filename: str, fmt: Optional[str] = None, *args, config=None, raise else: with open(file_path) as f: - return _load_data(f, fmt or pathlib.Path(filename).suffix.lstrip("."), *args, **kwargs) + return _load_data( + f, fmt or pathlib.Path(filename).suffix.lstrip("."), *args, **kwargs + ) def load_str_data(string: str, fmt: str, *args, **kwargs): @@ -549,48 +691,100 @@ def load_str_data(string: str, fmt: str, *args, **kwargs): return _load_data(StringIO(string), fmt, *args, **kwargs) -def context_processor(initial_metadata, recipe_dir, config, permit_undefined_jinja, - allow_no_other_outputs=False, bypass_env_check=False, skip_build_id=False, - variant=None): +def context_processor( + initial_metadata, + recipe_dir, + config, + permit_undefined_jinja, + allow_no_other_outputs=False, + bypass_env_check=False, + skip_build_id=False, + variant=None, +): """ Return a dictionary to use as context for jinja templates. initial_metadata: Augment the context with values from this MetaData object. Used to bootstrap metadata contents via multiple parsing passes. 
""" - ctx = get_environ(m=initial_metadata, for_env=False, skip_build_id=skip_build_id, - escape_backslash=True, variant=variant) + ctx = get_environ( + m=initial_metadata, + for_env=False, + skip_build_id=skip_build_id, + escape_backslash=True, + variant=variant, + ) environ = dict(os.environ) environ.update(get_environ(m=initial_metadata, skip_build_id=skip_build_id)) ctx.update( - load_setup_py_data=partial(load_setup_py_data, m=initial_metadata, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_setup_py_data=partial( + load_setup_py_data, + m=initial_metadata, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), # maintain old alias for backwards compatibility: - load_setuptools=partial(load_setuptools, m=initial_metadata, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_setuptools=partial( + load_setuptools, + m=initial_metadata, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), load_npm=load_npm, - load_file_regex=partial(load_file_regex, config=config, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), - load_file_data=partial(load_file_data, config=config, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_file_regex=partial( + load_file_regex, + config=config, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), + load_file_data=partial( + load_file_data, + config=config, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), load_str_data=load_str_data, - installed=get_installed_packages(os.path.join(config.host_prefix, 'conda-meta')), - pin_compatible=partial(pin_compatible, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - bypass_env_check=bypass_env_check), - pin_subpackage=partial(pin_subpackage, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - allow_no_other_outputs=allow_no_other_outputs, - skip_build_id=skip_build_id), - compiler=partial(compiler, config=config, permit_undefined_jinja=permit_undefined_jinja), + installed=get_installed_packages( + os.path.join(config.host_prefix, "conda-meta") + ), + pin_compatible=partial( + pin_compatible, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + bypass_env_check=bypass_env_check, + ), + pin_subpackage=partial( + pin_subpackage, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + allow_no_other_outputs=allow_no_other_outputs, + skip_build_id=skip_build_id, + ), + compiler=partial( + _target, + config=config, + permit_undefined_jinja=permit_undefined_jinja, + component="compiler", + ), + stdlib=partial( + _target, + config=config, + permit_undefined_jinja=permit_undefined_jinja, + component="stdlib", + ), cdt=partial(cdt, config=config, permit_undefined_jinja=permit_undefined_jinja), - ccache=partial(ccache, config=config, permit_undefined_jinja=permit_undefined_jinja), - resolved_packages=partial(resolved_packages, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - bypass_env_check=bypass_env_check), + ccache=partial( + ccache, config=config, permit_undefined_jinja=permit_undefined_jinja + ), + resolved_packages=partial( + resolved_packages, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + bypass_env_check=bypass_env_check, + ), time=time, datetime=datetime, - - environ=environ) + environ=environ, + ) return ctx diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 
542166dd30..ab101274ae 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -2,8 +2,9 @@ # SPDX-License-Identifier: BSD-3-Clause import re import string -from conda_build import exceptions -from conda_build.utils import comma_join + +from . import exceptions +from .utils import comma_join allowed_license_families = """ AGPL @@ -24,71 +25,68 @@ """.split() # regular expressions -gpl2_regex = re.compile('GPL[^3]*2') # match GPL2 -gpl3_regex = re.compile('GPL[^2]*3') # match GPL3 -gpl23_regex = re.compile('GPL[^2]*>= *2') # match GPL >= 2 -cc_regex = re.compile(r'CC\w+') # match CC -punk_regex = re.compile('[%s]' % re.escape(string.punctuation)) # removes punks +gpl2_regex = re.compile("GPL[^3]*2") # match GPL2 +gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 +gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 +cc_regex = re.compile(r"CC\w+") # match CC +punk_regex = re.compile(f"[{re.escape(string.punctuation)}]") # removes punks def match_gpl3(family): """True if family matches GPL3 or GPL >= 2, else False""" - return (gpl23_regex.search(family) or - gpl3_regex.search(family)) + return gpl23_regex.search(family) or gpl3_regex.search(family) def normalize(s): """Set to ALL CAPS, replace common GPL patterns, and strip""" s = s.upper() - s = re.sub('GENERAL PUBLIC LICENSE', 'GPL', s) - s = re.sub('LESSER *', 'L', s) - s = re.sub('AFFERO *', 'A', s) + s = re.sub("GENERAL PUBLIC LICENSE", "GPL", s) + s = re.sub("LESSER *", "L", s) + s = re.sub("AFFERO *", "A", s) return s.strip() def remove_special_characters(s): """Remove punctuation, spaces, tabs, and line feeds""" - s = punk_regex.sub(' ', s) - s = re.sub(r'\s+', '', s) + s = punk_regex.sub(" ", s) + s = re.sub(r"\s+", "", s) return s -def guess_license_family_from_index(index=None, - recognized=allowed_license_families): +def guess_license_family_from_index(index=None, recognized=allowed_license_families): """Return best guess of license_family from the conda package index. Note: Logic here is simple, and focuses on existing set of allowed families """ if isinstance(index, dict): - license_name = index.get('license_family', index.get('license')) + license_name = index.get("license_family", index.get("license")) else: # index argument is actually a string license_name = index return guess_license_family(license_name, recognized) -def guess_license_family(license_name=None, - recognized=allowed_license_families): +def guess_license_family(license_name=None, recognized=allowed_license_families): """Return best guess of license_family from the conda package index. 
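# [Editor's illustrative sketch, not part of the patch.] The normalization helpers
# above are what let free-form license strings collapse onto the allowed families:
# "GNU Lesser General Public License v2.1" becomes "GNU LGPL V2.1" before the
# GPL2/GPL3 regexes run. A quick standalone check of the same rules:
import re

gpl2 = re.compile("GPL[^3]*2")
gpl3 = re.compile("GPL[^2]*3")

def norm(s):
    s = s.upper()
    s = re.sub("GENERAL PUBLIC LICENSE", "GPL", s)
    s = re.sub("LESSER *", "L", s)
    s = re.sub("AFFERO *", "A", s)
    return s.strip()

print(norm("GNU Lesser General Public License v2.1"))            # GNU LGPL V2.1
print(bool(gpl3.search(norm("GPL-3.0-or-later"))))                # True
print(bool(gpl2.search(norm("GNU General Public License v2"))))   # True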
Note: Logic here is simple, and focuses on existing set of allowed families """ if license_name is None: - return 'NONE' + return "NONE" license_name = normalize(license_name) # Handle GPL families as special cases # Remove AGPL and LGPL before looking for GPL2 and GPL3 - sans_lgpl = re.sub('[A,L]GPL', '', license_name) + sans_lgpl = re.sub("[A,L]GPL", "", license_name) if match_gpl3(sans_lgpl): - return 'GPL3' + return "GPL3" elif gpl2_regex.search(sans_lgpl): - return 'GPL2' + return "GPL2" elif cc_regex.search(license_name): - return 'CC' + return "CC" license_name = remove_special_characters(license_name) for family in recognized: @@ -97,17 +95,21 @@ def guess_license_family(license_name=None, for family in recognized: if license_name in remove_special_characters(family): return family - return 'OTHER' + return "OTHER" def ensure_valid_license_family(meta): try: - license_family = meta['about']['license_family'] + license_family = meta["about"]["license_family"] except KeyError: return - allowed_families = [remove_special_characters(normalize(fam)) - for fam in allowed_license_families] + allowed_families = [ + remove_special_characters(normalize(fam)) for fam in allowed_license_families + ] if remove_special_characters(normalize(license_family)) not in allowed_families: - raise RuntimeError(exceptions.indent( - "about/license_family '%s' not allowed. Allowed families are %s." % - (license_family, comma_join(sorted(allowed_license_families))))) + raise RuntimeError( + exceptions.indent( + f"about/license_family '{license_family}' not allowed. " + f"Allowed families are {comma_join(sorted(allowed_license_families))}." + ) + ) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 6277601647..2552682840 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1,36 +1,51 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict -import contextlib +from __future__ import annotations + import copy -from functools import lru_cache import hashlib import json import os -from os.path import isfile, join import re import sys import time +import warnings +from collections import OrderedDict +from functools import lru_cache +from os.path import isfile, join +from typing import TYPE_CHECKING, NamedTuple, overload from bs4 import UnicodeDammit - -from .conda_interface import md5_file -from .conda_interface import non_x86_linux_machines -from .conda_interface import MatchSpec -from .conda_interface import envs_dirs - -from conda_build import exceptions, utils, variants, environ -from conda_build.features import feature_list -from conda_build.config import Config, get_or_merge_config -from conda_build.utils import ( +from conda.base.context import context +from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec +from frozendict import deepfreeze + +from . 
import exceptions, utils +from .config import Config, get_or_merge_config +from .features import feature_list +from .license_family import ensure_valid_license_family +from .utils import ( + DEFAULT_SUBDIRS, ensure_list, - find_recipe, expand_globs, + find_recipe, get_installed_packages, - HashableDict, insert_variant_versions, + on_win, ) -from conda_build.license_family import ensure_valid_license_family +from .variants import ( + dict_of_lists_to_list_of_dicts, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, + find_used_variables_in_text, + get_default_variant, + get_vars, + list_of_dicts_to_dict_of_lists, +) + +if TYPE_CHECKING: + from typing import Any, Literal try: import yaml @@ -41,11 +56,37 @@ ) try: - loader = yaml.CLoader -except: - loader = yaml.Loader + Loader = yaml.CLoader +except AttributeError: + Loader = yaml.Loader -on_win = sys.platform == "win32" + +class StringifyNumbersLoader(Loader): + @classmethod + def remove_implicit_resolver(cls, tag): + if "yaml_implicit_resolvers" not in cls.__dict__: + cls.yaml_implicit_resolvers = { + k: v[:] for k, v in cls.yaml_implicit_resolvers.items() + } + for ch in tuple(cls.yaml_implicit_resolvers): + resolvers = [(t, r) for t, r in cls.yaml_implicit_resolvers[ch] if t != tag] + if resolvers: + cls.yaml_implicit_resolvers[ch] = resolvers + else: + del cls.yaml_implicit_resolvers[ch] + + @classmethod + def remove_constructor(cls, tag): + if "yaml_constructors" not in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + if tag in cls.yaml_constructors: + del cls.yaml_constructors[tag] + + +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:int") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:int") # arches that don't follow exact names in the subdir need to be mapped here ARCH_MAP = {"32": "x86", "64": "x86_64"} @@ -79,42 +120,68 @@ used_vars_cache = {} -def ns_cfg(config): +def get_selectors(config: Config) -> dict[str, bool]: + """Aggregates selectors for use in recipe templating. + + Derives selectors from the config and variants to be injected + into the Jinja environment prior to templating. + + Args: + config (Config): The config object + + Returns: + dict[str, bool]: Dictionary of on/off selectors for Jinja + """ # Remember to update the docs of any of this changes plat = config.host_subdir d = dict( - linux=plat.startswith("linux-"), linux32=bool(plat == "linux-32"), linux64=bool(plat == "linux-64"), arm=plat.startswith("linux-arm"), - osx=plat.startswith("osx-"), - unix=plat.startswith(("linux-", "osx-")), - win=plat.startswith("win-"), + unix=plat.startswith(("linux-", "osx-", "emscripten-")), win32=bool(plat == "win-32"), win64=bool(plat == "win-64"), - x86=plat.endswith(("-32", "-64")), - x86_64=plat.endswith("-64"), os=os, environ=os.environ, nomkl=bool(int(os.environ.get("FEATURE_NOMKL", False))), ) - defaults = variants.get_default_variant(config) + # Add the current platform to the list of subdirs to enable conda-build + # to bootstrap new platforms without a new conda release. 
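# [Editor's illustrative sketch, not part of the patch.] Just below, get_selectors
# derives one boolean selector per known OS and per known arch from the subdir
# list, so a host_subdir of "linux-aarch64" yields linux=True and aarch64=True with
# everything else False. The same derivation in isolation, with a hard-coded subdir
# list (the real code uses conda_build.utils.DEFAULT_SUBDIRS plus the current
# platform):
ARCH_MAP = {"32": "x86", "64": "x86_64"}
subdirs = ["linux-64", "linux-aarch64", "osx-64", "osx-arm64", "win-64"]
plat = "linux-aarch64"

selectors = {}
for subdir_os in {s.split("-")[0] for s in subdirs}:
    selectors[subdir_os] = plat.startswith(f"{subdir_os}-")
for arch in {s.split("-")[1] for s in subdirs}:
    selectors[ARCH_MAP.get(arch, arch)] = plat.endswith(f"-{arch}")

print({k: v for k, v in sorted(selectors.items()) if v})
# -> {'aarch64': True, 'linux': True}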
+ subdirs = list(DEFAULT_SUBDIRS) + [plat] + + # filter out noarch and other weird subdirs + subdirs = [subdir for subdir in subdirs if "-" in subdir] + + subdir_oses = {subdir.split("-")[0] for subdir in subdirs} + subdir_archs = {subdir.split("-")[1] for subdir in subdirs} + + for subdir_os in subdir_oses: + d[subdir_os] = plat.startswith(f"{subdir_os}-") + + for arch in subdir_archs: + arch_full = ARCH_MAP.get(arch, arch) + d[arch_full] = plat.endswith(f"-{arch}") + if arch == "32": + d["x86"] = plat.endswith(("-32", "-64")) + + defaults = get_default_variant(config) py = config.variant.get("python", defaults["python"]) # there are times when python comes in as a tuple if not hasattr(py, "split"): py = py[0] # go from "3.6 *_cython" -> "36" # or from "3.6.9" -> "36" - py = int("".join(py.split(" ")[0].split(".")[:2])) + py_major, py_minor, *_ = py.split(" ")[0].split(".") + py = int(f"{py_major}{py_minor}") d["build_platform"] = config.build_subdir d.update( dict( py=py, - py3k=bool(30 <= py < 40), - py2k=bool(20 <= py < 30), + py3k=bool(py_major == "3"), + py2k=bool(py_major == "2"), py26=bool(py == 26), py27=bool(py == 27), py33=bool(py == 33), @@ -141,9 +208,6 @@ def ns_cfg(config): d["lua"] = lua d["luajit"] = bool(lua[0] == "2") - for machine in non_x86_linux_machines: - d[machine] = bool(plat.endswith("-%s" % machine)) - for feature, value in feature_list: d[feature] = value d.update(os.environ) @@ -162,6 +226,15 @@ def ns_cfg(config): return d +def ns_cfg(config: Config) -> dict[str, bool]: + warnings.warn( + "`conda_build.metadata.ns_cfg` is pending deprecation and will be removed in a " + "future release. Please use `conda_build.metadata.get_selectors` instead.", + PendingDeprecationWarning, + ) + return get_selectors(config) + + # Selectors must be either: # - at end of the line # - embedded (anywhere) within a comment @@ -204,46 +277,74 @@ def eval_selector(selector_string, namespace, variants_in_place): return eval_selector(next_string, namespace, variants_in_place) -def select_lines(data, namespace, variants_in_place): - lines = [] - - for i, line in enumerate(data.splitlines()): +@lru_cache(maxsize=None) +def _split_line_selector(text: str) -> tuple[tuple[str | None, str], ...]: + lines: list[tuple[str | None, str]] = [] + for line in text.splitlines(): line = line.rstrip() + # skip comment lines, include a blank line as a placeholder + if line.lstrip().startswith("#"): + lines.append((None, "")) + continue + + # include blank lines + if not line: + lines.append((None, "")) + continue + + # user may have quoted entire line to make YAML happy trailing_quote = "" if line and line[-1] in ("'", '"'): trailing_quote = line[-1] - if line.lstrip().startswith("#"): - # Don't bother with comment only lines - continue - m = sel_pat.match(line) - if m: - cond = m.group(3) - try: - if eval_selector(cond, namespace, variants_in_place): - lines.append(m.group(1) + trailing_quote) - except Exception as e: - sys.exit( - """\ -Error: Invalid selector in meta.yaml line %d: -offending line: -%s -exception: -%s -""" - % (i + 1, line, str(e)) - ) + # Checking for "[" and "]" before regex matching every line is a bit faster. 
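# [Editor's illustrative sketch, not part of the patch.] _split_line_selector above
# is lru_cached so the per-line splitting is shared across calls, and select_lines
# (continued below) evaluates each distinct selector expression only once per call,
# reusing the cached result for every later line carrying the same selector. The
# same memoization pattern in isolation, with eval() standing in for eval_selector:
namespace = {"win": False, "linux": True, "py": 311}
selector_cache = {}

def evaluate(selector):
    try:
        return selector_cache[selector]
    except KeyError:
        value = bool(eval(selector, {}, namespace))  # eval_selector in the real code
        selector_cache[selector] = value
        return value

for sel in ["win", "linux and py >= 310", "win", "linux and py >= 310"]:
    print(sel, "->", evaluate(sel))
# the repeated selectors hit the cache instead of being re-evaluated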
+ if ( + ("[" in line and "]" in line) + and (match := sel_pat.match(line)) + and (selector := match.group(3)) + ): + # found a selector + lines.append((selector, (match.group(1) + trailing_quote).rstrip())) else: + # no selector found + lines.append((None, line)) + return tuple(lines) + + +def select_lines(text: str, namespace: dict[str, Any], variants_in_place: bool) -> str: + lines = [] + selector_cache: dict[str, bool] = {} + for i, (selector, line) in enumerate(_split_line_selector(text)): + if not selector: + # no selector? include line as is lines.append(line) + else: + # include lines with a selector that evaluates to True + try: + if selector_cache[selector]: + lines.append(line) + except KeyError: + # KeyError: cache miss + try: + value = bool(eval_selector(selector, namespace, variants_in_place)) + selector_cache[selector] = value + if value: + lines.append(line) + except Exception as e: + sys.exit( + f"Error: Invalid selector in meta.yaml line {i + 1}:\n" + f"offending line:\n" + f"{line}\n" + f"exception:\n" + f"{e.__class__.__name__}: {e}\n" + ) return "\n".join(lines) + "\n" def yamlize(data): try: - with stringify_numbers(): - loaded_data = yaml.load(data, Loader=loader) - return loaded_data + return yaml.load(data, Loader=StringifyNumbersLoader) except yaml.error.YAMLError as e: if "{{" in data: try: @@ -261,7 +362,7 @@ def ensure_valid_fields(meta): pin_depends = meta.get("build", {}).get("pin_depends", "") if pin_depends and pin_depends not in ("", "record", "strict"): raise RuntimeError( - "build/pin_depends must be 'record' or 'strict' - " "not '%s'" % pin_depends + f"build/pin_depends must be 'record' or 'strict' - not '{pin_depends}'" ) @@ -287,9 +388,7 @@ def _trim_None_strings(meta_dict): meta_dict[key] = keep else: log.debug( - "found unrecognized data type in dictionary: {}, type: {}".format( - value, type(value) - ) + f"found unrecognized data type in dictionary: {value}, type: {type(value)}" ) return meta_dict @@ -298,7 +397,7 @@ def ensure_valid_noarch_value(meta): build_noarch = meta.get("build", {}).get("noarch") if build_noarch and build_noarch not in NOARCH_TYPES: raise exceptions.CondaBuildException( - "Invalid value for noarch: %s" % build_noarch + f"Invalid value for noarch: {build_noarch}" ) @@ -348,10 +447,10 @@ def _variants_equal(metadata, output_metadata): def ensure_matching_hashes(output_metadata): envs = "build", "host", "run" problemos = [] - for (_, m) in output_metadata.values(): - for (_, om) in output_metadata.values(): + for _, m in output_metadata.values(): + for _, om in output_metadata.values(): if m != om: - run_exports = om.meta.get("build", {}).get("run_exports", []) + run_exports = om.get_value("build/run_exports", []) if hasattr(run_exports, "keys"): run_exports_list = [] for export_type in utils.RUN_EXPORTS_TYPES: @@ -384,7 +483,11 @@ def ensure_matching_hashes(output_metadata): def parse(data, config, path=None): - data = select_lines(data, ns_cfg(config), variants_in_place=bool(config.variant)) + data = select_lines( + data, + get_selectors(config), + variants_in_place=bool(config.variant), + ) res = yamlize(data) # ensure the result is a dict if res is None: @@ -402,14 +505,14 @@ def parse(data, config, path=None): or (hasattr(res[field], "__iter__") and not isinstance(res[field], str)) ): raise RuntimeError( - "The %s field should be a dict or list of dicts, not " - "%s in file %s." 
% (field, res[field].__class__.__name__, path) + f"The {field} field should be a dict or list of dicts, not " + f"{res[field].__class__.__name__} in file {path}." ) else: if not isinstance(res[field], dict): raise RuntimeError( - "The %s field should be a dict, not %s in file %s." - % (field, res[field].__class__.__name__, path) + f"The {field} field should be a dict, not " + f"{res[field].__class__.__name__} in file {path}." ) ensure_valid_fields(res) @@ -446,6 +549,8 @@ def parse(data, config, path=None): "svn_url": str, "svn_rev": None, "svn_ignore_externals": None, + "svn_username": None, + "svn_password": None, "folder": None, "no_hoist": None, "patches": list, @@ -486,7 +591,7 @@ def parse(data, config, path=None): "provides_features": dict, "force_use_keys": list, "force_ignore_keys": list, - "merge_build_host": bool, + "merge_build_host": None, "pre-link": str, "post-link": str, "pre-unlink": str, @@ -556,6 +661,7 @@ def parse(data, config, path=None): "prelink_message": None, "readme": None, }, + "extra": {}, } # Fields that may either be a dictionary or a list of dictionaries. @@ -669,7 +775,6 @@ def build_string_from_metadata(metadata): ("mro", "mro-base", 3), ("mro", "mro-base_impl", 3), ): - for ms in metadata.ms_depends("run"): for name in ensure_list(names): if ms.name == name and name in build_pkg_names: @@ -716,14 +821,14 @@ def build_string_from_metadata(metadata): # but we don't presently have an API there. def _get_env_path(env_name_or_path): if not os.path.isdir(env_name_or_path): - for envs_dir in list(envs_dirs) + [os.getcwd()]: + for envs_dir in list(context.envs_dirs) + [os.getcwd()]: path = os.path.join(envs_dir, env_name_or_path) if os.path.isdir(path): env_name_or_path = path break bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta") if not os.path.isdir(bootstrap_metadir): - print("Bootstrap environment '%s' not found" % env_name_or_path) + print(f"Bootstrap environment '{env_name_or_path}' not found") sys.exit(1) return env_name_or_path @@ -749,7 +854,7 @@ def toposort(output_metadata_map): will naturally lead to non-overlapping files in each package and also the correct files being present during the install and test procedures, provided they are run in this order.""" - from .conda_interface import _toposort + from conda.common.toposort import _toposort # We only care about the conda packages built by this recipe. Non-conda # packages get sorted to the end. 
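# [Editor's illustrative sketch, not part of the patch.] toposort above orders a
# recipe's outputs so that any output appearing in another output's requirements is
# built (and its files claimed) first. The real code uses conda.common.toposort;
# the standard-library graphlib shows the same ordering, with made-up output names:
from graphlib import TopologicalSorter

# output name -> run requirements that are themselves outputs of this recipe
outputs = {
    "libfoo": [],
    "foo": ["libfoo"],
    "foo-devel": ["libfoo", "foo"],
}
print(list(TopologicalSorter(outputs).static_order()))
# -> ['libfoo', 'foo', 'foo-devel']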
@@ -802,7 +907,10 @@ def toposort(output_metadata_map): return result -def get_output_dicts_from_metadata(metadata, outputs=None): +def get_output_dicts_from_metadata( + metadata: MetaData, + outputs: list[dict[str, Any]] | None = None, +) -> list[dict[str, Any]]: outputs = outputs or metadata.get_section("outputs") if not outputs: @@ -823,8 +931,8 @@ def get_output_dicts_from_metadata(metadata, outputs=None): outputs.append(OrderedDict(name=metadata.name())) for out in outputs: if ( - "package:" in metadata.get_recipe_text() - and out.get("name") == metadata.name() + out.get("name") == metadata.name() + and "package:" in metadata.get_recipe_text() ): combine_top_level_metadata_with_output(metadata, out) return outputs @@ -890,15 +998,8 @@ def finalize_outputs_pass( fm = om if not output_d.get("type") or output_d.get("type").startswith("conda"): outputs[ - ( - fm.name(), - HashableDict( - { - k: copy.deepcopy(fm.config.variant[k]) - for k in fm.get_used_vars() - } - ), - ) + fm.name(), + deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}), ] = (output_d, fm) except exceptions.DependencyNeedsBuildingError as e: if not permit_unsatisfiable_variants: @@ -907,18 +1008,16 @@ def finalize_outputs_pass( log = utils.get_logger(__name__) log.warn( "Could not finalize metadata due to missing dependencies: " - "{}".format(e.packages) + f"{e.packages}" ) outputs[ - ( - metadata.name(), - HashableDict( - { - k: copy.deepcopy(metadata.config.variant[k]) - for k in metadata.get_used_vars() - } - ), - ) + metadata.name(), + deepfreeze( + { + k: metadata.config.variant[k] + for k in metadata.get_used_vars() + } + ), ] = (output_d, metadata) # in-place modification base_metadata.other_outputs = outputs @@ -926,12 +1025,8 @@ def finalize_outputs_pass( final_outputs = OrderedDict() for k, (out_d, m) in outputs.items(): final_outputs[ - ( - m.name(), - HashableDict( - {k: copy.deepcopy(m.config.variant[k]) for k in m.get_used_vars()} - ), - ) + m.name(), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (out_d, m) return final_outputs @@ -949,6 +1044,7 @@ def get_updated_output_dict_from_reparsed_metadata(original_dict, new_outputs): return output_d +@lru_cache(maxsize=200) def _filter_recipe_text(text, extract_pattern=None): if extract_pattern: match = re.search(extract_pattern, text, flags=re.MULTILINE | re.DOTALL) @@ -1031,23 +1127,8 @@ def _hash_dependencies(hashing_dependencies, hash_length): return f"h{hash_.hexdigest()}"[: hash_length + 1] -@contextlib.contextmanager -def stringify_numbers(): - # ensure that numbers are not interpreted as ints or floats. That trips up versions - # with trailing zeros. 
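# [Editor's illustrative sketch, not part of the patch.] The output maps above are
# now keyed by (package name, frozen variant) tuples: frozendict.deepfreeze replaces
# the old HashableDict so a plain variant dict can serve as part of a dictionary
# key. Assuming the frozendict dependency added by this patch is installed:
from frozendict import deepfreeze

variant = {"python": "3.11", "numpy": "1.26"}
used_vars = ("python",)
key = ("mypkg", deepfreeze({k: variant[k] for k in used_vars}))

outputs = {key: "finalized metadata placeholder"}
print(outputs[("mypkg", deepfreeze({"python": "3.11"}))])
# equal frozen dicts hash equal, so the lookup succeeds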
- implicit_resolver_backup = loader.yaml_implicit_resolvers.copy() - for ch in list("0123456789"): - if ch in loader.yaml_implicit_resolvers: - del loader.yaml_implicit_resolvers[ch] - yield - for ch in list("0123456789"): - if ch in implicit_resolver_backup: - loader.yaml_implicit_resolvers[ch] = implicit_resolver_backup[ch] - - class MetaData: def __init__(self, path, config=None, variant=None): - self.undefined_jinja_vars = [] self.config = get_or_merge_config(config, variant=variant) @@ -1075,33 +1156,28 @@ def __init__(self, path, config=None, variant=None): # establish whether this recipe should squish build and host together @property - def is_cross(self): - return bool(self.get_depends_top_and_out("host")) or "host" in self.meta.get( - "requirements", {} + def is_cross(self) -> bool: + return bool( + self.get_depends_top_and_out("host") + or "host" in self.get_section("requirements") ) @property - def final(self): - return self.get_value("extra/final") + def final(self) -> bool: + return bool(self.get_value("extra/final")) @final.setter - def final(self, boolean): - extra = self.meta.get("extra", {}) - extra["final"] = boolean - self.meta["extra"] = extra + def final(self, value: bool) -> None: + self.meta.setdefault("extra", {})["final"] = bool(value) @property - def disable_pip(self): - return self.config.disable_pip or ( - "build" in self.meta and "disable_pip" in self.meta["build"] - ) + def disable_pip(self) -> bool: + return bool(self.config.disable_pip or self.get_value("build/disable_pip")) @disable_pip.setter - def disable_pip(self, value): - self.config.disable_pip = value - build = self.meta.get("build", {}) - build["disable_pip"] = value - self.meta["build"] = build + def disable_pip(self, value: bool) -> None: + self.config.disable_pip = bool(value) + self.meta.setdefault("build", {})["disable_pip"] = bool(value) def append_metadata_sections( self, sections_file_or_dict, merge, raise_on_clobber=False @@ -1127,10 +1203,9 @@ def append_metadata_sections( ) @property - def is_output(self): - self_name = self.name(fail_ok=True) - parent_name = self.meta.get("extra", {}).get("parent_recipe", {}).get("name") - return bool(parent_name) and parent_name != self_name + def is_output(self) -> str: + parent_name = self.get_value("extra/parent_recipe", {}).get("name") + return parent_name and parent_name != self.name() def parse_again( self, @@ -1197,17 +1272,16 @@ def parse_again( dependencies = _get_dependencies_from_environment(self.config.bootstrap) self.append_metadata_sections(dependencies, merge=True) - if "error_overlinking" in self.meta.get("build", {}): + if "error_overlinking" in self.get_section("build"): self.config.error_overlinking = self.meta["build"]["error_overlinking"] - if "error_overdepending" in self.meta.get("build", {}): + if "error_overdepending" in self.get_section("build"): self.config.error_overdepending = self.meta["build"]["error_overdepending"] self.validate_features() self.ensure_no_pip_requirements() def ensure_no_pip_requirements(self): - keys = "requirements/build", "requirements/run", "test/requires" - for key in keys: + for key in ("requirements/build", "requirements/run", "test/requires"): if any(hasattr(item, "keys") for item in (self.get_value(key) or [])): raise ValueError( "Dictionaries are not supported as values in requirements sections" @@ -1217,15 +1291,13 @@ def ensure_no_pip_requirements(self): def append_requirements(self): """For dynamic determination of build or run reqs, based on configuration""" - reqs = 
self.meta.get("requirements", {}) - run_reqs = reqs.get("run", []) + run_reqs = self.meta.setdefault("requirements", {}).setdefault("run", []) if ( - bool(self.get_value("build/osx_is_app", False)) + self.get_value("build/osx_is_app", False) and self.config.platform == "osx" + and "python.app" not in run_reqs ): - if "python.app" not in run_reqs: - run_reqs.append("python.app") - self.meta["requirements"] = reqs + run_reqs.append("python.app") def parse_until_resolved( self, allow_no_other_outputs=False, bypass_env_check=False @@ -1260,8 +1332,8 @@ def parse_until_resolved( bypass_env_check=bypass_env_check, ) sys.exit( - "Undefined Jinja2 variables remain ({}). Please enable " - "source downloading and try again.".format(self.undefined_jinja_vars) + f"Undefined Jinja2 variables remain ({self.undefined_jinja_vars}). Please enable " + "source downloading and try again." ) # always parse again at the end, too. @@ -1275,9 +1347,11 @@ def parse_until_resolved( @classmethod def fromstring(cls, metadata, config=None, variant=None): m = super().__new__(cls) - if not config: - config = Config() - m.meta = parse(metadata, config=config, path="", variant=variant) + m.path = "" + m._meta_path = "" + m.requirements_path = "" + config = config or Config(variant=variant) + m.meta = parse(metadata, config=config, path="") m.config = config m.parse_again(permit_undefined_jinja=True) return m @@ -1292,18 +1366,43 @@ def fromdict(cls, metadata, config=None, variant=None): m._meta_path = "" m.requirements_path = "" m.meta = sanitize(metadata) - - if not config: - config = Config(variant=variant) - - m.config = config + m.config = config or Config(variant=variant) m.undefined_jinja_vars = [] m.final = False - return m - def get_section(self, section): - return self.meta.get(section, {}) + @overload + def get_section(self, section: Literal["source", "outputs"]) -> list[dict]: ... + + @overload + def get_section( + self, + section: Literal[ + "package", + "build", + "requirements", + "app", + "test", + "about", + "extra", + ], + ) -> dict: ... 
+ + def get_section(self, name): + section = self.meta.get(name) + if name in OPTIONALLY_ITERABLE_FIELDS: + if not section: + return [] + elif isinstance(section, dict): + return [section] + elif not isinstance(section, list): + raise ValueError(f"Expected {name} to be a list") + else: + if not section: + return {} + elif not isinstance(section, dict): + raise ValueError(f"Expected {name} to be a dict") + return section def get_value(self, name, default=None, autotype=True): """ @@ -1323,7 +1422,9 @@ def get_value(self, name, default=None, autotype=True): index = None elif len(names) == 3: section, index, key = names - assert section == "source", "Section is not a list: " + section + assert section in OPTIONALLY_ITERABLE_FIELDS, ( + "Section is not a list: " + section + ) index = int(index) # get correct default @@ -1345,7 +1446,7 @@ def get_value(self, name, default=None, autotype=True): ) index = 0 - if len(section_data) == 0: + if not section_data: section_data = {} else: section_data = section_data[index] @@ -1377,7 +1478,7 @@ def check_field(key, section): if section == "extra": continue if section not in FIELDS: - raise ValueError("unknown section: %s" % section) + raise ValueError(f"unknown section: {section}") for key_or_dict in submeta: if section in OPTIONALLY_ITERABLE_FIELDS and isinstance( key_or_dict, dict @@ -1388,26 +1489,27 @@ def check_field(key, section): check_field(key_or_dict, section) return True - def name(self, fail_ok=False): - res = self.meta.get("package", {}).get("name", "") - if not res and not fail_ok: - sys.exit("Error: package/name missing in: %r" % self.meta_path) - res = str(res) - if res != res.lower(): - sys.exit("Error: package/name must be lowercase, got: %r" % res) - check_bad_chrs(res, "package/name") - return res - - def version(self): - res = str(self.get_value("package/version")) - if res is None: - sys.exit("Error: package/version missing in: %r" % self.meta_path) - check_bad_chrs(res, "package/version") - if self.final and res.startswith("."): + def name(self) -> str: + name = self.get_value("package/name", "") + if not name and self.final: + sys.exit(f"Error: package/name missing in: {self.meta_path!r}") + name = str(name) + if name != name.lower(): + sys.exit(f"Error: package/name must be lowercase, got: {name!r}") + check_bad_chrs(name, "package/name") + return name + + def version(self) -> str: + version = self.get_value("package/version", "") + if not version and not self.get_section("outputs") and self.final: + sys.exit(f"Error: package/version missing in: {self.meta_path!r}") + version = str(version) + check_bad_chrs(version, "package/version") + if self.final and version.startswith("."): raise ValueError( - "Fully-rendered version can't start with period - got %s", res + "Fully-rendered version can't start with period - got %s", version ) - return res + return version def build_number(self): number = self.get_value("build/number") @@ -1427,11 +1529,12 @@ def get_depends_top_and_out(self, typ): meta_requirements = ensure_list(self.get_value("requirements/" + typ, []))[:] req_names = {req.split()[0] for req in meta_requirements if req} extra_reqs = [] - # this is for the edge case of requirements for top-level being also partially defined in a similarly named output + # this is for the edge case of requirements for top-level being + # partially defined in a similarly named output if not self.is_output: matching_output = [ out - for out in self.meta.get("outputs", []) + for out in self.get_section("outputs") if out.get("name") == 
self.name() ] if matching_output: @@ -1468,7 +1571,7 @@ def ms_depends(self, typ="run"): try: ms = MatchSpec(spec) except AssertionError: - raise RuntimeError("Invalid package specification: %r" % spec) + raise RuntimeError(f"Invalid package specification: {spec!r}") except (AttributeError, ValueError) as e: raise RuntimeError( "Received dictionary as spec. Note that pip requirements are " @@ -1477,7 +1580,7 @@ def ms_depends(self, typ="run"): if ms.name == self.name() and not ( typ == "build" and self.config.host_subdir != self.config.build_subdir ): - raise RuntimeError("%s cannot depend on itself" % self.name()) + raise RuntimeError(f"{self.name()} cannot depend on itself") for name, ver in name_ver_list: if ms.name == name: if self.noarch: @@ -1486,20 +1589,18 @@ def ms_depends(self, typ="run"): for c in "=!@#$%^&*:;\"'\\|<>?/": if c in ms.name: sys.exit( - "Error: bad character '%s' in package name " - "dependency '%s'" % (c, ms.name) + f"Error: bad character '{c}' in package name " + f"dependency '{ms.name}'" ) parts = spec.split() if len(parts) >= 2: if parts[1] in {">", ">=", "=", "==", "!=", "<", "<="}: msg = ( - "Error: bad character '%s' in package version " - "dependency '%s'" % (parts[1], ms.name) + f"Error: bad character '{parts[1]}' in package version " + f"dependency '{ms.name}'" ) if len(parts) >= 3: - msg += "\nPerhaps you meant '{} {}{}'".format( - ms.name, parts[1], parts[2] - ) + msg += f"\nPerhaps you meant '{ms.name} {parts[1]}{parts[2]}'" sys.exit(msg) specs[spec] = ms return list(specs.values()) @@ -1557,9 +1658,17 @@ def get_hash_contents(self): # if dependencies are only 'target_platform' then ignore that. if dependencies == ["target_platform"]: - return {} + hash_contents = {} else: - return {key: self.config.variant[key] for key in dependencies} + hash_contents = {key: self.config.variant[key] for key in dependencies} + + # include virtual packages in run + run_reqs = self.meta.get("requirements", {}).get("run", []) + virtual_pkgs = [req for req in run_reqs if req.startswith("__")] + + # add name -> match spec mapping for virtual packages + hash_contents.update({pkg.split(" ")[0]: pkg for pkg in virtual_pkgs}) + return hash_contents def hash_dependencies(self): """With arbitrary pinning, we can't depend on the build string as done in @@ -1586,7 +1695,6 @@ def build_id(self): raise RuntimeError( f"Couldn't extract raw recipe text for {self.name()} output" ) - raw_recipe_text = self.extract_package_and_build_text() raw_manual_build_string = re.search(r"\s*string:", raw_recipe_text) # user setting their own build string. Don't modify it. 
if manual_build_string and not ( @@ -1600,7 +1708,7 @@ def build_id(self): out = build_string_from_metadata(self) if self.config.filename_hashing and self.final: hash_ = self.hash_dependencies() - if not re.findall("h[0-9a-f]{%s}" % self.config.hash_length, out): + if not re.findall(f"h[0-9a-f]{{{self.config.hash_length}}}", out): ret = out.rsplit("_", 1) try: int(ret[0]) @@ -1610,14 +1718,14 @@ def build_id(self): if len(ret) > 1: out = "_".join([out] + ret[1:]) else: - out = re.sub("h[0-9a-f]{%s}" % self.config.hash_length, hash_, out) + out = re.sub(f"h[0-9a-f]{{{self.config.hash_length}}}", hash_, out) return out def dist(self): return f"{self.name()}-{self.version()}-{self.build_id()}" def pkg_fn(self): - return "%s.tar.bz2" % self.dist() + return f"{self.dist()}.tar.bz2" def is_app(self): return bool(self.get_value("app/entry")) @@ -1625,7 +1733,9 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % md5_file(join(self.path, self.get_value("app/icon"))) + d["icon"] = "{}.png".format( + compute_sum(join(self.path, self.get_value("app/icon")), "md5") + ) for field, key in [ ("app/entry", "app_entry"), @@ -1696,7 +1806,7 @@ def has_prefix_files(self): ret = ensure_list(self.get_value("build/has_prefix_files", [])) if not isinstance(ret, list): raise RuntimeError("build/has_prefix_files should be a list of paths") - if sys.platform == "win32": + if on_win: if any("\\" in i for i in ret): raise RuntimeError( "build/has_prefix_files paths must use / " @@ -1706,18 +1816,20 @@ def has_prefix_files(self): def ignore_prefix_files(self): ret = self.get_value("build/ignore_prefix_files", False) - if type(ret) not in (list, bool): + if not isinstance(ret, (list, bool)): raise RuntimeError( "build/ignore_prefix_files should be boolean or a list of paths " "(optionally globs)" ) - if sys.platform == "win32": - if type(ret) is list and any("\\" in i for i in ret): + if on_win: + if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/ignore_prefix_files paths must use / " "as the path delimiter on Windows" ) - return expand_globs(ret, self.config.host_prefix) if type(ret) is list else ret + return ( + expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret + ) def always_include_files(self): files = ensure_list(self.get_value("build/always_include_files", [])) @@ -1736,21 +1848,23 @@ def ignore_verify_codes(self): def binary_relocation(self): ret = self.get_value("build/binary_relocation", True) - if type(ret) not in (list, bool): + if not isinstance(ret, (list, bool)): raise RuntimeError( "build/binary_relocation should be boolean or a list of paths " "(optionally globs)" ) - if sys.platform == "win32": - if type(ret) is list and any("\\" in i for i in ret): + if on_win: + if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/binary_relocation paths must use / " "as the path delimiter on Windows" ) - return expand_globs(ret, self.config.host_prefix) if type(ret) is list else ret + return ( + expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret + ) - def include_recipe(self): - return self.get_value("build/include_recipe", True) + def include_recipe(self) -> bool: + return bool(self.get_value("build/include_recipe", True)) def binary_has_prefix_files(self): ret = ensure_list(self.get_value("build/binary_has_prefix_files", [])) @@ -1758,7 +1872,7 @@ def binary_has_prefix_files(self): raise RuntimeError( "build/binary_has_prefix_files 
should be a list of paths" ) - if sys.platform == "win32": + if on_win: if any("\\" in i for i in ret): raise RuntimeError( "build/binary_has_prefix_files paths must use / " @@ -1766,8 +1880,8 @@ def binary_has_prefix_files(self): ) return expand_globs(ret, self.config.host_prefix) - def skip(self): - return self.get_value("build/skip", False) + def skip(self) -> bool: + return bool(self.get_value("build/skip", False)) def _get_contents( self, @@ -1798,10 +1912,10 @@ def _get_contents( with open(self.meta_path) as fd: return fd.read() - from conda_build.jinja_context import ( - context_processor, - UndefinedNeverFail, + from .jinja_context import ( FilteredLoader, + UndefinedNeverFail, + context_processor, ) path, filename = os.path.split(self.meta_path) @@ -1831,8 +1945,10 @@ def _get_contents( loader = FilteredLoader(jinja2.ChoiceLoader(loaders), config=self.config) env = jinja2.Environment(loader=loader, undefined=undefined_type) - env.globals.update(ns_cfg(self.config)) - env.globals.update(environ.get_dict(m=self, skip_build_id=skip_build_id)) + from .environ import get_dict + + env.globals.update(get_selectors(self.config)) + env.globals.update(get_dict(m=self, skip_build_id=skip_build_id)) env.globals.update({"CONDA_BUILD_STATE": "RENDER"}) env.globals.update( context_processor( @@ -1878,9 +1994,7 @@ def _get_contents( if "'None' has not attribute" in str(ex): ex = "Failed to run jinja context function" sys.exit( - "Error: Failed to render jinja template in {}:\n{}".format( - self.meta_path, str(ex) - ) + f"Error: Failed to render jinja template in {self.meta_path}:\n{str(ex)}" ) finally: if "CONDA_BUILD_STATE" in os.environ: @@ -1904,9 +2018,11 @@ def __repr__(self): @property def meta_path(self): - meta_path = self._meta_path or self.meta.get("extra", {}).get( - "parent_recipe", {} - ).get("path", "") + meta_path = ( + self._meta_path + # get the parent recipe path if this is a subpackage + or self.get_value("extra/parent_recipe", {}).get("path", "") + ) if meta_path and os.path.basename(meta_path) != self._meta_name: meta_path = os.path.join(meta_path, self._meta_name) return meta_path @@ -1927,12 +2043,21 @@ def uses_regex_in_meta(self): meta_text = UnicodeDammit(f.read()).unicode_markup return "load_file_regex" in meta_text + @property + def uses_load_file_data_in_meta(self): + meta_text = "" + if self.meta_path: + with open(self.meta_path, "rb") as f: + meta_text = UnicodeDammit(f.read()).unicode_markup + return "load_file_data" in meta_text + @property def needs_source_for_render(self): return ( self.uses_vcs_in_meta or self.uses_setup_py_in_meta or self.uses_regex_in_meta + or self.uses_load_file_data_in_meta ) @property @@ -1945,7 +2070,7 @@ def uses_jinja(self): return len(matches) > 0 @property - def uses_vcs_in_meta(self): + def uses_vcs_in_meta(self) -> Literal["git", "svn", "mercurial"] | None: """returns name of vcs used if recipe contains metadata associated with version control systems. If this metadata is present, a download/copy will be forced in parse_or_try_download. 
""" @@ -1957,7 +2082,7 @@ def uses_vcs_in_meta(self): meta_text = UnicodeDammit(f.read()).unicode_markup for _vcs in vcs_types: matches = re.findall(rf"{_vcs.upper()}_[^\.\s\'\"]+", meta_text) - if len(matches) > 0 and _vcs != self.meta["package"]["name"]: + if len(matches) > 0 and _vcs != self.get_value("package/name"): if _vcs == "hg": _vcs = "mercurial" vcs = _vcs @@ -1965,7 +2090,7 @@ def uses_vcs_in_meta(self): return vcs @property - def uses_vcs_in_build(self): + def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None: # TODO :: Re-work this. Is it even useful? We can declare any vcs in our build deps. build_script = "bld.bat" if on_win else "build.sh" build_script = os.path.join(self.path, build_script) @@ -1984,15 +2109,18 @@ def uses_vcs_in_build(self): build_script, flags=re.IGNORECASE, ) - if len(matches) > 0 and vcs != self.meta["package"]["name"]: + if len(matches) > 0 and vcs != self.get_value("package/name"): if vcs == "hg": vcs = "mercurial" return vcs return None def get_recipe_text( - self, extract_pattern=None, force_top_level=False, apply_selectors=True - ): + self, + extract_pattern: str | None = None, + force_top_level: bool = False, + apply_selectors: bool = True, + ) -> str: meta_path = self.meta_path if meta_path: recipe_text = read_meta_file(meta_path) @@ -2001,14 +2129,14 @@ def get_recipe_text( self.name(), getattr(self, "type", None) ) else: - from conda_build.render import output_yaml + from .render import output_yaml recipe_text = output_yaml(self) recipe_text = _filter_recipe_text(recipe_text, extract_pattern) if apply_selectors: recipe_text = select_lines( recipe_text, - ns_cfg(self.config), + get_selectors(self.config), variants_in_place=bool(self.config.variant), ) return recipe_text.rstrip() @@ -2086,15 +2214,14 @@ def extract_single_output_text( return output @property - def numpy_xx(self): + def numpy_xx(self) -> bool: """This is legacy syntax that we need to support for a while. numpy x.x means "pin run as build" for numpy. 
It was special-cased to only numpy.""" text = self.extract_requirements_text() - uses_xx = bool(numpy_xx_re.search(text)) - return uses_xx + return bool(numpy_xx_re.search(text)) @property - def uses_numpy_pin_compatible_without_xx(self): + def uses_numpy_pin_compatible_without_xx(self) -> tuple[bool, bool]: text = self.extract_requirements_text() compatible_search = numpy_compatible_re.search(text) max_pin_search = None @@ -2156,24 +2283,20 @@ def noarch(self): return self.get_value("build/noarch") @noarch.setter - def noarch(self, value): - build = self.meta.get("build", {}) - build["noarch"] = value - self.meta["build"] = build + def noarch(self, value: str | None) -> None: + self.meta.setdefault("build", {})["noarch"] = value if not self.noarch_python and not value: self.config.reset_platform() elif value: self.config.host_platform = "noarch" @property - def noarch_python(self): - return self.get_value("build/noarch_python") + def noarch_python(self) -> bool: + return bool(self.get_value("build/noarch_python")) @noarch_python.setter - def noarch_python(self, value): - build = self.meta.get("build", {}) - build["noarch_python"] = value - self.meta["build"] = build + def noarch_python(self, value: bool) -> None: + self.meta.setdefault("build", {})["noarch_python"] = value if not self.noarch and not value: self.config.reset_platform() elif value: @@ -2196,7 +2319,7 @@ def variant_in_source(self): # constrain the stored variants to only this version in the output # variant mapping if re.search( - r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, self.extract_source_text() + rf"\s*\{{\{{\s*{key}\s*(?:.*?)?\}}\}}", self.extract_source_text() ): return True return False @@ -2207,9 +2330,8 @@ def pin_depends(self): @property def source_provided(self): - return not bool(self.meta.get("source")) or ( - os.path.isdir(self.config.work_dir) - and len(os.listdir(self.config.work_dir)) > 0 + return not self.get_section("source") or ( + os.path.isdir(self.config.work_dir) and os.listdir(self.config.work_dir) ) def reconcile_metadata_with_output_dict(self, output_metadata, output_dict): @@ -2358,9 +2480,7 @@ def append_parent_metadata(self, out_metadata): def get_reduced_variant_set(self, used_variables): # reduce variable space to limit work we need to do - full_collapsed_variants = variants.list_of_dicts_to_dict_of_lists( - self.config.variants - ) + full_collapsed_variants = list_of_dicts_to_dict_of_lists(self.config.variants) reduced_collapsed_variants = full_collapsed_variants.copy() reduce_keys = set(self.config.variants[0].keys()) - set(used_variables) @@ -2392,21 +2512,21 @@ def get_reduced_variant_set(self, used_variables): # save only one element from this key reduced_collapsed_variants[key] = utils.ensure_list(next(iter(values))) - out = variants.dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) + out = dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) return out def get_output_metadata_set( self, - permit_undefined_jinja=False, - permit_unsatisfiable_variants=False, - bypass_env_check=False, - ): - from conda_build.source import provide + permit_undefined_jinja: bool = False, + permit_unsatisfiable_variants: bool = False, + bypass_env_check: bool = False, + ) -> list[tuple[dict[str, Any], MetaData]]: + from .source import provide out_metadata_map = {} if self.final: - outputs = get_output_dicts_from_metadata(self)[0] - output_tuples = [(outputs, self)] + outputs = get_output_dicts_from_metadata(self) + output_tuples = [(outputs[0], self)] else: all_output_metadata = OrderedDict() @@ 
-2451,20 +2571,18 @@ def get_output_metadata_set( # also refine this collection as each output metadata object is # finalized - see the finalize_outputs_pass function all_output_metadata[ - ( - out_metadata.name(), - HashableDict( - { - k: copy.deepcopy(out_metadata.config.variant[k]) - for k in out_metadata.get_used_vars() - } - ), - ) + out_metadata.name(), + deepfreeze( + { + k: out_metadata.config.variant[k] + for k in out_metadata.get_used_vars() + } + ), ] = (out, out_metadata) - out_metadata_map[HashableDict(out)] = out_metadata - ref_metadata.other_outputs = ( - out_metadata.other_outputs - ) = all_output_metadata + out_metadata_map[deepfreeze(out)] = out_metadata + ref_metadata.other_outputs = out_metadata.other_outputs = ( + all_output_metadata + ) except SystemExit: if not permit_undefined_jinja: raise @@ -2488,12 +2606,7 @@ def get_output_metadata_set( ): conda_packages[ m.name(), - HashableDict( - { - k: copy.deepcopy(m.config.variant[k]) - for k in m.get_used_vars() - } - ), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (output_d, m) elif output_d.get("type") == "wheel": if not output_d.get("requirements", {}).get("build") or not any( @@ -2505,7 +2618,7 @@ def get_output_metadata_set( ) output_d["requirements"] = output_d.get("requirements", {}) output_d["requirements"]["build"] = build_reqs - m.meta["requirements"] = m.meta.get("requirements", {}) + m.meta["requirements"] = m.get_section("requirements") m.meta["requirements"]["build"] = build_reqs non_conda_packages.append((output_d, m)) else: @@ -2530,7 +2643,7 @@ def get_output_metadata_set( # Sanity check: if any exact pins of any subpackages, make sure that they match ensure_matching_hashes(conda_packages) final_conda_packages = [] - for (out_d, m) in conda_packages.values(): + for out_d, m in conda_packages.values(): # We arbitrarily mark all output metadata as final, regardless # of if it truly is or not. This is done to add sane hashes # to unfinalizable packages, so that they are differentiable @@ -2543,21 +2656,14 @@ def get_output_metadata_set( return output_tuples def get_loop_vars(self): - _variants = ( - self.config.input_variants - if hasattr(self.config, "input_variants") - else self.config.variants - ) - return variants.get_vars(_variants, loop_only=True) + return get_vars(getattr(self.config, "input_variants", self.config.variants)) def get_used_loop_vars(self, force_top_level=False, force_global=False): - return { - var - for var in self.get_used_vars( - force_top_level=force_top_level, force_global=force_global - ) - if var in self.get_loop_vars() - } + loop_vars = self.get_loop_vars() + used_vars = self.get_used_vars( + force_top_level=force_top_level, force_global=force_global + ) + return set(loop_vars).intersection(used_vars) def get_rendered_recipe_text( self, permit_undefined_jinja=False, extract_pattern=None @@ -2630,11 +2736,7 @@ def get_used_vars(self, force_top_level=False, force_global=False): global used_vars_cache recipe_dir = self.path - # `HashableDict` does not handle lists of other dictionaries correctly. Also it - # is constructed inplace, taking references to sub-elements of the input dict - # and thus corrupting it. Also, this was being called in 3 places in this function - # so caching it is probably a good thing. 
- hashed_variants = HashableDict(copy.deepcopy(self.config.variant)) + hashed_variants = deepfreeze(self.config.variant) if hasattr(self.config, "used_vars"): used_vars = self.config.used_vars elif ( @@ -2737,7 +2839,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): apply_selectors=False, ) - all_used_selectors = variants.find_used_variables_in_text( + all_used_selectors = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=True ) @@ -2746,7 +2848,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): force_global=force_global, apply_selectors=True, ) - all_used_reqs = variants.find_used_variables_in_text( + all_used_reqs = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=False ) @@ -2757,9 +2859,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): if force_global: used = all_used else: - requirements_used = variants.find_used_variables_in_text( - variant_keys, reqs_text - ) + requirements_used = find_used_variables_in_text(variant_keys, reqs_text) outside_reqs_used = all_used - requirements_used requirements_used = trim_build_only_deps(self, requirements_used) @@ -2772,16 +2872,12 @@ def _get_used_vars_build_scripts(self): buildsh = os.path.join(self.path, "build.sh") if os.path.isfile(buildsh): used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, buildsh - ) + find_used_variables_in_shell_script(self.config.variant, buildsh) ) bldbat = os.path.join(self.path, "bld.bat") if self.config.platform == "win" and os.path.isfile(bldbat): used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, bldbat - ) + find_used_variables_in_batch_script(self.config.variant, bldbat) ) return used_vars @@ -2794,44 +2890,41 @@ def _get_used_vars_output_script(self): script = os.path.join(self.path, this_output["script"]) if os.path.splitext(script)[1] == ".sh": used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, script - ) + find_used_variables_in_shell_script(self.config.variant, script) ) elif os.path.splitext(script)[1] == ".bat": used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, script - ) + find_used_variables_in_batch_script(self.config.variant, script) ) else: log = utils.get_logger(__name__) log.warn( - "Not detecting used variables in output script {}; conda-build only knows " - "how to search .sh and .bat files right now.".format(script) + f"Not detecting used variables in output script {script}; conda-build only knows " + "how to search .sh and .bat files right now." 
) return used_vars def get_variants_as_dict_of_lists(self): - return variants.list_of_dicts_to_dict_of_lists(self.config.variants) + return list_of_dicts_to_dict_of_lists(self.config.variants) def clean(self): """This ensures that clean is called with the correct build id""" self.config.clean() @property - def activate_build_script(self): - b = self.meta.get("build", {}) or {} - should_activate = b.get("activate_in_script") is not False - return bool(self.config.activate and should_activate) + def activate_build_script(self) -> bool: + return bool( + self.config.activate + and self.get_value("build/activate_in_script") is not False + ) @property - def build_is_host(self): + def build_is_host(self) -> bool: manual_overrides = ( - self.meta.get("build", {}).get("merge_build_host") is True + self.get_value("build/merge_build_host") is True or self.config.build_is_host ) - manually_disabled = self.meta.get("build", {}).get("merge_build_host") is False + manually_disabled = self.get_value("build/merge_build_host") is False return manual_overrides or ( self.config.subdirs_same and not manually_disabled @@ -2882,3 +2975,9 @@ def get_test_deps(self, py_files, pl_files, lua_files, r_files): specs.extend(utils.ensure_list(self.config.extra_deps)) return specs + + +class MetaDataTuple(NamedTuple): + metadata: MetaData + need_download: bool + need_reparse: bool diff --git a/conda_build/metapackage.py b/conda_build/metapackage.py index fd993c1236..5c7b57c7b5 100644 --- a/conda_build/metapackage.py +++ b/conda_build/metapackage.py @@ -1,29 +1,40 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from collections import defaultdict -from conda_build.config import Config -from conda_build.metadata import MetaData +from .config import Config +from .metadata import MetaData -def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, - dependencies=(), home=None, license_name=None, summary=None, config=None): - # local import to avoid circular import, we provid create_metapackage in api - from conda_build.api import build + +def create_metapackage( + name, + version, + entry_points=(), + build_string=None, + build_number=0, + dependencies=(), + home=None, + license_name=None, + summary=None, + config=None, +): + # local import to avoid circular import, we provide create_metapackage in api + from .api import build if not config: config = Config() d = defaultdict(dict) - d['package']['name'] = name - d['package']['version'] = version - d['build']['number'] = build_number - d['build']['entry_points'] = entry_points + d["package"]["name"] = name + d["package"]["version"] = version + d["build"]["number"] = build_number + d["build"]["entry_points"] = entry_points # MetaData does the auto stuff if the build string is None - d['build']['string'] = build_string - d['requirements']['run'] = dependencies - d['about']['home'] = home - d['about']['license'] = license_name - d['about']['summary'] = summary + d["build"]["string"] = build_string + d["requirements"]["run"] = dependencies + d["about"]["home"] = home + d["about"]["license"] = license_name + d["about"]["summary"] = summary d = dict(d) m = MetaData.fromdict(d, config=config) config.compute_build_id(m.name(), m.version()) diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index 6823faeea6..1e80fcd2e4 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -4,20 +4,11 @@ import locale import logging import os -from os.path import basename, dirname, isdir, join, 
isfile import shutil import sys +from os.path import basename, dirname, isfile, join -ISWIN = sys.platform.startswith('win') - - -def _force_dir(dirname): - if not isdir(dirname): - os.makedirs(dirname) - - -def _error_exit(exit_message): - sys.exit("[noarch_python] %s" % exit_message) +from .utils import on_win def rewrite_script(fn, prefix): @@ -26,7 +17,7 @@ def rewrite_script(fn, prefix): noarch pacakges""" # Load and check the source file for not being a binary - src = join(prefix, 'Scripts' if ISWIN else 'bin', fn) + src = join(prefix, "Scripts" if on_win else "bin", fn) encoding = locale.getpreferredencoding() # if default locale is ascii, allow UTF-8 (a reasonably modern ASCII extension) if encoding == "ANSI_X3.4-1968": @@ -35,80 +26,78 @@ def rewrite_script(fn, prefix): try: data = fi.read() except UnicodeDecodeError: # file is binary - _error_exit("Noarch package contains binary script: %s" % fn) + sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}") src_mode = os.stat(src).st_mode os.unlink(src) # Get rid of '-script.py' suffix on Windows - if ISWIN and fn.endswith('-script.py'): + if on_win and fn.endswith("-script.py"): fn = fn[:-10] # Rewrite the file to the python-scripts directory - dst_dir = join(prefix, 'python-scripts') - _force_dir(dst_dir) + dst_dir = join(prefix, "python-scripts") + os.makedirs(dst_dir, exist_ok=True) dst = join(dst_dir, fn) - with open(dst, 'w') as fo: + with open(dst, "w") as fo: fo.write(data) os.chmod(dst, src_mode) return fn def handle_file(f, d, prefix): - """Process a file for inclusion in a noarch python package. - """ + """Process a file for inclusion in a noarch python package.""" path = join(prefix, f) # Ignore egg-info and pyc files. - if f.endswith(('.egg-info', '.pyc', '.pyo')): + if f.endswith((".egg-info", ".pyc", ".pyo")): os.unlink(path) - elif f.endswith('.exe') and (isfile(os.path.join(prefix, f[:-4] + '-script.py')) or - basename(f[:-4]) in d['python-scripts']): + elif f.endswith(".exe") and ( + isfile(os.path.join(prefix, f[:-4] + "-script.py")) + or basename(f[:-4]) in d["python-scripts"] + ): os.unlink(path) # this is an entry point with a matching xx-script.py - elif 'site-packages' in f: - nsp = join(prefix, 'site-packages') - _force_dir(nsp) + elif "site-packages" in f: + nsp = join(prefix, "site-packages") + os.makedirs(nsp, exist_ok=True) - g = f[f.find('site-packages'):] + g = f[f.find("site-packages") :] dst = join(prefix, g) dst_dir = dirname(dst) - _force_dir(dst_dir) + os.makedirs(dst_dir, exist_ok=True) shutil.move(path, dst) - d['site-packages'].append(g[14:]) + d["site-packages"].append(g[14:]) # Treat scripts specially with the logic from above - elif f.startswith(('bin/', 'Scripts')): + elif f.startswith(("bin/", "Scripts")): fn = basename(path) fn = rewrite_script(fn, prefix) - d['python-scripts'].append(fn) + d["python-scripts"].append(fn) # Include examples in the metadata doc - elif f.startswith(('Examples/', 'Examples\\')): - d['Examples'].append(f[9:]) + elif f.startswith(("Examples/", "Examples\\")): + d["Examples"].append(f[9:]) # No special treatment for other files # leave them as-is else: # this should be the built-in logging module, not conda-build's stuff, because this file is standalone. log = logging.getLogger(__name__) - log.debug("Don't know how to handle file: %s. Including it as-is." % f) + log.debug(f"Don't know how to handle file: {f}. 
Including it as-is.") def populate_files(m, files, prefix, entry_point_scripts=None): - d = {'dist': m.dist(), - 'site-packages': [], - 'python-scripts': [], - 'Examples': []} + d = {"dist": m.dist(), "site-packages": [], "python-scripts": [], "Examples": []} # Populate site-package, python-scripts, and Examples into above for f in files: handle_file(f, d, prefix) # Windows path conversion - if ISWIN: - for fns in (d['site-packages'], d['Examples']): + if on_win: + for fns in (d["site-packages"], d["Examples"]): for i, fn in enumerate(fns): - fns[i] = fn.replace('\\', '/') + fns[i] = fn.replace("\\", "/") if entry_point_scripts: for entry_point in entry_point_scripts: @@ -120,28 +109,32 @@ def populate_files(m, files, prefix, entry_point_scripts=None): def transform(m, files, prefix): - bin_dir = join(prefix, 'bin') - _force_dir(bin_dir) + bin_dir = join(prefix, "bin") + os.makedirs(bin_dir, exist_ok=True) - scripts_dir = join(prefix, 'Scripts') - _force_dir(scripts_dir) + scripts_dir = join(prefix, "Scripts") + os.makedirs(scripts_dir, exist_ok=True) name = m.name() # Create *nix prelink script # Note: it's important to use LF newlines or it wont work if we build on Win - with open(join(bin_dir, '.%s-pre-link.sh' % name), 'wb') as fo: - fo.write(b'''\ + with open(join(bin_dir, f".{name}-pre-link.sh"), "wb") as fo: + fo.write( + b"""\ #!/bin/bash $PREFIX/bin/python $SOURCE_DIR/link.py - ''') + """ + ) # Create windows prelink script (be nice and use Windows newlines) - with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'wb') as fo: - fo.write('''\ + with open(join(scripts_dir, f".{name}-pre-link.bat"), "wb") as fo: + fo.write( + """\ @echo off "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" - '''.replace('\n', '\r\n').encode('utf-8')) + """.replace("\n", "\r\n").encode("utf-8") + ) d = populate_files(m, files, prefix) @@ -149,17 +142,17 @@ def transform(m, files, prefix): this_dir = dirname(__file__) # copy in windows exe shims if there are any python-scripts - if d['python-scripts']: - for fn in 'cli-32.exe', 'cli-64.exe': + if d["python-scripts"]: + for fn in "cli-32.exe", "cli-64.exe": shutil.copyfile(join(this_dir, fn), join(prefix, fn)) # Read the local _link.py - with open(join(this_dir, '_link.py')) as fi: + with open(join(this_dir, "_link.py")) as fi: link_code = fi.read() # Write the package metadata, and bumper with code for linking - with open(join(prefix, 'link.py'), 'w') as fo: - fo.write('DATA = ') + with open(join(prefix, "link.py"), "w") as fo: + fo.write("DATA = ") json.dump(d, fo, indent=2, sort_keys=True) - fo.write('\n## END DATA\n\n') + fo.write("\n## END DATA\n\n") fo.write(link_code) diff --git a/conda_build/os_utils/elf.py b/conda_build/os_utils/elf.py index e8b2386f55..5fc37e772e 100644 --- a/conda_build/os_utils/elf.py +++ b/conda_build/os_utils/elf.py @@ -1,28 +1,30 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import sys -from os.path import islink, isfile - +from os.path import isfile, islink # extensions which are assumed to belong to non-ELF files NO_EXT = ( - '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', - '.xml', '.png', '.jpg', '.gif', - '.o' # ELF but not what we are looking for + ".py", + ".pyc", + ".pyo", + ".h", + ".a", + ".c", + ".txt", + ".html", + ".xml", + ".png", + ".jpg", + ".gif", + ".o", # ELF but not what we are looking for ) -MAGIC = b'\x7fELF' +MAGIC = b"\x7fELF" def is_elf(path): if path.endswith(NO_EXT) or islink(path) or not isfile(path): return False - with open(path, 'rb') as fi: + with 
open(path, "rb") as fi: head = fi.read(4) return bool(head == MAGIC) - - -if __name__ == '__main__': - if sys.platform.startswith('linux'): - for path in '/usr/bin/ls', '/etc/mtab': - print(path, is_elf(path)) diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 3e5ea52994..3ea8216a53 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -2,11 +2,12 @@ # SPDX-License-Identifier: BSD-3-Clause import os import stat -import sys -from os.path import isfile, join, expanduser +from glob import glob +from os.path import expanduser, isfile, join -from conda_build.conda_interface import root_dir -from glob2 import glob +from conda.base.context import context + +from ..utils import on_win def find_executable(executable, prefix=None, all_matches=False): @@ -14,26 +15,32 @@ def find_executable(executable, prefix=None, all_matches=False): # in other code global dir_paths result = None - if sys.platform == 'win32': - dir_paths = [join(root_dir, 'Scripts'), - join(root_dir, 'Library\\mingw-w64\\bin'), - join(root_dir, 'Library\\usr\\bin'), - join(root_dir, 'Library\\bin'), ] + if on_win: + dir_paths = [ + join(context.root_prefix, "Scripts"), + join(context.root_prefix, "Library\\mingw-w64\\bin"), + join(context.root_prefix, "Library\\usr\\bin"), + join(context.root_prefix, "Library\\bin"), + ] if prefix: - dir_paths[0:0] = [join(prefix, 'Scripts'), - join(prefix, 'Library\\mingw-w64\\bin'), - join(prefix, 'Library\\usr\\bin'), - join(prefix, 'Library\\bin'), ] + dir_paths[0:0] = [ + join(prefix, "Scripts"), + join(prefix, "Library\\mingw-w64\\bin"), + join(prefix, "Library\\usr\\bin"), + join(prefix, "Library\\bin"), + ] else: - dir_paths = [join(root_dir, 'bin'), ] + dir_paths = [ + join(context.root_prefix, "bin"), + ] if prefix: - dir_paths.insert(0, join(prefix, 'bin')) + dir_paths.insert(0, join(prefix, "bin")) - dir_paths.extend(os.environ['PATH'].split(os.pathsep)) - if sys.platform == 'win32': - exts = ('.exe', '.bat', '') + dir_paths.extend(os.environ["PATH"].split(os.pathsep)) + if on_win: + exts = (".exe", ".bat", "") else: - exts = ('',) + exts = ("",) all_matches_found = [] for dir_path in dir_paths: @@ -41,14 +48,14 @@ def find_executable(executable, prefix=None, all_matches=False): path = expanduser(join(dir_path, executable + ext)) if isfile(path): st = os.stat(path) - if sys.platform == 'win32' or st.st_mode & stat.S_IEXEC: + if on_win or st.st_mode & stat.S_IEXEC: if all_matches: all_matches_found.append(path) else: result = path break - if not result and any([f in executable for f in ('*', '?', '.')]): - matches = glob(os.path.join(dir_path, executable)) + if not result and any([f in executable for f in ("*", "?", ".")]): + matches = glob(os.path.join(dir_path, executable), recursive=True) if matches: if all_matches: all_matches_found.extend(matches) @@ -60,8 +67,10 @@ def find_executable(executable, prefix=None, all_matches=False): return result or all_matches_found -def find_preferably_prefixed_executable(executable, build_prefix=None, all_matches=False): - found = find_executable('*' + executable, build_prefix, all_matches) +def find_preferably_prefixed_executable( + executable, build_prefix=None, all_matches=False +): + found = find_executable("*" + executable, build_prefix, all_matches) if not found: # It is possible to force non-prefixed exes by passing os.sep as the # first character in executable. basename makes this work. 
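Note on the glob2 → stdlib glob swap in conda_build/os_utils/external.py above: a minimal, self-contained sketch (not part of the patch; the prefix layout and file name below are hypothetical) of the behavioral detail the added recursive=True flag covers — the standard-library glob only expands "**" across directory levels when that flag is set, whereas the removed glob2 dependency did so without a flag.

    # Sketch, assuming Python >= 3.5 (stdlib glob gained recursive=True in 3.5).
    import os
    from glob import glob
    from tempfile import TemporaryDirectory

    with TemporaryDirectory() as prefix:
        # Lay out a nested executable two levels below the prefix.
        exe = os.path.join(prefix, "bin", "sub", "x86_64-conda-linux-gnu-cc")
        os.makedirs(os.path.dirname(exe))
        open(exe, "w").close()

        pattern = os.path.join(prefix, "**", "*-cc")
        print(glob(pattern))                  # [] -- without recursive=True, "**" behaves like a single "*"
        print(glob(pattern, recursive=True))  # ['.../bin/sub/x86_64-conda-linux-gnu-cc']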
diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index e094301e54..84e80b8e90 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -1,124 +1,131 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from functools import lru_cache -import sys +from __future__ import annotations + import re import subprocess -from os.path import join, basename +from functools import lru_cache +from os.path import basename +from pathlib import Path +from typing import TYPE_CHECKING + +from conda.misc import untracked + +from ..utils import on_linux, on_mac +from .macho import otool +from .pyldd import codefile_class, inspect_linkages, machofile -from conda_build.conda_interface import untracked -from conda_build.conda_interface import linked_data +if TYPE_CHECKING: + import os + from typing import Iterable -from conda_build.os_utils.macho import otool -from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile, is_codefile + from conda.models.records import PrefixRecord -LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') -LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') +LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") +LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") def ldd(path): "thin wrapper around ldd" - lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() + lines = subprocess.check_output(["ldd", path]).decode("utf-8").splitlines() res = [] for line in lines: - if '=>' not in line: + if "=>" not in line: continue - assert line[0] == '\t', (path, line) + assert line[0] == "\t", (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: - res.append((m.group(1), 'not found')) + res.append((m.group(1), "not found")) continue - if 'ld-linux' in line: + if "ld-linux" in line: continue - raise RuntimeError("Unexpected output from ldd: %s" % line) + raise RuntimeError(f"Unexpected output from ldd: {line}") return res -def get_linkages(obj_files, prefix, sysroot): - return _get_linkages(tuple(obj_files), prefix, sysroot) +def get_linkages( + obj_files: Iterable[str], + prefix: str | os.PathLike | Path, + sysroot: str, +) -> dict[str, list[tuple[str, str]]]: + return _get_linkages(tuple(obj_files), Path(prefix), sysroot) @lru_cache(maxsize=None) -def _get_linkages(obj_files, prefix, sysroot): - res = {} - - for f in obj_files: - path = join(prefix, f) - # ldd quite often fails on foreign architectures. - ldd_failed = False +def _get_linkages( + obj_files: tuple[str], + prefix: Path, + sysroot: str, +) -> dict[str, list[tuple[str, str]]]: + linkages = {} + for file in obj_files: # Detect the filetype to emulate what the system-native tool does. 
- klass = codefile_class(path) - if klass == machofile: + path = prefix / file + if codefile_class(path) == machofile: resolve_filenames = False recurse = False else: resolve_filenames = True recurse = True + ldd_emulate = [ + (basename(link), link) + for link in inspect_linkages( + path, + resolve_filenames=resolve_filenames, + sysroot=sysroot, + recurse=recurse, + ) + ] + try: - if sys.platform.startswith('linux'): - res[f] = ldd(path) - elif sys.platform.startswith('darwin'): - links = otool(path) - res[f] = [(basename(line['name']), line['name']) for line in links] + if on_linux: + ldd_computed = ldd(path) + elif on_mac: + ldd_computed = [ + (basename(link["name"]), link["name"]) for link in otool(path) + ] except: - ldd_failed = True - finally: - res_py = inspect_linkages(path, resolve_filenames=resolve_filenames, - sysroot=sysroot, recurse=recurse) - res_py = [(basename(lp), lp) for lp in res_py] - if ldd_failed: - res[f] = res_py - else: - if set(res[f]) != set(res_py): - print("WARNING: pyldd disagrees with ldd/otool. This will not cause any") - print("WARNING: problems for this build, but please file a bug at:") - print("WARNING: https://github.com/conda/conda-build") - print(f"WARNING: and (if possible) attach file {path}") - print("WARNING: \nldd/otool gives:\n{}\npyldd gives:\n{}\n" - .format("\n".join(str(e) for e in res[f]), "\n".join(str(e) - for e in res_py))) - print(f"Diffs\n{set(res[f]) - set(res_py)}") - print(f"Diffs\n{set(res_py) - set(res[f])}") - return res + # ldd quite often fails on foreign architectures, fallback to + ldd_computed = ldd_emulate + + if set(ldd_computed) != set(ldd_emulate): + print("WARNING: pyldd disagrees with ldd/otool. This will not cause any") + print("WARNING: problems for this build, but please file a bug at:") + print("WARNING: https://github.com/conda/conda-build") + print(f"WARNING: and (if possible) attach file {path}") + print("WARNING:") + print(" ldd/otool gives:") + print(" " + "\n ".join(map(str, ldd_computed))) + print(" pyldd gives:") + print(" " + "\n ".join(map(str, ldd_emulate))) + print(f"Diffs\n{set(ldd_computed) - set(ldd_emulate)}") + print(f"Diffs\n{set(ldd_emulate) - set(ldd_computed)}") + + linkages[file] = ldd_computed + return linkages @lru_cache(maxsize=None) -def get_package_files(dist, prefix): - files = [] - if hasattr(dist, 'get'): - files = dist.get('files') - else: - data = linked_data(prefix).get(dist) - if data: - files = data.get('files', []) - return files +def get_package_obj_files( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> list[str]: + return [ + file + for file in prec["files"] + if codefile_class(Path(prefix, file), skip_symlinks=True) + ] @lru_cache(maxsize=None) -def get_package_obj_files(dist, prefix): - res = [] - files = get_package_files(dist, prefix) - for f in files: - path = join(prefix, f) - if is_codefile(path): - res.append(f) - - return res - - -@lru_cache(maxsize=None) -def get_untracked_obj_files(prefix): - res = [] - files = untracked(prefix) - for f in files: - path = join(prefix, f) - if is_codefile(path): - res.append(f) - - return res +def get_untracked_obj_files(prefix: str | os.PathLike | Path) -> list[str]: + return [ + file + for file in untracked(str(prefix)) + if codefile_class(Path(prefix, file), skip_symlinks=True) + ] diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index ba7df48b74..d6ee2841d6 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -1,40 +1,46 @@ # Copyright (C) 2014 Anaconda, 
Inc # SPDX-License-Identifier: BSD-3-Clause -try: - from collections.abc import Hashable -except ImportError: - from collections.abc import Hashable -from functools import partial -import glob2 +from __future__ import annotations + import hashlib import json import os -from subprocess import Popen, PIPE import struct -import sys import threading +from collections.abc import Hashable +from fnmatch import fnmatch +from functools import partial +from pathlib import Path +from subprocess import PIPE, Popen + +from conda.models.version import VersionOrder +from ..utils import on_mac, on_win, rec_glob +from .external import find_executable + +# lief cannot handle files it doesn't know about gracefully # TODO :: Remove all use of pyldd # Currently we verify the output of each against the other +from .pyldd import DLLfile, EXEfile, elffile, machofile from .pyldd import inspect_linkages as inspect_linkages_pyldd -# lief cannot handle files it doesn't know about gracefully -from .pyldd import codefile_type as codefile_type_pyldd -from .external import find_executable -codefile_type = codefile_type_pyldd -have_lief = False try: import lief - have_lief = True -except: - pass - -def is_string(s): + lief.logging.disable() + have_lief = True + try: + PE_HEADER_CHARACTERISTICS = lief.PE.Header.CHARACTERISTICS + except AttributeError: + # Fallback for lief<0.14. + PE_HEADER_CHARACTERISTICS = lief.PE.HEADER_CHARACTERISTICS try: - return isinstance(s, basestring) - except NameError: - return isinstance(s, str) + EXE_FORMATS = lief.Binary.FORMATS + except AttributeError: + # Fallback for lief<0.14. + EXE_FORMATS = lief.EXE_FORMATS +except ImportError: + have_lief = False # Some functions can operate on either file names @@ -42,21 +48,22 @@ def is_string(s): # these are to be avoided, or if not avoided they # should be passed a binary when possible as that # will prevent having to parse it multiple times. 
-def ensure_binary(file): - if not is_string(file): +def ensure_binary( + file: str | os.PathLike | Path | lief.Binary | None, +) -> lief.Binary | None: + if isinstance(file, lief.Binary): return file - else: - try: - if not os.path.exists(file): - return [] - return lief.parse(file) - except: - print(f'WARNING: liefldd: failed to ensure_binary({file})') - return None + elif not file or not Path(file).exists(): + return None + try: + return lief.parse(str(file)) + except BaseException: + print(f"WARNING: liefldd: failed to ensure_binary({file!r})") + return None def nm(filename): - """ Return symbols from *filename* binary """ + """Return symbols from *filename* binary""" done = False try: binary = lief.parse(filename) # Build an abstract binary @@ -73,29 +80,35 @@ def nm(filename): print("No symbols found") -def codefile_type_liefldd(file, skip_symlinks=True): - binary = ensure_binary(file) - result = None - if binary: - if binary.format == lief.EXE_FORMATS.PE: - if lief.PE.DLL_CHARACTERISTICS: - if binary.header.characteristics & lief.PE.HEADER_CHARACTERISTICS.DLL: - result = 'DLLfile' - else: - result = 'EXEfile' - elif binary.format == lief.EXE_FORMATS.MACHO: - result = 'machofile' - elif binary.format == lief.EXE_FORMATS.ELF: - result = 'elffile' - return result +if have_lief: + def codefile_class( + path: str | os.PathLike | Path, + skip_symlinks: bool = False, + ) -> type[DLLfile | EXEfile | machofile | elffile] | None: + # same signature as conda.os_utils.pyldd.codefile_class + if not (binary := ensure_binary(path)): + return None + elif ( + binary.format == EXE_FORMATS.PE + and PE_HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list + ): + return DLLfile + elif binary.format == EXE_FORMATS.PE: + return EXEfile + elif binary.format == EXE_FORMATS.MACHO: + return machofile + elif binary.format == EXE_FORMATS.ELF: + return elffile + else: + return None -if have_lief: - codefile_type = codefile_type_liefldd +else: + from .pyldd import codefile_class def _trim_sysroot(sysroot): - while sysroot.endswith('/') or sysroot.endswith('\\'): + while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] return sysroot @@ -104,28 +117,35 @@ def get_libraries(file): result = [] binary = ensure_binary(file) if binary: - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: result = binary.libraries else: - result = [lib if is_string(lib) else lib.name for lib in binary.libraries] + result = [ + lib if isinstance(lib, str) else lib.name for lib in binary.libraries + ] # LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that. 
binary_name = None - if binary.format == lief.EXE_FORMATS.MACHO: - binary_name = [command.name for command in binary.commands - if command.command == lief.MachO.LOAD_COMMAND_TYPES.ID_DYLIB] + if binary.format == EXE_FORMATS.MACHO: + binary_name = [ + command.name + for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.ID_DYLIB + ] binary_name = binary_name[0] if len(binary_name) else None - result = [from_os_varnames(binary.format, None, lib) for lib in result - if not binary_name or lib != binary_name] + result = [ + from_os_varnames(binary.format, None, lib) + for lib in result + if not binary_name or lib != binary_name + ] return result def _get_elf_rpathy_thing(binary, attribute, dyn_tag): dynamic_entries = binary.dynamic_entries - rpaths_colons = [getattr(e, attribute) - for e in dynamic_entries if e.tag == dyn_tag] + rpaths_colons = [getattr(e, attribute) for e in dynamic_entries if e.tag == dyn_tag] rpaths = [] for rpath in rpaths_colons: - rpaths.extend(rpath.split(':')) + rpaths.extend(rpath.split(":")) return rpaths @@ -133,27 +153,32 @@ def _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath, set_runpat dynamic_entries = binary.dynamic_entries changed = False for e in dynamic_entries: - if (set_runpath and - e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH and - glob2.fnmatch.fnmatch(e.runpath, old_matching) and - e.runpath != new_rpath): + if ( + set_runpath + and e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH + and fnmatch(e.runpath, old_matching) + and e.runpath != new_rpath + ): e.runpath = new_rpath changed = True - elif (set_rpath and - e.tag == lief.ELF.DYNAMIC_TAGS.RPATH and - glob2.fnmatch.fnmatch(e.rpath, old_matching) and - e.rpath != new_rpath): + elif ( + set_rpath + and e.tag == lief.ELF.DYNAMIC_TAGS.RPATH + and fnmatch(e.rpath, old_matching) + and e.rpath != new_rpath + ): e.rpath = new_rpath changed = True return changed if have_lief: + def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): - ''' + """ By raw we mean that no processing is done on them whatsoever. The values are taken directly from LIEF. For anything but Linux, this means an empty list. 
- ''' + """ binary_format = None binary_type = None @@ -161,19 +186,39 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): rpaths = [] if binary: binary_format = binary.format - if binary_format == lief.EXE_FORMATS.ELF: + if binary_format == EXE_FORMATS.ELF: binary_type = binary.type - if binary_type == lief.ELF.ELF_CLASS.CLASS32 or binary_type == lief.ELF.ELF_CLASS.CLASS64: + if ( + binary_type == lief.ELF.ELF_CLASS.CLASS32 + or binary_type == lief.ELF.ELF_CLASS.CLASS64 + ): rpaths = _get_elf_rpathy_thing(binary, elf_attribute, elf_dyn_tag) - elif (binary_format == lief.EXE_FORMATS.MACHO and - binary.has_rpath and - elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH): - rpaths.extend([command.path for command in binary.commands - if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH]) + elif ( + binary_format == EXE_FORMATS.MACHO + and binary.has_rpath + and elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH + ): + rpaths.extend( + [ + command.path + for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH + ] + ) return rpaths, binary_format, binary_type - get_runpaths_raw = partial(get_rpathy_thing_raw_partial, elf_attribute='runpath', elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RUNPATH) - get_rpaths_raw = partial(get_rpathy_thing_raw_partial, elf_attribute='rpath', elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RPATH) + + get_runpaths_raw = partial( + get_rpathy_thing_raw_partial, + elf_attribute="runpath", + elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RUNPATH, + ) + get_rpaths_raw = partial( + get_rpathy_thing_raw_partial, + elf_attribute="rpath", + elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RPATH, + ) else: + def get_runpaths_raw(file): return [], None, None @@ -182,30 +227,36 @@ def get_rpaths_raw(file): def get_runpaths_or_rpaths_raw(file): - ''' + """ Can be called on all OSes. On linux, if runpaths are present they are returned. - ''' + """ rpaths, binary_format, binary_type = get_runpaths_raw(file) if not len(rpaths): rpaths, _, _ = get_rpaths_raw(file) - rpaths_type = 'rpaths' + rpaths_type = "rpaths" else: - rpaths_type = 'runpaths' + rpaths_type = "runpaths" return rpaths, rpaths_type, binary_format, binary_type def set_rpath(old_matching, new_rpath, file): binary = ensure_binary(file) - if (binary.format == lief.EXE_FORMATS.ELF and - (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): - if _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath=True, set_runpath=False): + if not binary: + return + if binary.format == EXE_FORMATS.ELF and ( + binary.type == lief.ELF.ELF_CLASS.CLASS32 + or binary.type == lief.ELF.ELF_CLASS.CLASS64 + ): + if _set_elf_rpathy_thing( + binary, old_matching, new_rpath, set_rpath=True, set_runpath=False + ): binary.write(file) -def get_rpaths(file, exe_dirname, envroot, windows_root=''): +def get_rpaths(file, exe_dirname, envroot, windows_root=""): rpaths, rpaths_type, binary_format, binary_type = get_runpaths_or_rpaths_raw(file) - if binary_format == lief.EXE_FORMATS.PE: + if binary_format == EXE_FORMATS.PE: # To allow the unix-y rpath code to work we consider # exes as having rpaths of env + CONDA_WINDOWS_PATHS # and consider DLLs as having no rpaths. @@ -214,29 +265,29 @@ def get_rpaths(file, exe_dirname, envroot, windows_root=''): # not to apply them transitively. 
# https://docs.microsoft.com/en-us/windows/desktop/dlls/dynamic-link-library-search-order if exe_dirname: - rpaths.append(exe_dirname.replace('\\', '/')) + rpaths.append(exe_dirname.replace("\\", "/")) if windows_root: - rpaths.append('/'.join((windows_root, "System32"))) - rpaths.append('/'.join((windows_root, "System32", "downlevel"))) + rpaths.append("/".join((windows_root, "System32"))) + rpaths.append("/".join((windows_root, "System32", "downlevel"))) rpaths.append(windows_root) if envroot: - # and not lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list: + # and not .DLL in binary.header.characteristics_list: rpaths.extend(list(_get_path_dirs(envroot))) - elif binary_format == lief.EXE_FORMATS.MACHO: - rpaths = [rpath.rstrip('/') for rpath in rpaths] + elif binary_format == EXE_FORMATS.MACHO: + rpaths = [rpath.rstrip("/") for rpath in rpaths] return [from_os_varnames(binary_format, binary_type, rpath) for rpath in rpaths] # TODO :: Consider memoizing instead of repeatedly scanning # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) -def _inspect_linkages_this(filename, sysroot='', arch='native'): - ''' +def _inspect_linkages_this(filename, sysroot="", arch="native"): + """ :param filename: :param sysroot: :param arch: :return: - ''' + """ if not os.path.exists(filename): return None, [], [] @@ -247,127 +298,145 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): # json_data = json.loads(lief.to_json_from_abstract(binary)) json_data = json.loads(lief.to_json(binary)) if json_data: - return filename, json_data['imported_libraries'], json_data['imported_libraries'] + return ( + filename, + json_data["imported_libraries"], + json_data["imported_libraries"], + ) except: - print(f'WARNING: liefldd: failed _inspect_linkages_this({filename})') + print(f"WARNING: liefldd: failed _inspect_linkages_this({filename})") return None, [], [] def to_os_varnames(binary, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - if binary.format == lief.EXE_FORMATS.MACHO: - return input_.replace('$SELFDIR', '@loader_path') \ - .replace('$EXEDIR', '@executable_path') \ - .replace('$RPATH', '@rpath') - elif binary.format == lief.EXE_FORMATS.ELF: + if binary.format == EXE_FORMATS.MACHO: + return ( + input_.replace("$SELFDIR", "@loader_path") + .replace("$EXEDIR", "@executable_path") + .replace("$RPATH", "@rpath") + ) + elif binary.format == EXE_FORMATS.ELF: if binary.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$SELFDIR', '$ORIGIN') \ - .replace(libdir, '$LIB') + libdir = "/lib" + return input.replace("$SELFDIR", "$ORIGIN").replace(libdir, "$LIB") def from_os_varnames(binary_format, binary_type, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - if binary_format == lief.EXE_FORMATS.MACHO: - return input_.replace('@loader_path', '$SELFDIR') \ - .replace('@executable_path', '$EXEDIR') \ - .replace('@rpath', '$RPATH') - elif binary_format == lief.EXE_FORMATS.ELF: + if binary_format == EXE_FORMATS.MACHO: + return ( + input_.replace("@loader_path", "$SELFDIR") + .replace("@executable_path", "$EXEDIR") + .replace("@rpath", "$RPATH") + ) + elif binary_format == EXE_FORMATS.ELF: if binary_type == lief.ELF.ELF_CLASS.CLASS64: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input_.replace('$ORIGIN', '$SELFDIR') \ - .replace('$LIB', libdir) - elif binary_format == lief.EXE_FORMATS.PE: + libdir = "/lib" 
+ return input_.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir) + elif binary_format == EXE_FORMATS.PE: return input_ # TODO :: Use conda's version of this (or move the constant strings into constants.py) def _get_path_dirs(prefix): - yield '/'.join((prefix,)) - yield '/'.join((prefix, 'Library', 'mingw-w64', 'bin')) - yield '/'.join((prefix, 'Library', 'usr', 'bin')) - yield '/'.join((prefix, 'Library', 'bin')) - yield '/'.join((prefix, 'Scripts')) - yield '/'.join((prefix, 'bin')) + yield "/".join((prefix,)) + yield "/".join((prefix, "Library", "mingw-w64", "bin")) + yield "/".join((prefix, "Library", "usr", "bin")) + yield "/".join((prefix, "Library", "bin")) + yield "/".join((prefix, "Scripts")) + yield "/".join((prefix, "bin")) def get_uniqueness_key(file): binary = ensure_binary(file) - if binary.format == lief.EXE_FORMATS.MACHO: - return binary.name - elif (binary.format == lief.EXE_FORMATS.ELF - and # noqa - (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): + if not binary: + return EXE_FORMATS.UNKNOWN + elif binary.format == EXE_FORMATS.MACHO: + return str(file) + elif binary.format == EXE_FORMATS.ELF and ( # noqa + binary.type == lief.ELF.ELF_CLASS.CLASS32 + or binary.type == lief.ELF.ELF_CLASS.CLASS64 + ): dynamic_entries = binary.dynamic_entries - result = [e.name for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.SONAME] + result = [ + e.name for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.SONAME + ] if result: return result[0] - return binary.name - return binary.name - - -def _get_resolved_location(codefile, - unresolved, - exedir, - selfdir, - rpaths_transitive, - LD_LIBRARY_PATH='', - default_paths=[], - sysroot='', - resolved_rpath=None): - ''' - From `man ld.so` - - When resolving shared object dependencies, the dynamic linker first inspects each dependency - string to see if it contains a slash (this can occur if a shared object pathname containing - slashes was specified at link time). If a slash is found, then the dependency string is - interpreted as a (relative or absolute) pathname, and the shared object is loaded using that - pathname. - - If a shared object dependency does not contain a slash, then it is searched for in the - following order: - - o Using the directories specified in the DT_RPATH dynamic section attribute of the binary - if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. - - o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in - secure-execution mode; see below). in which case it is ignored. - - o Using the directories specified in the DT_RUNPATH dynamic section attribute of the - binary if present. Such directories are searched only to find those objects required - by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, - which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, - which is applied to searches for all children in the dependency tree. - - o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate - shared objects previously found in the augmented library path. If, however, the binary - was linked with the -z nodeflib linker option, shared objects in the default paths are - skipped. Shared objects installed in hardware capability directories (see below) are - preferred to other shared objects. - - o In the default path /lib, and then /usr/lib. 
(On some 64-bit architectures, the default - paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was - linked with the -z nodeflib linker option, this step is skipped. - - Returns a tuple of resolved location, rpath_used, in_sysroot - ''' + return str(file) + return str(file) + + +def _get_resolved_location( + codefile, + unresolved, + exedir, + selfdir, + rpaths_transitive, + LD_LIBRARY_PATH="", + default_paths=[], + sysroot="", + resolved_rpath=None, +): + """ + From `man ld.so` + + When resolving shared object dependencies, the dynamic linker first inspects each dependency + string to see if it contains a slash (this can occur if a shared object pathname containing + slashes was specified at link time). If a slash is found, then the dependency string is + interpreted as a (relative or absolute) pathname, and the shared object is loaded using that + pathname. + + If a shared object dependency does not contain a slash, then it is searched for in the + following order: + + o Using the directories specified in the DT_RPATH dynamic section attribute of the binary + if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. + + o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in + secure-execution mode; see below). in which case it is ignored. + + o Using the directories specified in the DT_RUNPATH dynamic section attribute of the + binary if present. Such directories are searched only to find those objects required + by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, + which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, + which is applied to searches for all children in the dependency tree. + + o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate + shared objects previously found in the augmented library path. If, however, the binary + was linked with the -z nodeflib linker option, shared objects in the default paths are + skipped. Shared objects installed in hardware capability directories (see below) are + preferred to other shared objects. + + o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default + paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was + linked with the -z nodeflib linker option, this step is skipped. 
+ + Returns a tuple of resolved location, rpath_used, in_sysroot + """ rpath_result = None found = False - ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(':') - if unresolved.startswith('$RPATH'): - these_rpaths = [resolved_rpath] if resolved_rpath else \ - rpaths_transitive + \ - ld_library_paths + \ - [dp.replace('$SYSROOT', sysroot) for dp in default_paths] + ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(":") + if unresolved.startswith("$RPATH"): + these_rpaths = ( + [resolved_rpath] + if resolved_rpath + else rpaths_transitive + + ld_library_paths + + [dp.replace("$SYSROOT", sysroot) for dp in default_paths] + ) for rpath in these_rpaths: - resolved = unresolved.replace('$RPATH', rpath) \ - .replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + resolved = ( + unresolved.replace("$RPATH", rpath) + .replace("$SELFDIR", selfdir) + .replace("$EXEDIR", exedir) + ) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) if resolved_rpath or exists or exists_sysroot: @@ -377,13 +446,12 @@ def _get_resolved_location(codefile, if not found: # Return the so name so that it can be warned about as missing. return unresolved, None, False - elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): - resolved = unresolved.replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + elif any(a in unresolved for a in ("$SELFDIR", "$EXEDIR")): + resolved = unresolved.replace("$SELFDIR", selfdir).replace("$EXEDIR", exedir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) else: - if unresolved.startswith('/'): + if unresolved.startswith("/"): return unresolved, None, False else: return os.path.join(selfdir, unresolved), None, False @@ -392,8 +460,14 @@ def _get_resolved_location(codefile, # TODO :: Consider returning a tree structure or a dict when recurse is True? -def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, - sysroot='', envroot='', arch='native'): +def inspect_linkages_lief( + filename, + resolve_filenames=True, + recurse=True, + sysroot="", + envroot="", + arch="native", +): # Already seen is partly about implementing single SONAME # rules and its appropriateness on macOS is TBD! already_seen = set() @@ -403,17 +477,27 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, sysroot = _trim_sysroot(sysroot) default_paths = [] - if binary.format == lief.EXE_FORMATS.ELF: + if not binary: + default_paths = [] + elif binary.format == EXE_FORMATS.ELF: if binary.type == lief.ELF.ELF_CLASS.CLASS64: - default_paths = ['$SYSROOT/lib64', '$SYSROOT/usr/lib64', '$SYSROOT/lib', '$SYSROOT/usr/lib'] + default_paths = [ + "$SYSROOT/lib64", + "$SYSROOT/usr/lib64", + "$SYSROOT/lib", + "$SYSROOT/usr/lib", + ] else: - default_paths = ['$SYSROOT/lib', '$SYSROOT/usr/lib'] - elif binary.format == lief.EXE_FORMATS.MACHO: - default_paths = ['$SYSROOT/usr/lib'] - elif binary.format == lief.EXE_FORMATS.PE: + default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"] + elif binary.format == EXE_FORMATS.MACHO: + default_paths = ["$SYSROOT/usr/lib"] + elif binary.format == EXE_FORMATS.PE: # We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in # get_rpaths() instead since we need to carefully control the order. 
- default_paths = ['$SYSROOT/System32/Wbem', '$SYSROOT/System32/WindowsPowerShell/v1.0'] + default_paths = [ + "$SYSROOT/System32/Wbem", + "$SYSROOT/System32/WindowsPowerShell/v1.0", + ] results = {} rpaths_by_binary = dict() parents_by_filename = dict({filename: None}) @@ -422,27 +506,32 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, todo.pop(0) filename2 = element[0] binary = element[1] + if not binary: + continue uniqueness_key = get_uniqueness_key(binary) if uniqueness_key not in already_seen: parent_exe_dirname = None - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: tmp_filename = filename2 while tmp_filename: - if not parent_exe_dirname and codefile_type(tmp_filename) == 'EXEfile': + if ( + not parent_exe_dirname + and codefile_class(tmp_filename, skip_symlinks=True) + == EXEfile + ): parent_exe_dirname = os.path.dirname(tmp_filename) tmp_filename = parents_by_filename[tmp_filename] else: parent_exe_dirname = exedir # This is a hack for Python on Windows. Sorry. - if '.pyd' in filename2 or (os.sep + 'DLLs' + os.sep) in filename2: - parent_exe_dirname = envroot.replace(os.sep, '/') + '/DLLs' - rpaths_by_binary[filename2] = get_rpaths(binary, - parent_exe_dirname, - envroot.replace(os.sep, '/'), - sysroot) + if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2: + parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs" + rpaths_by_binary[filename2] = get_rpaths( + binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot + ) tmp_filename = filename2 rpaths_transitive = [] - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: rpaths_transitive = rpaths_by_binary[tmp_filename] else: while tmp_filename: @@ -452,37 +541,56 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, if filename2 in libraries: # Happens on macOS, leading to cycles. libraries.remove(filename2) # RPATH is implicit everywhere except macOS, make it explicit to simplify things. - these_orig = [('$RPATH/' + lib if not lib.startswith('/') and not lib.startswith('$') and # noqa - binary.format != lief.EXE_FORMATS.MACHO else lib) - for lib in libraries] + these_orig = [ + ( + "$RPATH/" + lib + if not lib.startswith("/") + and not lib.startswith("$") + and binary.format != EXE_FORMATS.MACHO # noqa + else lib + ) + for lib in libraries + ] for lib, orig in zip(libraries, these_orig): - resolved = _get_resolved_location(binary, - orig, - exedir, - exedir, - rpaths_transitive=rpaths_transitive, - default_paths=default_paths, - sysroot=sysroot) + resolved = _get_resolved_location( + binary, + orig, + exedir, + exedir, + rpaths_transitive=rpaths_transitive, + default_paths=default_paths, + sysroot=sysroot, + ) path_fixed = os.path.normpath(resolved[0]) # Test, randomise case. We only allow for the filename part to be random, and we allow that # only for Windows DLLs. We may need a special case for Lib (from Python) vs lib (from R) # too, but in general we want to enforce case checking as much as we can since even Windows # can be run case-sensitively if the user wishes. 
# - ''' - if binary.format == lief.EXE_FORMATS.PE: + """ + if binary.format == EXE_FORMATS.PE: import random - path_fixed = os.path.dirname(path_fixed) + os.sep + \ - ''.join(random.choice((str.upper, str.lower))(c) for c in os.path.basename(path_fixed)) + path_fixed = ( + os.path.dirname(path_fixed) + + os.sep + + ''.join( + random.choice((str.upper, str.lower))(c) + for c in os.path.basename(path_fixed) + ) + ) if random.getrandbits(1): path_fixed = path_fixed.replace(os.sep + 'lib' + os.sep, os.sep + 'Lib' + os.sep) else: path_fixed = path_fixed.replace(os.sep + 'Lib' + os.sep, os.sep + 'lib' + os.sep) - ''' + """ if resolve_filenames: - rec = {'orig': orig, 'resolved': path_fixed, 'rpaths': rpaths_transitive} + rec = { + "orig": orig, + "resolved": path_fixed, + "rpaths": rpaths_transitive, + } else: - rec = {'orig': orig, 'rpaths': rpaths_transitive} + rec = {"orig": orig, "rpaths": rpaths_transitive} results[lib] = rec parents_by_filename[resolved[0]] = filename2 if recurse: @@ -492,43 +600,68 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, return results -def get_linkages(filename, resolve_filenames=True, recurse=True, - sysroot='', envroot='', arch='native'): +def get_linkages( + filename, + resolve_filenames=True, + recurse=True, + sysroot="", + envroot="", + arch="native", +): # When we switch to lief, want to ensure these results do not change. # We do not support Windows yet with pyldd. result_pyldd = [] debug = False if not have_lief or debug: - if codefile_type(filename) not in ('DLLfile', 'EXEfile'): - result_pyldd = inspect_linkages_pyldd(filename, resolve_filenames=resolve_filenames, recurse=recurse, - sysroot=sysroot, arch=arch) + codefile = codefile_class(filename, skip_symlinks=True) + if codefile not in (DLLfile, EXEfile): + result_pyldd = inspect_linkages_pyldd( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + arch=arch, + ) if not have_lief: return result_pyldd else: - print(f"WARNING: failed to get_linkages, codefile_type('{filename}')={codefile_type(filename)}") + print( + f"WARNING: failed to get_linkages, codefile_class('{filename}', True)={codefile}" + ) return {} - result_lief = inspect_linkages_lief(filename, resolve_filenames=resolve_filenames, recurse=recurse, - sysroot=sysroot, envroot=envroot, arch=arch) + result_lief = inspect_linkages_lief( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + envroot=envroot, + arch=arch, + ) if debug and result_pyldd and set(result_lief) != set(result_pyldd): - print("WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)". 
- format(filename, resolve_filenames, recurse, sysroot, envroot, arch, result_lief, result_pyldd)) + print( + f"WARNING: Disagreement in get_linkages({filename=}, " + f"{resolve_filenames=}, {recurse=}, {sysroot=}, {envroot=}, {arch=}):\n" + f" lief: {result_lief}\n" + f"pyldd: {result_pyldd}\n" + " (using lief)" + ) return result_lief -def get_imports(file, arch='native'): +def get_imports(file, arch="native"): binary = ensure_binary(file) return [str(i) for i in binary.imported_functions] def _get_archive_signature(file): try: - with open(file, 'rb') as f: + with open(file, "rb") as f: index = 0 content = f.read(8) - signature, = struct.unpack('<8s', content[index:8]) + (signature,) = struct.unpack("<8s", content[index:8]) return signature, 8 except: - return '', 0 + return "", 0 debug_static_archives = 0 @@ -536,17 +669,17 @@ def _get_archive_signature(file): def is_archive(file): signature, _ = _get_archive_signature(file) - return True if signature == b'!\n' else False + return True if signature == b"!\n" else False def get_static_lib_exports(file): - # file = '/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a' + # file = '/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a' # noqa: E501 # References: # https://github.com/bminor/binutils-gdb/tree/master/bfd/archive.c # https://en.wikipedia.org/wiki/Ar_(Unix) # https://web.archive.org/web/20100314154747/http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx def _parse_ar_hdr(content, index): - ''' + """ 0 16 File identifier ASCII 16 12 File modification timestamp Decimal 28 6 Owner ID Decimal @@ -554,44 +687,51 @@ def _parse_ar_hdr(content, index): 40 8 File mode Octal 48 10 File size in bytes Decimal 58 2 Ending characters 0x60 0x0A - ''' - header_fmt = '<16s 12s 6s 6s 8s 10s 2s' + """ + header_fmt = "<16s 12s 6s 6s 8s 10s 2s" header_sz = struct.calcsize(header_fmt) - name, modified, owner, group, mode, size, ending = \ - struct.unpack(header_fmt, content[index:index + header_sz]) + name, modified, owner, group, mode, size, ending = struct.unpack( + header_fmt, content[index : index + header_sz] + ) try: size = int(size) except: - print(f'ERROR: {name} has non-integral size of {size}') - return index, '', 0, 0, 'INVALID' - name_len = 0 # File data in BSD format archives begin with a name of this length. - if name.startswith(b'#1/'): - typ = 'BSD' + print(f"ERROR: {name} has non-integral size of {size}") + return index, "", 0, 0, "INVALID" + name_len = ( + 0 # File data in BSD format archives begin with a name of this length. 
+ ) + if name.startswith(b"#1/"): + typ = "BSD" name_len = int(name[3:]) - name, = struct.unpack('<' + str(name_len) + 's', content[index + header_sz:index + header_sz + name_len]) - if b'\x00' in name: - name = name[:name.find(b'\x00')] - elif name.startswith(b'//'): - typ = 'GNU_TABLE' - elif name.strip() == b'/': - typ = 'GNU_SYMBOLS' - elif name.startswith(b'/'): - typ = 'GNU' + (name,) = struct.unpack( + "<" + str(name_len) + "s", + content[index + header_sz : index + header_sz + name_len], + ) + if b"\x00" in name: + name = name[: name.find(b"\x00")] + elif name.startswith(b"//"): + typ = "GNU_TABLE" + elif name.strip() == b"/": + typ = "GNU_SYMBOLS" + elif name.startswith(b"/"): + typ = "GNU" else: - typ = 'NORMAL' - if b'/' in name: - name = name[:name.find(b'/')] - # if debug_static_archives: print("index={}, name={}, ending={}, size={}, type={}".format(index, name, ending, size, typ)) + typ = "NORMAL" + if b"/" in name: + name = name[: name.find(b"/")] + # if debug_static_archives: + # print(f"index={index}, name={name}, ending={ending}, size={size}, type={typ}") index += header_sz + name_len return index, name, name_len, size, typ results = [] signature, len_signature = _get_archive_signature(file) - if signature != b'!\n': + if signature != b"!\n": print(f"ERROR: {file} is not an archive") return results - with open(file, 'rb') as f: + with open(file, "rb") as f: if debug_static_archives: print(f"Archive file {file}") index = 0 @@ -605,64 +745,98 @@ def _parse_ar_hdr(content, index): if debug_static_archives: print(f"ar_hdr index = {hex(index)}") index, name, name_len, size, typ = _parse_ar_hdr(content, index) - if typ == 'GNU_SYMBOLS': + if typ == "GNU_SYMBOLS": # Reference: # https://web.archive.org/web/20070924090618/http://www.microsoft.com/msj/0498/hood0498.aspx - nsymbols, = struct.unpack('>I', content[index:index + 4]) + (nsymbols,) = struct.unpack(">I", content[index : index + 4]) # Reference: # https://docs.microsoft.com/en-us/windows/desktop/api/winnt/ns-winnt-_image_file_header offsets = [] for i in range(nsymbols): - offset, = struct.unpack('>I', content[index + 4 + i * 4:index + 4 + (i + 1) * 4]) + (offset,) = struct.unpack( + ">I", content[index + 4 + i * 4 : index + 4 + (i + 1) * 4] + ) offsets.append(offset) - syms = [symname.decode('utf-8') - for symname in content[index + 4 + (nsymbols * 4):index + size].split(b'\x00')[:nsymbols]] + syms = [ + symname.decode("utf-8") + for symname in content[index + 4 + (nsymbols * 4) : index + size].split( + b"\x00" + )[:nsymbols] + ] for i in range(nsymbols): index2, name, name_len, size, typ = _parse_ar_hdr(content, offsets[i]) obj_starts.add(index2) obj_ends.add(offsets[i]) if debug_static_archives: - print(f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? {content[index2:index2 + 4]}") - elif name.startswith(b'__.SYMDEF'): + print( + f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? 
{content[index2:index2 + 4]}" + ) + elif name.startswith(b"__.SYMDEF"): # Reference: # http://www.manpagez.com/man/5/ranlib/ # https://opensource.apple.com/source/cctools/cctools-921/misc/libtool.c.auto.html # https://opensource.apple.com/source/cctools/cctools-921/misc/nm.c.auto.html # https://opensource.apple.com/source/cctools/cctools-921/libstuff/writeout.c # https://developer.apple.com/documentation/kernel/nlist_64/1583944-n_type?language=objc - if b'64' in name: + if b"64" in name: # 2 uint64_t, a string table index and an offset - ranlib_struct_field_fmt = 'Q' - toc_integers_fmt = 'Q' + ranlib_struct_field_fmt = "Q" + toc_integers_fmt = "Q" else: # 2 uint32_t, a string table index and an offset - ranlib_struct_field_fmt = 'I' - toc_integers_fmt = 'I' + ranlib_struct_field_fmt = "I" + toc_integers_fmt = "I" ranlib_struct_sz = struct.calcsize(ranlib_struct_field_fmt) * 2 toc_integers_sz = struct.calcsize(toc_integers_fmt) - size_ranlib_structs, = struct.unpack('<' + toc_integers_fmt, content[index:index + toc_integers_sz]) + (size_ranlib_structs,) = struct.unpack( + "<" + toc_integers_fmt, content[index : index + toc_integers_sz] + ) # Each of the ranlib structures consists of a zero based offset into the next # section (a string table of symbols) and an offset from the beginning of # the archive to the start of the archive file which defines the symbol nsymbols = size_ranlib_structs // 8 - size_string_table, = struct.unpack('<' + toc_integers_fmt, - content[index + toc_integers_sz + (nsymbols * ranlib_struct_sz):index + 4 + 4 + (nsymbols * ranlib_struct_sz)]) + (size_string_table,) = struct.unpack( + "<" + toc_integers_fmt, + content[ + index + toc_integers_sz + (nsymbols * ranlib_struct_sz) : index + + 4 + + 4 + + (nsymbols * ranlib_struct_sz) + ], + ) ranlib_structs = [] ranlib_index = index + (toc_integers_sz * 2) for i in range(nsymbols): - ran_off, ran_strx = struct.unpack('<' + ranlib_struct_field_fmt + ranlib_struct_field_fmt, - content[ranlib_index + (i * ranlib_struct_sz):ranlib_index + ((i + 1) * ranlib_struct_sz)]) + ran_off, ran_strx = struct.unpack( + "<" + ranlib_struct_field_fmt + ranlib_struct_field_fmt, + content[ + ranlib_index + (i * ranlib_struct_sz) : ranlib_index + + ((i + 1) * ranlib_struct_sz) + ], + ) ranlib_structs.append((ran_strx, ran_off)) if debug_static_archives > 1: - print("string_table: start: {} end: {}".format(hex(ranlib_index + (nsymbols * ranlib_struct_sz)), - hex(ranlib_index + (nsymbols * ranlib_struct_sz) + size_string_table))) - string_table = content[ranlib_index + (nsymbols * ranlib_struct_sz):ranlib_index + (nsymbols * ranlib_struct_sz) + size_string_table] - string_table = string_table.decode('utf-8', errors='ignore') + print( + "string_table: start: {} end: {}".format( + hex(ranlib_index + (nsymbols * ranlib_struct_sz)), + hex( + ranlib_index + + (nsymbols * ranlib_struct_sz) + + size_string_table + ), + ) + ) + string_table = content[ + ranlib_index + (nsymbols * ranlib_struct_sz) : ranlib_index + + (nsymbols * ranlib_struct_sz) + + size_string_table + ] + string_table = string_table.decode("utf-8", errors="ignore") syms = [] for i in range(nsymbols): ranlib_struct = ranlib_structs[i] strx, off = ranlib_struct - sym = string_table[strx:strx + string_table[strx:].find('\x00')] + sym = string_table[strx : strx + string_table[strx:].find("\x00")] syms.append(sym) if debug_static_archives > 1: print(f"{syms[i]} :: strx={hex(strx)}, off={hex(off)}") @@ -674,26 +848,28 @@ def _parse_ar_hdr(content, index): obj_starts = 
sorted(list(obj_starts)) obj_ends = sorted(list(obj_ends))[1:] if debug_static_archives > 1: - print('obj_starts: {}'.format(" ".join(f'0x{o:05x}' for o in obj_starts))) + print("obj_starts: {}".format(" ".join(f"0x{o:05x}" for o in obj_starts))) if debug_static_archives > 1: - print(' obj_ends: {}'.format(" ".join(f'0x{o:05x}' for o in obj_ends))) + print(" obj_ends: {}".format(" ".join(f"0x{o:05x}" for o in obj_ends))) for obj_start, obj_end in zip(obj_starts, obj_ends): - IMAGE_FILE_MACHINE_I386 = 0x014c + IMAGE_FILE_MACHINE_I386 = 0x014C IMAGE_FILE_MACHINE_AMD64 = 0x8664 - MACHINE_TYPE, = struct.unpack('<H', content[obj_start:obj_start + 2]) + (MACHINE_TYPE,) = struct.unpack("<H", content[obj_start : obj_start + 2]) if debug_static_archives > 0: print(hex(obj_start), hex(obj_end), obj_end - obj_start) if MACHINE_TYPE in (IMAGE_FILE_MACHINE_I386, IMAGE_FILE_MACHINE_AMD64): # 'This file is not a PE binary' (yeah, fair enough, it's a COFF file). # Reported at https://github.com/lief-project/LIEF/issues/233#issuecomment-452580391 try: - obj = lief.PE.parse(raw=content[obj_start:obj_end - 1]) + obj = lief.PE.parse(raw=content[obj_start : obj_end - 1]) except: if debug_static_archives > 0: - print("get_static_lib_exports failed, PECOFF not supported by LIEF nor pyldd.") + print( + "get_static_lib_exports failed, PECOFF not supported by LIEF nor pyldd." + ) pass obj = None - elif MACHINE_TYPE == 0xfacf: + elif MACHINE_TYPE == 0xFACF: obj = lief.parse(raw=content[obj_start:obj_end]) # filename = '/Users/rdonnelly/conda/conda-build/macOS-libpython2.7.a/getbuildinfo.o' @@ -716,7 +892,12 @@ def _parse_ar_hdr(content, index): # if sym.is_function and (sym.exported or sym.is_static): # functions.append(sym.name) functions.extend(get_symbols(obj, defined=True, undefined=False)) - return functions, [[0, 0] for sym in functions], functions, [[0, 0] for sym in functions] + return ( + functions, + [[0, 0] for sym in functions], + functions, + [[0, 0] for sym in functions], + ) def get_static_lib_exports_nope(file): @@ -724,79 +905,99 @@ def get_static_lib_exports_nope(file): def get_static_lib_exports_nm(filename): - nm_exe = find_executable('nm') - if sys.platform == 'win32' and not nm_exe: - nm_exe = 'C:\\msys64\\mingw64\\bin\\nm.exe' + nm_exe = find_executable("nm") + if on_win and not nm_exe: + nm_exe = "C:\\msys64\\mingw64\\bin\\nm.exe" if not nm_exe or not os.path.exists(nm_exe): return None - flags = '-Pg' - if sys.platform == 'darwin': - flags = '-PgUj' + flags = "-Pg" + if on_mac: + flags = "-PgUj" try: - out, _ = Popen([nm_exe, flags, filename], shell=False, - stdout=PIPE).communicate() - results = out.decode('utf-8').replace('\r\n', '\n').splitlines() - results = [r.split(' ')[0] for r in results if ' T ' in r and not r.startswith('.text ')] + out, _ = Popen( + [nm_exe, flags, filename], shell=False, stdout=PIPE + ).communicate() + results = out.decode("utf-8").replace("\r\n", "\n").splitlines() + results = [ + r.split(" ")[0] + for r in results + if " T " in r and not r.startswith(".text ") + ] results.sort() except OSError: # nm may not be available or have the correct permissions, this # should not cause a failure, see gh-3287 - print(f'WARNING: nm: failed to get_exports({filename})') + print(f"WARNING: nm: failed to get_exports({filename})") results = None return results def get_static_lib_exports_dumpbin(filename): - r''' + r""" > dumpbin /SYMBOLS /NOLOGO C:\msys64\mingw64\lib\libasprintf.a > C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.20.27508\bin\Hostx64\x64\dumpbin.exe > 020 00000000 UNDEF notype () External | malloc > vs > 004 00000010 SECT1 notype () External | 
_ZN3gnu11autosprintfC1EPKcz - ''' - dumpbin_exe = find_executable('dumpbin') + """ # noqa: E501 + dumpbin_exe = find_executable("dumpbin") if not dumpbin_exe: - ''' + """ Oh the fun: https://stackoverflow.com/questions/41106407/programmatically-finding-the-vs2017-installation-directory Nice to see MS avoiding the Windows Registry though, took them a while! Still, let's ignore that, we just want a good dumpbin! - ''' - pfx86 = os.environ['PROGRAMFILES(X86)'] - programs = [p for p in os.listdir(pfx86) if p.startswith("Microsoft Visual Studio")] + """ + pfx86 = os.environ["PROGRAMFILES(X86)"] + programs = [ + p for p in os.listdir(pfx86) if p.startswith("Microsoft Visual Studio") + ] results = [] for p in programs: - from conda_build.utils import rec_glob dumpbin = rec_glob(os.path.join(pfx86, p), ("dumpbin.exe",)) for result in dumpbin: try: - out, _ = Popen([result, filename], shell=False, - stdout=PIPE).communicate() - lines = out.decode('utf-8').splitlines() - version = lines[0].split(' ')[-1] + out, _ = Popen( + [result, filename], shell=False, stdout=PIPE + ).communicate() + lines = out.decode("utf-8").splitlines() + version = lines[0].split(" ")[-1] results.append((result, version)) except: pass - from conda_build.conda_interface import VersionOrder + results = sorted(results, key=lambda x: VersionOrder(x[1])) dumpbin_exe = results[-1][0] if not dumpbin_exe: return None - flags = ['/NOLOGO'] + flags = ["/NOLOGO"] exports = [] - for flag in ('/SYMBOLS', '/EXPORTS'): + for flag in ("/SYMBOLS", "/EXPORTS"): try: - out, _ = Popen([dumpbin_exe] + flags + [flag] + [filename], shell=False, - stdout=PIPE).communicate() - results = out.decode('utf-8').splitlines() - if flag == '/EXPORTS': - exports.extend([r.split(' ')[-1] for r in results if r.startswith(' ')]) + out, _ = Popen( + [dumpbin_exe] + flags + [flag] + [filename], shell=False, stdout=PIPE + ).communicate() + results = out.decode("utf-8").splitlines() + if flag == "/EXPORTS": + exports.extend( + [ + r.split(" ")[-1] + for r in results + if r.startswith(" ") + ] + ) else: - exports.extend([r.split(' ')[-1] for r in results if ('External ' in r and 'UNDEF ' not in r)]) + exports.extend( + [ + r.split(" ")[-1] + for r in results + if ("External " in r and "UNDEF " not in r) + ] + ) except OSError: # nm may not be available or have the correct permissions, this # should not cause a failure, see gh-3287 - print(f'WARNING: nm: failed to get_exports({filename})') + print(f"WARNING: nm: failed to get_exports({filename})") exports = None exports.sort() return exports @@ -814,12 +1015,14 @@ def get_static_lib_exports_externally(filename): return res_nm -def get_exports(filename, arch='native', enable_static=False): +def get_exports(filename, arch="native", enable_static=False): result = [] if enable_static and isinstance(filename, str): - if (os.path.exists(filename) and - (filename.endswith('.a') or filename.endswith('.lib')) and - is_archive(filename)) and sys.platform != 'win32': + if ( + os.path.exists(filename) + and (filename.endswith(".a") or filename.endswith(".lib")) + and is_archive(filename) + ) and not on_win: # syms = os.system('nm -g {}'.filename) # on macOS at least: # -PgUj is: @@ -827,16 +1030,18 @@ def get_exports(filename, arch='native', enable_static=False): # g: global (exported) only # U: not undefined # j: name only - if debug_static_archives or sys.platform == 'win32': + if debug_static_archives or on_win: exports = get_static_lib_exports_externally(filename) # Now, our own implementation which does not require nm and 
can # handle .lib files. - if sys.platform == 'win32': + if on_win: # Sorry, LIEF does not handle COFF (only PECOFF) and object files are COFF. exports2 = exports else: try: - exports2, flags2, exports2_all, flags2_all = get_static_lib_exports(filename) + exports2, flags2, exports2_all, flags2_all = get_static_lib_exports( + filename + ) except: print(f"WARNING :: Failed to get_static_lib_exports({filename})") exports2 = [] @@ -849,10 +1054,17 @@ def get_exports(filename, arch='native', enable_static=False): if debug_static_archives: print(f"errors: {error_count} (-{len(diff1)}, +{len(diff2)})") if debug_static_archives: - print("WARNING :: Disagreement regarding static lib exports in {} between nm (nsyms={}) and lielfldd (nsyms={}):" - .format(filename, len(exports), len(exports2))) - print("** nm.diff(liefldd) [MISSING SYMBOLS] **\n{}".format('\n'.join(diff1))) - print("** liefldd.diff(nm) [ EXTRA SYMBOLS] **\n{}".format('\n'.join(diff2))) + print( + "WARNING :: Disagreement regarding static lib exports in " + f"{filename} between nm (nsyms={len(exports)}) and " + "lielfldd (nsyms={len(exports2)}):" + ) + print( + "\n".join(("** nm.diff(liefldd) [MISSING SYMBOLS] **", *diff1)) + ) + print( + "\n".join(("** liefldd.diff(nm) [ EXTRA SYMBOLS] **", *diff2)) + ) if not result: binary = ensure_binary(filename) @@ -861,7 +1073,7 @@ def get_exports(filename, arch='native', enable_static=False): return result -def get_relocations(filename, arch='native'): +def get_relocations(filename, arch="native"): if not os.path.exists(filename): return [] try: @@ -874,12 +1086,12 @@ def get_relocations(filename, arch='native'): res.append(r.symbol.name) return res except: - print(f'WARNING: liefldd: failed get_relocations({filename})') + print(f"WARNING: liefldd: failed get_relocations({filename})") return [] -def get_symbols(file, defined=True, undefined=True, notexported=False, arch='native'): +def get_symbols(file, defined=True, undefined=True, notexported=False, arch="native"): binary = ensure_binary(file) first_undefined_symbol = 0 @@ -888,7 +1100,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat try: dyscmd = binary.dynamic_symbol_command first_undefined_symbol = dyscmd.idx_undefined_symbol - last_undefined_symbol = first_undefined_symbol + dyscmd.nb_undefined_symbols - 1 + last_undefined_symbol = ( + first_undefined_symbol + dyscmd.nb_undefined_symbols - 1 + ) except: pass res = [] @@ -903,15 +1117,17 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat for index, s in enumerate(syms): if debug_static_archives > 1: print(s) -# if s.type&16: -# continue + # if s.type&16: + # continue is_notexported = True - is_undefined = index >= first_undefined_symbol and index <= last_undefined_symbol + is_undefined = ( + index >= first_undefined_symbol and index <= last_undefined_symbol + ) if binary.__class__ != lief.MachO.Binary: if isinstance(s, str): - s_name = '%s' % s + s_name = f"{s}" else: - s_name = '%s' % s.name + s_name = f"{s.name}" if s.exported and s.imported: print(f"Weird, symbol {s.name} is both imported and exported") if s.exported: @@ -920,16 +1136,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat elif s.imported: is_undefined = False else: - s_name = '%s' % s.name + s_name = f"{s.name}" is_notexported = False if s.type & 1 else True # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, s.value)) # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, 
s.type, s.name)) if notexported is True or is_notexported is False: if is_undefined and undefined: - res.append('%s' % s_name) + res.append(f"{s_name}") elif not is_undefined and defined: - res.append('%s' % s_name) + res.append(f"{s_name}") return res @@ -941,6 +1157,7 @@ class memoized_by_arg0_filehash: The first argument is required to be an existing filename and it is always converted to an inode number. """ + def __init__(self, func): self.func = func self.cache = {} @@ -951,7 +1168,7 @@ def __call__(self, *args, **kw): for arg in args: if arg is args[0]: sha1 = hashlib.sha1() - with open(arg, 'rb') as f: + with open(arg, "rb") as f: while True: data = f.read(65536) if not data: @@ -978,17 +1195,17 @@ def __call__(self, *args, **kw): @memoized_by_arg0_filehash -def get_exports_memoized(filename, arch='native', enable_static=False): +def get_exports_memoized(filename, arch="native", enable_static=False): return get_exports(filename, arch=arch, enable_static=enable_static) @memoized_by_arg0_filehash -def get_imports_memoized(filename, arch='native'): +def get_imports_memoized(filename, arch="native"): return get_imports(filename, arch=arch) @memoized_by_arg0_filehash -def get_relocations_memoized(filename, arch='native'): +def get_relocations_memoized(filename, arch="native"): return get_relocations(filename, arch=arch) @@ -998,7 +1215,14 @@ def get_symbols_memoized(filename, defined, undefined, arch): @memoized_by_arg0_filehash -def get_linkages_memoized(filename, resolve_filenames, recurse, - sysroot='', envroot='', arch='native'): - return get_linkages(filename, resolve_filenames=resolve_filenames, - recurse=recurse, sysroot=sysroot, envroot=envroot, arch=arch) +def get_linkages_memoized( + filename, resolve_filenames, recurse, sysroot="", envroot="", arch="native" +): + return get_linkages( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + envroot=envroot, + arch=arch, + ) diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 5e19303bf7..17fc5d5a13 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -1,94 +1,114 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import os import re import stat import sys -from subprocess import Popen, check_output, PIPE, STDOUT, CalledProcessError -import os -from conda_build import utils from itertools import islice -from conda_build.os_utils.external import find_preferably_prefixed_executable +from subprocess import PIPE, STDOUT, CalledProcessError, Popen, check_output + +from .. 
import utils +from ..utils import on_mac +from .external import find_preferably_prefixed_executable NO_EXT = ( - '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', - '.xml', '.png', '.jpg', '.gif', '.class', '.in', '.sh', - '.yaml', '.md', '.ac', '.m4', '.cc', '.plist', + ".py", + ".pyc", + ".pyo", + ".h", + ".a", + ".c", + ".txt", + ".html", + ".xml", + ".png", + ".jpg", + ".gif", + ".class", + ".in", + ".sh", + ".yaml", + ".md", + ".ac", + ".m4", + ".cc", + ".plist", ) MAGIC = { - b'\xca\xfe\xba\xbe': 'MachO-universal', - b'\xce\xfa\xed\xfe': 'MachO-i386', - b'\xcf\xfa\xed\xfe': 'MachO-x86_64', - b'\xfe\xed\xfa\xce': 'MachO-ppc', - b'\xfe\xed\xfa\xcf': 'MachO-ppc64', + b"\xca\xfe\xba\xbe": "MachO-universal", + b"\xce\xfa\xed\xfe": "MachO-i386", + b"\xcf\xfa\xed\xfe": "MachO-x86_64", + b"\xfe\xed\xfa\xce": "MachO-ppc", + b"\xfe\xed\xfa\xcf": "MachO-ppc64", } FILETYPE = { - 1: 'MH_OBJECT', - 2: 'MH_EXECUTE', - 3: 'MH_FVMLIB', - 4: 'MH_CORE', - 5: 'MH_PRELOAD', - 6: 'MH_DYLIB', - 7: 'MH_DYLINKER', - 8: 'MH_BUNDLE', - 9: 'MH_DYLIB_STUB', - 10: 'MH_DSYM', - 11: 'MH_KEXT_BUNDLE', + 1: "MH_OBJECT", + 2: "MH_EXECUTE", + 3: "MH_FVMLIB", + 4: "MH_CORE", + 5: "MH_PRELOAD", + 6: "MH_DYLIB", + 7: "MH_DYLINKER", + 8: "MH_BUNDLE", + 9: "MH_DYLIB_STUB", + 10: "MH_DSYM", + 11: "MH_KEXT_BUNDLE", } def is_macho(path): if path.endswith(NO_EXT) or os.path.islink(path) or not os.path.isfile(path): return False - with open(path, 'rb') as fi: + with open(path, "rb") as fi: head = fi.read(4) return bool(head in MAGIC) def is_dylib(path, build_prefix): - return human_filetype(path) == 'DYLIB' + return human_filetype(path) == "DYLIB" def human_filetype(path, build_prefix): - otool = find_apple_cctools_executable('otool', build_prefix) - output = check_output((otool, '-h', path)).decode('utf-8') + otool = find_apple_cctools_executable("otool", build_prefix) + output = check_output((otool, "-h", path)).decode("utf-8") lines = output.splitlines() - if not lines[0].startswith((path, 'Mach header')): + if not lines[0].startswith((path, "Mach header")): raise ValueError( - 'Expected `otool -h` output to start with' - ' Mach header or {}, got:\n{}'.format(path, output) + "Expected `otool -h` output to start with" + f" Mach header or {path}, got:\n{output}" ) - assert lines[0].startswith((path, 'Mach header')), path + assert lines[0].startswith((path, "Mach header")), path for line in lines: - if line.strip().startswith('0x'): + if line.strip().startswith("0x"): header = line.split() filetype = int(header[4]) return FILETYPE[filetype][3:] def is_dylib_info(lines): - dylib_info = ('LC_ID_DYLIB', 'LC_LOAD_DYLIB') + dylib_info = ("LC_ID_DYLIB", "LC_LOAD_DYLIB") if len(lines) > 1 and lines[1].split()[1] in dylib_info: return True return False def is_id_dylib(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_ID_DYLIB': + if len(lines) > 1 and lines[1].split()[1] == "LC_ID_DYLIB": return True return False def is_load_dylib(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_LOAD_DYLIB': + if len(lines) > 1 and lines[1].split()[1] == "LC_LOAD_DYLIB": return True return False def is_rpath(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_RPATH': + if len(lines) > 1 and lines[1].split()[1] == "LC_RPATH": return True return False @@ -122,20 +142,20 @@ def _get_matching_load_commands(lines, cb_filter): # is fairly simple so let's just hardcode it for speed. 
if len(listy) == 2: key, value = listy - elif listy[0] == 'name' or listy[0] == 'path': + elif listy[0] == "name" or listy[0] == "path": # Create an entry for 'name offset' if there is one # as that can be useful if we need to know if there # is space to patch it for relocation purposes. - if listy[2] == '(offset': - key = listy[0] + ' offset' + if listy[2] == "(offset": + key = listy[0] + " offset" value = int(listy[3][:-1]) lcdict[key] = value key, value = listy[0:2] - elif listy[0] == 'time': - key = ' '.join(listy[0:3]) - value = ' '.join(listy[3:]) - elif listy[0] in ('current', 'compatibility'): - key = ' '.join(listy[0:2]) + elif listy[0] == "time": + key = " ".join(listy[0:3]) + value = " ".join(listy[3:]) + elif listy[0] in ("current", "compatibility"): + key = " ".join(listy[0:2]) value = listy[2] try: value = int(value) @@ -150,25 +170,32 @@ def find_apple_cctools_executable(name, build_prefix, nofail=False): tools = find_preferably_prefixed_executable(name, build_prefix, all_matches=True) for tool in tools: try: - if '/usr/bin' in tool: - with open(tool, 'rb') as f: + if "/usr/bin" in tool: + with open(tool, "rb") as f: s = f.read() - if s.find(b'usr/lib/libxcselect.dylib') != -1: + if s.find(b"usr/lib/libxcselect.dylib") != -1: # We ask xcrun. try: - tool_xcr = check_output(['xcrun', '-find', name], stderr=STDOUT).decode('utf-8').splitlines()[0] + tool_xcr = ( + check_output(["xcrun", "-find", name], stderr=STDOUT) + .decode("utf-8") + .splitlines()[0] + ) except Exception as e: log = utils.get_logger(__name__) - log.error("ERROR :: Found `{}` but is is an Apple Xcode stub executable\n" - "and it returned an error:\n{}".format(tool, e.output)) + log.error( + f"ERROR :: Found `{tool}` but is is an Apple Xcode stub executable\n" + f"and it returned an error:\n{e.output}" + ) raise e tool = tool_xcr if os.path.exists(tool): return tool - except Exception as _: # noqa - print("ERROR :: Failed to run `{}`. Please use `conda` to install `cctools` into your base environment.\n" - " An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`." - .format(tool)) + except Exception: # noqa + print( + f"ERROR :: Failed to run `{tool}`. Use `conda` to install `cctools` into your base environment.\n" + f" An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`." + ) sys.exit(1) return tool @@ -190,15 +217,17 @@ def otool(path, build_prefix=None, cb_filter=is_dylib_info): Any key values that can be converted to integers are converted to integers, the rest are strings. """ - otool = find_apple_cctools_executable('otool', build_prefix) - lines = check_output([otool, '-l', path], - stderr=STDOUT).decode('utf-8') + otool = find_apple_cctools_executable("otool", build_prefix) + lines = check_output([otool, "-l", path], stderr=STDOUT).decode("utf-8") # llvm-objdump returns 0 for some things that are anything but successful completion. lines_split = lines.splitlines() # 'invalid', 'expected' and 'unexpected' are too generic # here so also check that we do not get 'useful' output. 
- if len(lines_split) < 10 and (re.match('.*(is not a Mach-O|invalid|expected|unexpected).*', - lines, re.MULTILINE)): + if len(lines_split) < 10 and ( + re.match( + ".*(is not a Mach-O|invalid|expected|unexpected).*", lines, re.MULTILINE + ) + ): raise CalledProcessError(-1, otool) return _get_matching_load_commands(lines_split, cb_filter) @@ -206,22 +235,22 @@ def otool(path, build_prefix=None, cb_filter=is_dylib_info): def get_dylibs(path, build_prefix=None): """Return a list of the loaded dylib pathnames""" dylib_loads = otool(path, build_prefix, is_load_dylib) - return [dylib_load['name'] for dylib_load in dylib_loads] + return [dylib_load["name"] for dylib_load in dylib_loads] def get_id(path, build_prefix=None): """Returns the id name of the Mach-O file `path` or an empty string""" dylib_loads = otool(path, build_prefix, is_id_dylib) try: - return [dylib_load['name'] for dylib_load in dylib_loads][0] + return [dylib_load["name"] for dylib_load in dylib_loads][0] except: - return '' + return "" def get_rpaths(path, build_prefix=None): """Return a list of the dylib rpaths""" dylib_loads = otool(path, build_prefix, is_rpath) - return [dylib_load['path'] for dylib_load in dylib_loads] + return [dylib_load["path"] for dylib_load in dylib_loads] def _chmod(filename, mode): @@ -233,18 +262,18 @@ def _chmod(filename, mode): def install_name_tool(args, build_prefix=None, verbose=False): - args_full = [find_apple_cctools_executable('install_name_tool', build_prefix)] + args_full = [find_apple_cctools_executable("install_name_tool", build_prefix)] args_full.extend(args) if verbose: - print(' '.join(args_full)) + print(" ".join(args_full)) old_mode = stat.S_IMODE(os.stat(args[-1]).st_mode) new_mode = old_mode | stat.S_IWUSR if old_mode != new_mode: _chmod(args[-1], new_mode) subproc = Popen(args_full, stdout=PIPE, stderr=PIPE) out, err = subproc.communicate() - out = out.decode('utf-8') - err = err.decode('utf-8') + out = out.decode("utf-8") + err = err.decode("utf-8") if old_mode != new_mode: _chmod(args[-1], old_mode) return subproc.returncode, out, err @@ -254,10 +283,10 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): """Add an `rpath` to the Mach-O file at `path`""" if not is_macho(path): return - args = ['-add_rpath', rpath, path] + args = ["-add_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "would duplicate path, file already has LC_RPATH for:" in stderr: print("Skipping -add_rpath, file already has LC_RPATH set") @@ -265,18 +294,17 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d" - % code) + raise RuntimeError("install_name_tool failed with exit status %d" % code) def delete_rpath(path, rpath, build_prefix=None, verbose=False): """Delete an `rpath` from the Mach-O file at `path`""" if not is_macho(path): return - args = ['-delete_rpath', rpath, path] + args = ["-delete_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "no LC_RPATH load command with path:" in stderr: 
print("Skipping -delete_rpath, file doesn't contain that LC_RPATH") @@ -284,8 +312,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False): else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d" - % code) + raise RuntimeError("install_name_tool failed with exit status %d" % code) def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): @@ -308,24 +335,26 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): ret = True for index, new_name in changes: args = [] - if dylibs[index]['cmd'] == 'LC_ID_DYLIB': - args.extend(('-id', new_name, path)) + if dylibs[index]["cmd"] == "LC_ID_DYLIB": + args.extend(("-id", new_name, path)) else: - args.extend(('-change', dylibs[index]['name'], new_name, path)) + args.extend(("-change", dylibs[index]["name"], new_name, path)) code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}") ret = False continue else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d, stderr of:\n%s" - % (code, stderr)) + raise RuntimeError( + "install_name_tool failed with exit status %d, stderr of:\n%s" + % (code, stderr) + ) return ret -if __name__ == '__main__': - if sys.platform == 'darwin': - for path in '/bin/ls', '/etc/locate.rc': +if __name__ == "__main__": + if on_mac: + for path in "/bin/ls", "/etc/locate.rc": print(path, is_macho(path)) diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index efce517ce2..ff48d5f891 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -1,14 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import glob +import logging import os import re import struct import sys -import logging +from functools import partial +from pathlib import Path -from conda_build.utils import ensure_list, get_logger +from ..utils import ensure_list, get_logger, on_linux, on_mac, on_win logging.basicConfig(level=logging.INFO) @@ -57,11 +61,11 @@ def __check_security_property(opt, filename, pattern): return results ''' -''' +""" Eventual goal is to become a full replacement for `ldd` `otool -L` and `ntldd' For now only works with ELF and Mach-O files and command-line execution is not supported. To get the list of shared libs use `inspect_linkages(filename)`. -''' +""" LDD_USAGE = """ Usage: ldd [OPTION]... FILE... @@ -74,7 +78,7 @@ def __check_security_property(opt, filename, pattern): For bug reporting instructions, please see: . -""" # noqa +""" # noqa OTOOL_USAGE = """ Usage: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool [-arch arch_type] [-fahlLDtdorSTMRIHGvVcXmqQjCP] [-mcpu=arg] [--version] ... 
@@ -110,35 +114,37 @@ def __check_security_property(opt, filename, pattern): -P print the info plist section as strings -C print linker optimization hints --version print the version of /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool -""" # noqa +""" # noqa ############################################## # Constants used in the Mach-O specification # ############################################## -MH_MAGIC = 0xfeedface -MH_CIGAM = 0xcefaedfe -MH_MAGIC_64 = 0xfeedfacf -MH_CIGAM_64 = 0xcffaedfe -FAT_MAGIC = 0xcafebabe -BIG_ENDIAN = '>' -LITTLE_ENDIAN = '<' -LC_ID_DYLIB = 0xd -LC_LOAD_DYLIB = 0xc +MH_MAGIC = 0xFEEDFACE +MH_CIGAM = 0xCEFAEDFE +MH_MAGIC_64 = 0xFEEDFACF +MH_CIGAM_64 = 0xCFFAEDFE +FAT_MAGIC = 0xCAFEBABE +BIG_ENDIAN = ">" +LITTLE_ENDIAN = "<" +LC_ID_DYLIB = 0xD +LC_LOAD_DYLIB = 0xC LC_LOAD_WEAK_DYLIB = 0x18 LC_LOAD_UPWARD_DYLIB = 0x23 -LC_REEXPORT_DYLIB = 0x1f LC_LAZY_LOAD_DYLIB = 0x20 -LC_LOAD_DYLIBS = (LC_LOAD_DYLIB, - LC_LOAD_WEAK_DYLIB, - LC_LOAD_UPWARD_DYLIB, - LC_LAZY_LOAD_DYLIB, - LC_REEXPORT_DYLIB) +LC_REEXPORT_DYLIB = 0x1F +LC_LOAD_DYLIBS = ( + LC_LOAD_DYLIB, + LC_LOAD_WEAK_DYLIB, + LC_LOAD_UPWARD_DYLIB, + LC_LAZY_LOAD_DYLIB, + LC_REEXPORT_DYLIB, +) LC_REQ_DYLD = 0x80000000 -LC_RPATH = 0x1c | LC_REQ_DYLD +LC_RPATH = 0x1C | LC_REQ_DYLD majver = sys.version_info[0] -maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint') +maxint = majver == 3 and getattr(sys, "maxsize") or getattr(sys, "maxint") class IncompleteRead(Exception): @@ -156,11 +162,11 @@ def __init__(self, file_obj): def read(self, size): buf = self._file_obj.read(size) if len(buf) != size: - raise IncompleteRead('requested number of bytes were not read.') + raise IncompleteRead("requested number of bytes were not read.") return buf def __getattr__(self, attr): - if attr == 'read': + if attr == "read": return self.read else: return getattr(self._file_obj, attr) @@ -182,16 +188,17 @@ def __init__(self, fileobj, start=0, size=maxint): self._pos = 0 def __repr__(self): - return '<fileview [%d, %d] %r>' % ( - self._start, self._end, self._fileobj) + return "<fileview [%d, %d] %r>" % (self._start, self._end, self._fileobj) def tell(self): return self._pos def _checkwindow(self, seekto, op): if not (self._start <= seekto <= self._end): - raise OSError("%s to offset %d is outside window [%d, %d]" % ( - op, seekto, self._start, self._end)) + raise OSError( + "%s to offset %d is outside window [%d, %d]" + % (op, seekto, self._start, self._end) + ) def seek(self, offset, whence=0): seekto = offset @@ -203,14 +210,14 @@ def seek(self, offset, whence=0): seekto += self._end else: raise OSError(f"Invalid whence argument to seek: {whence!r}") - self._checkwindow(seekto, 'seek') + self._checkwindow(seekto, "seek") self._fileobj.seek(seekto) self._pos = seekto - self._start def write(self, bytes): here = self._start + self._pos - self._checkwindow(here, 'write') - self._checkwindow(here + len(bytes), 'write') + self._checkwindow(here, "write") + self._checkwindow(here + len(bytes), "write") self._fileobj.seek(here, os.SEEK_SET) self._fileobj.write(bytes) self._pos += len(bytes) @@ -218,7 +225,7 @@ def write(self, bytes): def read(self, size=maxint): assert size >= 0 here = self._start + self._pos - self._checkwindow(here, 'read') + self._checkwindow(here, "read") size = min(size, self._end - here) self._fileobj.seek(here, os.SEEK_SET) bytes = self._fileobj.read(size) @@ -258,7 +265,7 @@ def read_data(file, endian, num=1): Read a given number of 32-bits unsigned integers from the given file with the given
endianness. """ - res = struct.unpack(endian + 'L' * num, file.read(num * 4)) + res = struct.unpack(endian + "L" * num, file.read(num * 4)) if len(res) == 1: return res[0] return res @@ -273,11 +280,11 @@ def replace_lc_load_dylib(file, where, bits, endian, cmd, cmdsize, what, val): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] # If the string is what is being replaced, overwrite it. if load == what: file.seek(where + name_offset, os.SEEK_SET) - file.write(val.encode() + b'\0') + file.write(val.encode() + b"\0") return True return False @@ -291,7 +298,7 @@ def find_lc_load_dylib(file, where, bits, endian, cmd, cmdsize, what): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] # If the string is what is being replaced, overwrite it. if re.match(what, load): return load @@ -306,14 +313,15 @@ def find_lc_rpath(file, where, bits, endian, cmd, cmdsize): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] return load def do_macho(file, bits, endian, lc_operation, *args): # Read Mach-O header (the magic number is assumed read by the caller) - _cputype, _cpusubtype, filetype, ncmds, _sizeofcmds, _flags \ - = read_data(file, endian, 6) + _cputype, _cpusubtype, filetype, ncmds, _sizeofcmds, _flags = read_data( + file, endian, 6 + ) # 64-bits header has one more field. if bits == 64: read_data(file, endian) @@ -323,8 +331,7 @@ def do_macho(file, bits, endian, lc_operation, *args): where = file.tell() # Read command header cmd, cmdsize = read_data(file, endian, 2) - results.append(lc_operation(file, where, bits, endian, cmd, cmdsize, - *args)) + results.append(lc_operation(file, where, bits, endian, cmd, cmdsize, *args)) # Seek to the next command file.seek(where + cmdsize, os.SEEK_SET) return filetype, results @@ -345,36 +352,19 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): nfat_arch = read_data(file, BIG_ENDIAN) for _n in range(nfat_arch): # Read arch header - _cputype, _cpusubtype, offset, size, _align = \ - read_data(file, BIG_ENDIAN, 5) - do_file(file, lc_operation, offset_size(offset, size), arch, - results, *args) - elif magic == MH_MAGIC and arch in ('any', 'ppc32', 'm68k'): + _cputype, _cpusubtype, offset, size, _align = read_data(file, BIG_ENDIAN, 5) + do_file(file, lc_operation, offset_size(offset, size), arch, results, *args) + elif magic == MH_MAGIC and arch in ("any", "ppc32", "m68k"): results.append(do_macho(file, 32, BIG_ENDIAN, lc_operation, *args)) - elif magic == MH_CIGAM and arch in ('any', 'i386'): + elif magic == MH_CIGAM and arch in ("any", "i386"): results.append(do_macho(file, 32, LITTLE_ENDIAN, lc_operation, *args)) - elif magic == MH_MAGIC_64 and arch in ('any', 'ppc64'): + elif magic == MH_MAGIC_64 and arch in ("any", "ppc64"): results.append(do_macho(file, 64, BIG_ENDIAN, lc_operation, *args)) - elif magic == MH_CIGAM_64 and arch in ('any', 'x86_64'): + elif magic == MH_CIGAM_64 and arch in ("any", "x86_64"): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) -def mach_o_change(path, arch, what, value): - """ - Replace a given name (what) in any LC_LOAD_DYLIB command found in - the given binary with a new name 
(value), provided it's shorter. - """ - - assert len(what) >= len(value) - - results = [] - with open(path, 'r+b') as f: - do_file(f, replace_lc_load_dylib, offset_size(), arch, results, - what, value) - return results - - -def mach_o_find_dylibs(ofile, arch, regex='.*'): +def mach_o_find_dylibs(ofile, arch, regex=".*"): """ Finds the executable's view of where any dylibs live without resolving any macros (@rpath, @loader_path, @executable_path) @@ -393,63 +383,70 @@ def mach_o_find_rpaths(ofile, arch): return results -def _get_resolved_location(codefile, - unresolved, - exe_dir, - self_dir, - LD_LIBRARY_PATH='', - default_paths=None, - sysroot='', - resolved_rpath=None): - ''' - From `man ld.so` - - When resolving shared object dependencies, the dynamic linker first inspects each dependency - string to see if it contains a slash (this can occur if a shared object pathname containing - slashes was specified at link time). If a slash is found, then the dependency string is - interpreted as a (relative or absolute) pathname, and the shared object is loaded using that - pathname. - - If a shared object dependency does not contain a slash, then it is searched for in the - following order: - - o Using the directories specified in the DT_RPATH dynamic section attribute of the binary - if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. - - o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in - secure-execution mode; see below). in which case it is ignored. - - o Using the directories specified in the DT_RUNPATH dynamic section attribute of the - binary if present. Such directories are searched only to find those objects required - by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, - which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, - which is applied to searches for all children in the dependency tree. - - o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate - shared objects previously found in the augmented library path. If, however, the binary - was linked with the -z nodeflib linker option, shared objects in the default paths are - skipped. Shared objects installed in hardware capability directories (see below) are - preferred to other shared objects. - - o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default - paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was - linked with the -z nodeflib linker option, this step is skipped. - - Returns a tuple of resolved location, rpath_used, in_sysroot - ''' +def _get_resolved_location( + codefile, + unresolved, + exe_dir, + self_dir, + LD_LIBRARY_PATH="", + default_paths=None, + sysroot="", + resolved_rpath=None, +): + """ + From `man ld.so` + + When resolving shared object dependencies, the dynamic linker first inspects each dependency + string to see if it contains a slash (this can occur if a shared object pathname containing + slashes was specified at link time). If a slash is found, then the dependency string is + interpreted as a (relative or absolute) pathname, and the shared object is loaded using that + pathname. + + If a shared object dependency does not contain a slash, then it is searched for in the + following order: + + o Using the directories specified in the DT_RPATH dynamic section attribute of the binary + if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. 
+ + o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in + secure-execution mode; see below). in which case it is ignored. + + o Using the directories specified in the DT_RUNPATH dynamic section attribute of the + binary if present. Such directories are searched only to find those objects required + by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, + which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, + which is applied to searches for all children in the dependency tree. + + o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate + shared objects previously found in the augmented library path. If, however, the binary + was linked with the -z nodeflib linker option, shared objects in the default paths are + skipped. Shared objects installed in hardware capability directories (see below) are + preferred to other shared objects. + + o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default + paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was + linked with the -z nodeflib linker option, this step is skipped. + + Returns a tuple of resolved location, rpath_used, in_sysroot + """ rpath_result = None found = False - ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(':') - if unresolved.startswith('$RPATH'): - these_rpaths = [resolved_rpath] if resolved_rpath else \ - codefile.get_rpaths_transitive() + \ - ld_library_paths + \ - codefile.get_rpaths_nontransitive() + \ - [dp.replace('$SYSROOT', sysroot) for dp in ensure_list(default_paths)] + ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(":") + if unresolved.startswith("$RPATH"): + these_rpaths = ( + [resolved_rpath] + if resolved_rpath + else codefile.get_rpaths_transitive() + + ld_library_paths + + codefile.get_rpaths_nontransitive() + + [dp.replace("$SYSROOT", sysroot) for dp in ensure_list(default_paths)] + ) for rpath in these_rpaths: - resolved = unresolved.replace('$RPATH', rpath) \ - .replace('$SELFDIR', self_dir) \ - .replace('$EXEDIR', exe_dir) + resolved = ( + unresolved.replace("$RPATH", rpath) + .replace("$SELFDIR", self_dir) + .replace("$EXEDIR", exe_dir) + ) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) if resolved_rpath or exists or exists_sysroot: @@ -459,13 +456,12 @@ def _get_resolved_location(codefile, if not found: # Return the so name so that it can be warned about as missing. 
return unresolved, None, False - elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): - resolved = unresolved.replace('$SELFDIR', self_dir) \ - .replace('$EXEDIR', exe_dir) + elif any(a in unresolved for a in ("$SELFDIR", "$EXEDIR")): + resolved = unresolved.replace("$SELFDIR", self_dir).replace("$EXEDIR", exe_dir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) else: - if unresolved.startswith('/'): + if unresolved.startswith("/"): return unresolved, None, False else: return os.path.join(self_dir, unresolved), None, False @@ -473,13 +469,18 @@ def _get_resolved_location(codefile, return resolved, rpath_result, exists_sysroot -def _get_resolved_relocated_location(codefile, so, src_exedir, src_selfdir, - dst_exedir, dst_selfdir): - src_resolved, rpath, in_sysroot = _get_resolved_location(codefile, so, src_exedir, src_selfdir) +def _get_resolved_relocated_location( + codefile, so, src_exedir, src_selfdir, dst_exedir, dst_selfdir +): + src_resolved, rpath, in_sysroot = _get_resolved_location( + codefile, so, src_exedir, src_selfdir + ) if in_sysroot: dst_resolved = src_resolved else: - dst_resolved = _get_resolved_location(codefile, so, dst_exedir, dst_selfdir, rpath) + dst_resolved = _get_resolved_location( + codefile, so, dst_exedir, dst_selfdir, rpath + ) return src_resolved, dst_resolved, in_sysroot @@ -496,45 +497,49 @@ def __init__(self, file, arch, initial_rpaths_transitive=[]): file.seek(0) self.rpaths_transitive = initial_rpaths_transitive _filetypes, rpaths = zip(*mach_o_find_rpaths(file, arch)) - local_rpaths = [self.from_os_varnames(rpath.rstrip('/')) - for rpath in rpaths[0] if rpath] + local_rpaths = [ + self.from_os_varnames(rpath.rstrip("/")) for rpath in rpaths[0] if rpath + ] self.rpaths_transitive.extend(local_rpaths) self.rpaths_nontransitive = local_rpaths self.shared_libraries.extend( - [(so, self.from_os_varnames(so)) for so in sos[0] if so]) + [(so, self.from_os_varnames(so)) for so in sos[0] if so] + ) file.seek(0) def to_os_varnames(self, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - return input_.replace('$SELFDIR', '@loader_path') \ - .replace('$EXEDIR', '@executable_path') \ - .replace('$RPATH', '@rpath') + return ( + input_.replace("$SELFDIR", "@loader_path") + .replace("$EXEDIR", "@executable_path") + .replace("$RPATH", "@rpath") + ) def from_os_varnames(self, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - return input_.replace('@loader_path', '$SELFDIR') \ - .replace('@executable_path', '$EXEDIR') \ - .replace('@rpath', '$RPATH') + return ( + input_.replace("@loader_path", "$SELFDIR") + .replace("@executable_path", "$EXEDIR") + .replace("@rpath", "$RPATH") + ) - def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=''): + def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=""): result = [] for so_orig, so in self.shared_libraries: - resolved, rpath, in_sysroot = \ - _get_resolved_location(self, so, src_exedir, src_selfdir, sysroot) + resolved, rpath, in_sysroot = _get_resolved_location( + self, so, src_exedir, src_selfdir, sysroot + ) result.append((so_orig, resolved, rpath, in_sysroot)) return result - def get_relocated_shared_libraries(self, src_exedir, src_selfdir, - dst_exedir, dst_selfdir): + def get_relocated_shared_libraries( + self, src_exedir, src_selfdir, dst_exedir, dst_selfdir + ): result = [] for so in self.shared_libraries: - resolved, dst_resolved, 
in_sysroot = \ - _get_resolved_relocated_location(self, - so, - src_exedir, - src_selfdir, - dst_exedir, - dst_selfdir) + resolved, dst_resolved, in_sysroot = _get_resolved_relocated_location( + self, so, src_exedir, src_selfdir, dst_exedir, dst_selfdir + ) result.append((so, resolved, dst_resolved, in_sysroot)) return result @@ -546,7 +551,7 @@ def uniqueness_key(self): # Constants used in the ELF specification # ########################################### -ELF_HDR = 0x7f454c46 +ELF_HDR = 0x7F454C46 E_TYPE_RELOCATABLE = 1 E_TYPE_EXECUTABLE = 2 E_TYPE_SHARED = 3 @@ -557,11 +562,11 @@ def uniqueness_key(self): E_MACHINE_MIPS = 0x08 E_MACHINE_POWERPC = 0x14 E_MACHINE_ARM = 0x28 -E_MACHINE_SUPERH = 0x2a +E_MACHINE_SUPERH = 0x2A E_MACHINE_IA_64 = 0x32 -E_MACHINE_X86_64 = 0x3e -E_MACHINE_AARCH64 = 0xb7 -E_MACHINE_RISC_V = 0xf3 +E_MACHINE_X86_64 = 0x3E +E_MACHINE_AARCH64 = 0xB7 +E_MACHINE_RISC_V = 0xF3 # It'd be quicker to use struct.calcsize here and a single # struct.unpack but it would be ugly and harder to maintain. @@ -574,10 +579,10 @@ def uniqueness_key(self): PT_PHDR = 6 PT_LOOS = 0x60000000 PT_LOPROC = 0x70000000 -PT_HIPROC = 0x7fffffff -PT_GNU_EH_FRAME = (PT_LOOS + 0x474e550) -PT_GNU_STACK = (PT_LOOS + 0x474e551) -PT_GNU_RELRO = (PT_LOOS + 0x474e552) +PT_HIPROC = 0x7FFFFFFF +PT_GNU_EH_FRAME = PT_LOOS + 0x474E550 +PT_GNU_STACK = PT_LOOS + 0x474E551 +PT_GNU_RELRO = PT_LOOS + 0x474E552 SHT_PROGBITS = 0x1 SHT_SYMTAB = 0x2 @@ -608,8 +613,8 @@ def uniqueness_key(self): SHF_OS_NONCONFORMING = 0x100 SHF_GROUP = 0x200 SHF_TLS = 0x400 -SHF_MASKOS = 0x0ff00000 -SHF_MASKPROC = 0xf0000000 +SHF_MASKOS = 0x0FF00000 +SHF_MASKPROC = 0xF0000000 SHF_ORDERED = 0x4000000 SHF_EXCLUDE = 0x8000000 @@ -644,57 +649,54 @@ def uniqueness_key(self): DT_FINI_ARRAYSZ = 28 DT_RUNPATH = 29 DT_LOOS = 0x60000000 -DT_HIOS = 0x6fffffff +DT_HIOS = 0x6FFFFFFF DT_LOPROC = 0x70000000 -DT_HIPROC = 0x7fffffff +DT_HIPROC = 0x7FFFFFFF class elfheader: def __init__(self, file): - self.hdr, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) + (self.hdr,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) self.dt_needed = [] self.dt_rpath = [] if self.hdr != ELF_HDR: return - bitness, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1)) + (bitness,) = struct.unpack(LITTLE_ENDIAN + "B", file.read(1)) bitness = 32 if bitness == 1 else 64 sz_ptr = int(bitness / 8) - ptr_type = 'Q' if sz_ptr == 8 else 'L' + ptr_type = "Q" if sz_ptr == 8 else "L" self.bitness = bitness self.sz_ptr = sz_ptr self.ptr_type = ptr_type - endian, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1)) + (endian,) = struct.unpack(LITTLE_ENDIAN + "B", file.read(1)) endian = LITTLE_ENDIAN if endian == 1 else BIG_ENDIAN self.endian = endian - self.version, = struct.unpack(endian + 'B', file.read(1)) - self.osabi, = struct.unpack(endian + 'B', file.read(1)) - self.abiver, = struct.unpack(endian + 'B', file.read(1)) - struct.unpack(endian + 'B' * 7, file.read(7)) - self.type, = struct.unpack(endian + 'H', file.read(2)) - self.machine, = struct.unpack(endian + 'H', file.read(2)) - self.version, = struct.unpack(endian + 'L', file.read(4)) - self.entry, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.phoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.shoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.flags, = struct.unpack(endian + 'L', file.read(4)) - self.ehsize, = struct.unpack(endian + 'H', file.read(2)) - self.phentsize, = struct.unpack(endian + 'H', file.read(2)) - self.phnum, = struct.unpack(endian + 'H', file.read(2)) - 
self.shentsize, = struct.unpack(endian + 'H', file.read(2)) - self.shnum, = struct.unpack(endian + 'H', file.read(2)) - self.shstrndx, = struct.unpack(endian + 'H', file.read(2)) + (self.version,) = struct.unpack(endian + "B", file.read(1)) + (self.osabi,) = struct.unpack(endian + "B", file.read(1)) + (self.abiver,) = struct.unpack(endian + "B", file.read(1)) + struct.unpack(endian + "B" * 7, file.read(7)) + (self.type,) = struct.unpack(endian + "H", file.read(2)) + (self.machine,) = struct.unpack(endian + "H", file.read(2)) + (self.version,) = struct.unpack(endian + "L", file.read(4)) + (self.entry,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.phoff,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.shoff,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.flags,) = struct.unpack(endian + "L", file.read(4)) + (self.ehsize,) = struct.unpack(endian + "H", file.read(2)) + (self.phentsize,) = struct.unpack(endian + "H", file.read(2)) + (self.phnum,) = struct.unpack(endian + "H", file.read(2)) + (self.shentsize,) = struct.unpack(endian + "H", file.read(2)) + (self.shnum,) = struct.unpack(endian + "H", file.read(2)) + (self.shstrndx,) = struct.unpack(endian + "H", file.read(2)) loc = file.tell() if loc != self.ehsize: - get_logger(__name__).warning(f'file.tell()={loc} != ehsize={self.ehsize}') + get_logger(__name__).warning(f"file.tell()={loc} != ehsize={self.ehsize}") def __str__(self): - return 'bitness {}, endian {}, version {}, type {}, machine {}, entry {}'.format( # noqa - self.bitness, - self.endian, - self.version, - self.type, - hex(self.machine), - hex(self.entry)) + return ( + f"bitness {self.bitness}, endian {self.endian}, version {self.version}, " + f"type {self.type}, machine {hex(self.machine)}, entry {hex(self.entry)}" + ) class elfsection: @@ -704,16 +706,16 @@ def __init__(self, eh, file): endian = eh.endian # It'd be quicker to use struct.calcsize here and a single # struct.unpack but it would be ugly and harder to maintain. - self.sh_name, = struct.unpack(endian + 'L', file.read(4)) - self.sh_type, = struct.unpack(endian + 'L', file.read(4)) - self.sh_flags, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_addr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_offset, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_size, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_link, = struct.unpack(endian + 'L', file.read(4)) - self.sh_info, = struct.unpack(endian + 'L', file.read(4)) - self.sh_addralign, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_entsize, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_name,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_type,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_flags,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_addr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_offset,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_size,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_link,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_info,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_addralign,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_entsize,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) # Lower priority == post processed earlier so that those # with higher priority can assume already initialized. 
if self.sh_type == SHT_STRTAB: @@ -776,7 +778,7 @@ def postprocess(self, elffile, file): dt_needed = [] dt_rpath = [] dt_runpath = [] - dt_soname = '$EXECUTABLE' + dt_soname = "$EXECUTABLE" if self.sh_entsize == 0: # Some ELF files (e.g., Guile's .go files) include sections # without a table of entries in which case sh_entsize will be 0 @@ -785,8 +787,8 @@ def postprocess(self, elffile, file): num_entries = int(self.sh_size / self.sh_entsize) for m in range(num_entries): file.seek(self.sh_offset + (m * self.sh_entsize)) - d_tag, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - d_val_ptr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (d_tag,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (d_val_ptr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) if d_tag == DT_NEEDED: dt_needed.append(d_val_ptr) elif d_tag == DT_RPATH: @@ -801,20 +803,20 @@ def postprocess(self, elffile, file): strsec, _offset = elffile.find_section_and_offset(dt_strtab_ptr) if strsec and strsec.sh_type == SHT_STRTAB: for n in dt_needed: - end = n + strsec.table[n:].index('\0') + end = n + strsec.table[n:].index("\0") elffile.dt_needed.append(strsec.table[n:end]) for r in dt_rpath: - end = r + strsec.table[r:].index('\0') + end = r + strsec.table[r:].index("\0") path = strsec.table[r:end] - rpaths = [p for p in path.split(':') if path] - elffile.dt_rpath.extend([p.rstrip('/') for p in rpaths]) + rpaths = [p for p in path.split(":") if path] + elffile.dt_rpath.extend([p.rstrip("/") for p in rpaths]) for r in dt_runpath: - end = r + strsec.table[r:].index('\0') + end = r + strsec.table[r:].index("\0") path = strsec.table[r:end] - rpaths = [p for p in path.split(':') if path] - elffile.dt_runpath.extend([p.rstrip('/') for p in rpaths]) - if dt_soname != '$EXECUTABLE': - end = dt_soname + strsec.table[dt_soname:].index('\0') + rpaths = [p for p in path.split(":") if path] + elffile.dt_runpath.extend([p.rstrip("/") for p in rpaths]) + if dt_soname != "$EXECUTABLE": + end = dt_soname + strsec.table[dt_soname:].index("\0") elffile.dt_soname = strsec.table[dt_soname:end] # runpath always takes precedence. 
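The dynamic-section handling in the hunk above resolves DT_NEEDED, DT_RPATH and DT_RUNPATH entries by slicing NUL-terminated strings out of the string table and splitting the RPATH/RUNPATH values on ':'. As a rough illustration of that lookup (a minimal sketch with a made-up string table and offsets, not code from this diff):

    # Illustrative only: mimics the .dynstr lookup done in elfsection.postprocess().
    # The table contents and offsets below are invented for the example.
    strtab = "\0libfoo.so.1\0$ORIGIN/../lib:/opt/myapp/lib/\0"
    dt_needed_offsets = [1]   # hypothetical DT_NEEDED d_val pointers
    dt_runpath_offset = 13    # hypothetical DT_RUNPATH d_val pointer

    def read_str(table, offset):
        # .dynstr entries are NUL terminated; slice up to the next '\0'.
        return table[offset : offset + table[offset:].index("\0")]

    needed = [read_str(strtab, off) for off in dt_needed_offsets]
    runpath = read_str(strtab, dt_runpath_offset)
    # RPATH/RUNPATH values are ':' separated; trailing '/' is stripped, as above.
    runpaths = [p.rstrip("/") for p in runpath.split(":") if p]
    print(needed)    # ['libfoo.so.1']
    print(runpaths)  # ['$ORIGIN/../lib', '/opt/myapp/lib']

The real code keeps DT_RPATH and DT_RUNPATH separate because runpath, when present, takes precedence and is only applied to direct dependencies.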
@@ -827,17 +829,17 @@ def __init__(self, eh, file): ptr_type = eh.ptr_type sz_ptr = eh.sz_ptr endian = eh.endian - self.p_type, = struct.unpack(endian + 'L', file.read(4)) + (self.p_type,) = struct.unpack(endian + "L", file.read(4)) if eh.bitness == 64: - self.p_flags, = struct.unpack(endian + 'L', file.read(4)) - self.p_offset, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_vaddr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_paddr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_filesz, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_memsz, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_flags,) = struct.unpack(endian + "L", file.read(4)) + (self.p_offset,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_vaddr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_paddr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_filesz,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_memsz,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) if eh.bitness == 32: - self.p_flags, = struct.unpack(endian + 'L', file.read(4)) - self.p_align, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_flags,) = struct.unpack(endian + "L", file.read(4)) + (self.p_align,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) def postprocess(self, elffile, file): if self.p_type == PT_INTERP: @@ -845,7 +847,7 @@ def postprocess(self, elffile, file): elffile.program_interpreter = file.read(self.p_filesz - 1).decode() elif self.p_type == PT_LOAD: file.seek(self.p_offset) - if hasattr(elffile, 'ptload_p_vaddr'): + if hasattr(elffile, "ptload_p_vaddr"): elffile.ptload_p_vaddr.append(self.p_vaddr) elffile.ptload_p_paddr.append(self.p_paddr) else: @@ -862,7 +864,7 @@ def __init__(self, file, initial_rpaths_transitive=[]): self.programheaders = [] self.elfsections = [] self.program_interpreter = None - self.dt_soname = '$EXECUTABLE' + self.dt_soname = "$EXECUTABLE" self._dir = os.path.dirname(file.name) for n in range(self.ehdr.phnum): @@ -888,32 +890,35 @@ def __init__(self, file, initial_rpaths_transitive=[]): dt_rpath = [p.rstrip("/") for p in self.dt_rpath] dt_runpath = [p.rstrip("/") for p in self.dt_runpath] - self.rpaths_transitive = [self.from_os_varnames(rpath) - for rpath in (initial_rpaths_transitive + dt_rpath)] - self.rpaths_nontransitive = [self.from_os_varnames(rpath) - for rpath in dt_runpath] + self.rpaths_transitive = [ + self.from_os_varnames(rpath) + for rpath in (initial_rpaths_transitive + dt_rpath) + ] + self.rpaths_nontransitive = [ + self.from_os_varnames(rpath) for rpath in dt_runpath + ] # Lookup must be avoided when DT_NEEDED contains any '/'s - self.shared_libraries = [(needed, needed if '/' in needed else '$RPATH/' + needed) - for needed in self.dt_needed] + self.shared_libraries = [ + (needed, needed if "/" in needed else "$RPATH/" + needed) + for needed in self.dt_needed + ] def to_os_varnames(self, input): if self.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$SELFDIR', '$ORIGIN') \ - .replace(libdir, '$LIB') + libdir = "/lib" + return input.replace("$SELFDIR", "$ORIGIN").replace(libdir, "$LIB") def from_os_varnames(self, input): if self.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$ORIGIN', '$SELFDIR') \ - .replace('$LIB', libdir) + libdir = "/lib" + return input.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir) 
def find_section_and_offset(self, addr): - 'Can be called immediately after the elfsections have been constructed' + "Can be called immediately after the elfsections have been constructed" for es in self.elfsections: if addr >= es.sh_addr and addr < es.sh_addr + es.sh_size: # sections which do not appear in the memory image of the @@ -923,20 +928,21 @@ def find_section_and_offset(self, addr): return es, addr - es.sh_addr return None, None - def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=''): + def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=""): result = [] - default_paths = ['$SYSROOT/lib', '$SYSROOT/usr/lib'] + default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"] if self.ehdr.sz_ptr == 8: - default_paths.extend(['$SYSROOT/lib64', '$SYSROOT/usr/lib64']) + default_paths.extend(["$SYSROOT/lib64", "$SYSROOT/usr/lib64"]) for so_orig, so in self.shared_libraries: - resolved, rpath, in_sysroot = \ - _get_resolved_location(self, - so, - src_exedir, - src_selfdir, - LD_LIBRARY_PATH='', - default_paths=default_paths, - sysroot=sysroot) + resolved, rpath, in_sysroot = _get_resolved_location( + self, + so, + src_exedir, + src_selfdir, + LD_LIBRARY_PATH="", + default_paths=default_paths, + sysroot=sysroot, + ) result.append((so_orig, resolved, rpath, in_sysroot)) return result @@ -967,11 +973,10 @@ def get_dir(self): return self._dir def uniqueness_key(self): - return 'unknown' + return "unknown" class DLLfile(UnixExecutable): - def __init__(self, file, initial_rpaths_transitive=[]): pass @@ -988,7 +993,7 @@ def get_dir(self): return None def uniqueness_key(self): - return 'unknown' + return "unknown" class EXEfile: @@ -996,10 +1001,10 @@ def __init__(self, file, initial_rpaths_transitive=[]): self.super.__init__(self, file, initial_rpaths_transitive) -def codefile(file, arch='any', initial_rpaths_transitive=[]): - if file.name.endswith('.dll'): +def codefile(file, arch="any", initial_rpaths_transitive=[]): + if file.name.endswith(".dll"): return DLLfile(file, list(initial_rpaths_transitive)) - magic, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) + (magic,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) file.seek(0) if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): return machofile(file, arch, list(initial_rpaths_transitive)) @@ -1009,59 +1014,51 @@ def codefile(file, arch='any', initial_rpaths_transitive=[]): return inscrutablefile(file, list(initial_rpaths_transitive)) -def codefile_class(filename, skip_symlinks=False): - if os.path.islink(filename): - if skip_symlinks: - return None - else: - filename = os.path.realpath(filename) - if os.path.isdir(filename): +def codefile_class( + path: str | os.PathLike | Path, + skip_symlinks: bool = False, +) -> type[DLLfile | EXEfile | machofile | elffile] | None: + # same signature as conda.os_utils.liefldd.codefile_class + path = Path(path) + if skip_symlinks and path.is_symlink(): return None - if filename.endswith(('.dll', '.pyd')): + path = path.resolve() + + def _get_magic_bit(path: Path) -> bytes: + with path.open("rb") as handle: + bit = handle.read(4) + return struct.unpack(BIG_ENDIAN + "L", bit)[0] + + if path.is_dir(): + return None + elif path.suffix.lower() in (".dll", ".pyd"): return DLLfile - if filename.endswith('.exe'): + elif path.suffix.lower() == ".exe": return EXEfile - # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. - if filename.endswith('.class'): + elif path.suffix.lower() == ".class": + # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. 
return None - if not os.path.exists(filename) or os.path.getsize(filename) < 4: + elif not path.exists() or path.stat().st_size < 4: return None - with open(filename, 'rb') as file: - magic, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) - file.seek(0) - if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): - return machofile - elif magic == ELF_HDR: - return elffile - return None - - -def is_codefile(filename, skip_symlinks=True): - klass = codefile_class(filename, skip_symlinks=skip_symlinks) - if not klass: - return False - return True - - -def codefile_type(filename, skip_symlinks=True): - "Returns None, 'machofile' or 'elffile'" - klass = codefile_class(filename, skip_symlinks=skip_symlinks) - if not klass: + elif (magic := _get_magic_bit(path)) == ELF_HDR: + return elffile + elif magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): + return machofile + else: return None - return klass.__name__ -def _trim_sysroot(sysroot): +def _trim_sysroot(sysroot: str) -> str: if sysroot: - while sysroot.endswith('/') or sysroot.endswith('\\'): + while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] return sysroot def _get_arch_if_native(arch): - if arch == 'native': - if sys.platform == 'win32': - arch = 'x86_64' if sys.maxsize > 2**32 else 'i686' + if arch == "native": + if on_win: + arch = "x86_64" if sys.maxsize > 2**32 else "i686" else: _, _, _, _, arch = os.uname() return arch @@ -1069,20 +1066,20 @@ def _get_arch_if_native(arch): # TODO :: Consider memoizing instead of repeatedly scanning # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) -def _inspect_linkages_this(filename, sysroot='', arch='native'): - ''' +def _inspect_linkages_this(filename, sysroot: str = "", arch="native"): + """ :param filename: :param sysroot: :param arch: :return: - ''' + """ if not os.path.exists(filename): return None, [], [] sysroot = _trim_sysroot(sysroot) arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: + with open(filename, "rb") as f: # TODO :: Problems here: # TODO :: 1. macOS can modify RPATH for children in each .so # TODO :: 2. Linux can identify the program interpreter which can change the default_paths @@ -1091,7 +1088,7 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): except IncompleteRead: # the file was incomplete, can occur if a package ships a test file # which looks like an ELF file but is not. Orange3 does this. - get_logger(__name__).warning(f'problems inspecting linkages for {filename}') + get_logger(__name__).warning(f"problems inspecting linkages for {filename}") return None, [], [] dirname = os.path.dirname(filename) results = cf.get_resolved_shared_libraries(dirname, dirname, sysroot) @@ -1101,41 +1098,10 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): return cf.uniqueness_key(), orig_names, resolved_names -def inspect_rpaths(filename, resolve_dirnames=True, use_os_varnames=True, - sysroot='', arch='native'): - if not os.path.exists(filename): - return [], [] - sysroot = _trim_sysroot(sysroot) - arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: - # TODO :: Problems here: - # TODO :: 1. macOS can modify RPATH for children in each .so - # TODO :: 2. Linux can identify the program interpreter which can change the initial RPATHs - # TODO :: Should '/lib', '/usr/lib' not include (or be?!) `sysroot`(s) instead? 
- cf = codefile(f, arch, ['/lib', '/usr/lib']) - if resolve_dirnames: - return [_get_resolved_location(cf, rpath, os.path.dirname(filename), - os.path.dirname(filename), sysroot)[0] - for rpath in cf.rpaths_nontransitive] - else: - if use_os_varnames: - return [cf.to_os_varnames(rpath) for rpath in cf.rpaths_nontransitive] - else: - return cf.rpaths_nontransitive - - -def get_runpaths(filename, arch='native'): - if not os.path.exists(filename): - return [] - arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: - cf = codefile(f, arch, ['/lib', '/usr/lib']) - return cf.get_runpaths() - - # TODO :: Consider returning a tree structure or a dict when recurse is True? -def inspect_linkages(filename, resolve_filenames=True, recurse=True, - sysroot='', arch='native'): +def inspect_linkages( + filename, resolve_filenames=True, recurse=True, sysroot: str = "", arch="native" +): already_seen = set() todo = {filename} done = set() @@ -1143,13 +1109,14 @@ def inspect_linkages(filename, resolve_filenames=True, recurse=True, while todo != done: filename = next(iter(todo - done)) uniqueness_key, these_orig, these_resolved = _inspect_linkages_this( - filename, sysroot=sysroot, arch=arch) + filename, sysroot=sysroot, arch=arch + ) if uniqueness_key not in already_seen: for orig, resolved in zip(these_orig, these_resolved): if resolve_filenames: - rec = {'orig': orig, 'resolved': os.path.normpath(resolved)} + rec = {"orig": orig, "resolved": os.path.normpath(resolved)} else: - rec = {'orig': orig} + rec = {"orig": orig} results[orig] = rec if recurse: todo.update(these_resolved) @@ -1158,150 +1125,150 @@ def inspect_linkages(filename, resolve_filenames=True, recurse=True, return results -def inspect_linkages_otool(filename, arch='native'): +def inspect_linkages_otool(filename, arch="native"): from subprocess import check_output - args = ['/usr/bin/otool'] - if arch != 'native': - args.extend(['-arch', arch]) + + args = ["/usr/bin/otool"] + if arch != "native": + args.extend(["-arch", arch]) else: # 'x86_64' if sys.maxsize > 2**32 else 'i386' - args.extend(['-arch', os.uname()[4]]) - args.extend(['-L', filename]) - result = check_output(args).decode(encoding='ascii') - groups = re.findall(r'^\t(.*) \(compatibility', result, re.MULTILINE) + args.extend(["-arch", os.uname()[4]]) + args.extend(["-L", filename]) + result = check_output(args).decode(encoding="ascii") + groups = re.findall(r"^\t(.*) \(compatibility", result, re.MULTILINE) return groups # TODO :: Consider allowing QEMU/binfmt_misc to run foreign binaries + passing a sysroot here? 
def inspect_linkages_ldd(filename): from subprocess import PIPE, Popen - process = Popen(['/usr/bin/ldd', filename], stdout=PIPE, stderr=PIPE) + + process = Popen(["/usr/bin/ldd", filename], stdout=PIPE, stderr=PIPE) result, err = process.communicate() - result = result.decode(encoding='ascii') - err = err.decode(encoding='ascii') - groups = re.findall(r'^\t(?!linux-gate\.so\.1.*$)[^ ]+ => (.*) \([0-9a-fx]+\)', - result, re.MULTILINE) + result = result.decode(encoding="ascii") + err = err.decode(encoding="ascii") + groups = re.findall( + r"^\t(?!linux-gate\.so\.1.*$)[^ ]+ => (.*) \([0-9a-fx]+\)", result, re.MULTILINE + ) return groups def otool(*args): - parser = argparse.ArgumentParser(prog='otool', add_help=False) - parser.add_argument("-h", "--help", action='store_true') - parser.add_argument("-arch", dest='arch_type', help="arch_type", - default='native') - parser.add_argument("-L", dest='filename', - help="print shared libraries used") + parser = argparse.ArgumentParser(prog="otool", add_help=False) + parser.add_argument("-h", "--help", action="store_true") + parser.add_argument("-arch", dest="arch_type", help="arch_type", default="native") + parser.add_argument("-L", dest="filename", help="print shared libraries used") args = parser.parse_args(args) if args.help: print(OTOOL_USAGE) return 0 if args.filename: - shared_libs = inspect_linkages(args.filename, resolve_filenames=False, - recurse=False, arch=args.arch_type) - print("Shared libs used (non-recursively) by {} are:\n{}".format(args.filename, - shared_libs)) + shared_libs = inspect_linkages( + args.filename, resolve_filenames=False, recurse=False, arch=args.arch_type + ) + print( + f"Shared libs used (non-recursively) by {args.filename} are:\n{shared_libs}" + ) return 0 return 1 -def otool_sys(*args): - import subprocess - result = subprocess.check_output('/usr/bin/otool', args).\ - decode(encoding='ascii') - return result - - -def ldd_sys(*args): - result = [] - return result - - def ldd(*args): - parser = argparse.ArgumentParser(prog='ldd', add_help=False) - parser.add_argument("-h", "--help", action='store_true') + parser = argparse.ArgumentParser(prog="ldd", add_help=False) + parser.add_argument("-h", "--help", action="store_true") parser.add_argument("filename") args = parser.parse_args(args) if args.help: print(LDD_USAGE) return 0 if args.filename: - shared_libs = inspect_linkages(args.filename, resolve_filenames=False, - recurse=True) - print("Shared libs used (recursively) by {} are:\n{}".format(args.filename, - shared_libs)) + shared_libs = inspect_linkages( + args.filename, resolve_filenames=False, recurse=True + ) + print(f"Shared libs used (recursively) by {args.filename} are:\n{shared_libs}") return 0 return 1 def main(argv): for idx, progname in enumerate(argv[0:2][::-1]): - if re.match(r'.*ldd(?:$|\.exe|\.py)', progname): - return ldd(*argv[2 - idx:]) - elif re.match(r'.*otool(?:$|\.exe|\.py)', progname): - return otool(*argv[2 - idx:]) + if re.match(r".*ldd(?:$|\.exe|\.py)", progname): + return ldd(*argv[2 - idx :]) + elif re.match(r".*otool(?:$|\.exe|\.py)", progname): + return otool(*argv[2 - idx :]) elif os.path.isfile(progname): - klass = codefile_class(progname) - if not klass: + if not (codefile := codefile_class(progname)): return 1 - elif klass == elffile: - return ldd(*argv[1 - idx:]) - elif klass == machofile: - return otool('-L', *argv[1 - idx:]) + elif codefile == elffile: + return ldd(*argv[1 - idx :]) + elif codefile == machofile: + return otool("-L", *argv[1 - idx :]) return 1 def 
main_maybe_test(): - if sys.argv[1] == 'test': - import functools + if sys.argv[1] == "test": tool = sys.argv[2] - if tool != 'otool' and tool != 'ldd': - if sys.platform == 'darwin': - tool = 'otool' + if tool != "otool" and tool != "ldd": + if on_mac: + tool = "otool" else: - tool = 'ldd' + tool = "ldd" test_that = None - sysroot_args = [re.match('--sysroot=([^ ]+)', arg) for arg in sys.argv - if re.match('--sysroot=([^ ]+)', arg)] + sysroot_args = [ + re.match("--sysroot=([^ ]+)", arg) + for arg in sys.argv + if re.match("--sysroot=([^ ]+)", arg) + ] if len(sysroot_args): - sysroot, = sysroot_args[-1].groups(1) + (sysroot,) = sysroot_args[-1].groups(1) sysroot = os.path.expanduser(sysroot) else: - sysroot = '' - if tool == 'otool': - test_this = functools.partial(inspect_linkages, sysroot=sysroot, - resolve_filenames=False, recurse=False) - if sys.platform == 'darwin': - test_that = functools.partial(inspect_linkages_otool) - SOEXT = 'dylib' - elif tool == 'ldd': - test_this = functools.partial(inspect_linkages, sysroot=sysroot, - resolve_filenames=True, recurse=True) - if sys.platform.startswith('linux'): - test_that = functools.partial(inspect_linkages_ldd) - SOEXT = 'so' + sysroot = "" + if tool == "otool": + test_this = partial( + inspect_linkages, + sysroot=sysroot, + resolve_filenames=False, + recurse=False, + ) + if on_mac: + test_that = partial(inspect_linkages_otool) + SOEXT = "dylib" + elif tool == "ldd": + test_this = partial( + inspect_linkages, sysroot=sysroot, resolve_filenames=True, recurse=True + ) + if on_linux: + test_that = partial(inspect_linkages_ldd) + SOEXT = "so" # Find a load of dylibs or elfs and compare # the output against 'otool -L' or 'ldd' # codefiles = glob.glob('/usr/lib/*.'+SOEXT) - codefiles = glob.glob(sysroot + '/usr/lib/*.' + SOEXT) + codefiles = glob.glob(sysroot + "/usr/lib/*." 
+ SOEXT) # codefiles = ['/usr/bin/file'] # Sometimes files do not exist: # (/usr/lib/libgutenprint.2.dylib -> libgutenprint.2.0.3.dylib) - codefiles = [codefile for codefile in codefiles - if not os.path.islink(codefile) or os.path.exists(os.readlink(codefile))] + codefiles = [ + codefile + for codefile in codefiles + if not os.path.islink(codefile) or os.path.exists(os.readlink(codefile)) + ] for codefile in codefiles: - print(f'\nchecking {codefile}') + print(f"\nchecking {codefile}") this = test_this(codefile) if test_that: that = test_that(codefile) else: that = this - print('\n'.join(this)) - assert set(this) == set(that),\ - "py-ldd result incorrect for {}, this:\n{}\nvs that:\n{}".\ - format(codefile, set(this), set(that)) + print("\n".join(this)) + assert ( + set(this) == set(that) + ), f"py-ldd result incorrect for {codefile}, this:\n{set(this)}\nvs that:\n{set(that)}" else: return main(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main_maybe_test()) diff --git a/conda_build/plugin.py b/conda_build/plugin.py new file mode 100644 index 0000000000..17cbcad143 --- /dev/null +++ b/conda_build/plugin.py @@ -0,0 +1,106 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from typing import TYPE_CHECKING + +import conda.plugins + +if TYPE_CHECKING: + from typing import Sequence + + +# lazy-import to avoid nasty import-time side effects when not using conda-build +def build(args: Sequence[str]) -> int: + from .cli.main_build import execute + + return execute(args) + + +def convert(args: Sequence[str]) -> int: + from .cli.main_convert import execute + + return execute(args) + + +def debug(args: Sequence[str]) -> int: + from .cli.main_debug import execute + + return execute(args) + + +def develop(args: Sequence[str]) -> int: + from .cli.main_develop import execute + + return execute(args) + + +def inspect(args: Sequence[str]) -> int: + from .cli.main_inspect import execute + + return execute(args) + + +def metapackage(args: Sequence[str]) -> int: + from .cli.main_metapackage import execute + + return execute(args) + + +def render(args: Sequence[str]) -> int: + from .cli.main_render import execute + + return execute(args) + + +def skeleton(args: Sequence[str]) -> int: + from .cli.main_skeleton import execute + + return execute(args) + + +@conda.plugins.hookimpl +def conda_subcommands(): + yield conda.plugins.CondaSubcommand( + name="build", + summary="Build conda packages from a conda recipe.", + action=build, + ) + yield conda.plugins.CondaSubcommand( + name="convert", + summary="Convert pure Python packages to other platforms (a.k.a., subdirs).", + action=convert, + ) + yield conda.plugins.CondaSubcommand( + name="debug", + summary="Debug the build or test phases of conda recipes.", + action=debug, + ) + yield conda.plugins.CondaSubcommand( + name="develop", + summary=( + "Install a Python package in 'development mode'. " + "Similar to `pip install --editable`." 
+ ), + action=develop, + ) + yield conda.plugins.CondaSubcommand( + name="inspect", + summary="Tools for inspecting conda packages.", + action=inspect, + ) + yield conda.plugins.CondaSubcommand( + name="metapackage", + summary="Specialty tool for generating conda metapackage.", + action=metapackage, + ) + yield conda.plugins.CondaSubcommand( + name="render", + summary="Expand a conda recipe into a platform-specific recipe.", + action=render, + ) + yield conda.plugins.CondaSubcommand( + name="skeleton", + summary="Generate boilerplate conda recipes.", + action=skeleton, + ) diff --git a/conda_build/post.py b/conda_build/post.py index 6259b97ac6..67c6a355a7 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1,51 +1,82 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from copy import copy -from collections import defaultdict, OrderedDict -from functools import partial -from fnmatch import fnmatch, filter as fnmatch_filter, translate as fnmatch_translate -from os.path import (basename, dirname, exists, isabs, isdir, isfile, - islink, join, normpath, realpath, relpath, sep, splitext) +from __future__ import annotations + +import json import locale -import re import os +import re import shutil import stat -from subprocess import call, check_output, CalledProcessError import sys -try: - from os import readlink -except ImportError: - readlink = False - -from conda_build.os_utils import external -from conda_build.conda_interface import lchmod -from conda_build.conda_interface import walk_prefix -from conda_build.conda_interface import TemporaryDirectory -from conda_build.conda_interface import md5_file - -from conda_build import utils -from conda_build.os_utils.liefldd import (have_lief, get_exports_memoized, - get_linkages_memoized, get_rpaths_raw, - get_runpaths_raw, set_rpath) -from conda_build.os_utils.pyldd import codefile_type -from conda_build.os_utils.ldd import get_package_files, get_package_obj_files -from conda_build.inspect_pkg import which_package -from conda_build.exceptions import (OverLinkingError, OverDependingError, RunPathError) - -from conda_build.os_utils import macho - +from collections import OrderedDict, defaultdict +from copy import copy +from fnmatch import filter as fnmatch_filter +from fnmatch import fnmatch +from fnmatch import translate as fnmatch_translate +from functools import partial +from os.path import ( + basename, + dirname, + exists, + isabs, + isdir, + isfile, + islink, + join, + normpath, + realpath, + relpath, + sep, + splitext, +) +from pathlib import Path +from subprocess import CalledProcessError, call, check_output +from typing import TYPE_CHECKING + +from conda.core.prefix_data import PrefixData +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.link import lchmod +from conda.gateways.disk.read import compute_sum +from conda.misc import walk_prefix +from conda.models.records import PrefixRecord + +from . 
import utils +from .exceptions import OverDependingError, OverLinkingError, RunPathError +from .inspect_pkg import which_package +from .os_utils import external, macho +from .os_utils.liefldd import ( + get_exports_memoized, + get_linkages_memoized, + get_rpaths_raw, + get_runpaths_raw, + have_lief, + set_rpath, +) +from .os_utils.pyldd import ( + DLLfile, + EXEfile, + codefile_class, + elffile, + machofile, +) +from .utils import on_mac, on_win, prefix_files + +if TYPE_CHECKING: + from typing import Literal + + from .metadata import MetaData filetypes_for_platform = { - "win": ('DLLfile', 'EXEfile'), - "osx": ['machofile'], - "linux": ['elffile'], + "win": (DLLfile, EXEfile), + "osx": (machofile,), + "linux": (elffile,), } def fix_shebang(f, prefix, build_python, osx_is_app=False): path = join(prefix, f) - if codefile_type(path): + if codefile_class(path, skip_symlinks=True): return elif islink(path): return @@ -58,31 +89,36 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): bytes_ = False os.chmod(path, 0o775) - with open(path, mode='r+', encoding=locale.getpreferredencoding()) as fi: + with open(path, mode="r+", encoding=locale.getpreferredencoding()) as fi: try: data = fi.read(100) fi.seek(0) except UnicodeDecodeError: # file is binary return - SHEBANG_PAT = re.compile(r'^#!.+$', re.M) + SHEBANG_PAT = re.compile(r"^#!.+$", re.M) # regexp on the memory mapped file so we only read it into # memory if the regexp matches. try: - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) + mm = utils.mmap_mmap( + fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE + ) except OSError: mm = fi.read() try: m = SHEBANG_PAT.match(mm) except TypeError: - SHEBANG_PAT = re.compile(br'^#!.+$', re.M) + SHEBANG_PAT = re.compile(rb"^#!.+$", re.M) bytes_ = True m = SHEBANG_PAT.match(mm) if m: - python_pattern = (re.compile(br'\/python[w]?(?:$|\s|\Z)', re.M) if bytes_ else - re.compile(r'\/python[w]?(:$|\s|\Z)', re.M)) + python_pattern = ( + re.compile(rb"\/python[w]?(?:$|\s|\Z)", re.M) + if bytes_ + else re.compile(r"\/python[w]?(:$|\s|\Z)", re.M) + ) if not re.search(python_pattern, m.group()): return else: @@ -90,16 +126,18 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): data = mm[:] - py_exec = '#!' + ('/bin/bash ' + prefix + '/bin/pythonw' - if sys.platform == 'darwin' and osx_is_app else - prefix + '/bin/' + basename(build_python)) - if bytes_ and hasattr(py_exec, 'encode'): + py_exec = "#!" 
+ ( + "/bin/bash " + prefix + "/bin/pythonw" + if on_mac and osx_is_app + else prefix + "/bin/" + basename(build_python) + ) + if bytes_ and hasattr(py_exec, "encode"): py_exec = py_exec.encode() new_data = SHEBANG_PAT.sub(py_exec, data, count=1) if new_data == data: return print("updating shebang:", f) - with open(path, 'w', encoding=locale.getpreferredencoding()) as fo: + with open(path, "w", encoding=locale.getpreferredencoding()) as fo: try: fo.write(new_data) except TypeError: @@ -108,10 +146,15 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): def write_pth(egg_path, config): fn = basename(egg_path) - py_ver = '.'.join(config.variant['python'].split('.')[:2]) - with open(join(utils.get_site_packages(config.host_prefix, py_ver), - '%s.pth' % (fn.split('-')[0])), 'w') as fo: - fo.write('./%s\n' % fn) + py_ver = ".".join(config.variant["python"].split(".")[:2]) + with open( + join( + utils.get_site_packages(config.host_prefix, py_ver), + "{}.pth".format(fn.split("-")[0]), + ), + "w", + ) as fo: + fo.write(f"./{fn}\n") def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): @@ -120,24 +163,25 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): itself """ absfiles = [join(prefix, f) for f in files] - py_ver = '.'.join(config.variant['python'].split('.')[:2]) + py_ver = ".".join(config.variant["python"].split(".")[:2]) sp_dir = utils.get_site_packages(prefix, py_ver) - for egg_path in utils.glob(join(sp_dir, '*-py*.egg')): + for egg_path in utils.glob(join(sp_dir, "*-py*.egg")): if isdir(egg_path): - if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i - in walk_prefix(egg_path, False, windows_forward_slashes=False)): + if preserve_egg_dir or not any( + join(egg_path, i) in absfiles + for i in walk_prefix(egg_path, False, windows_forward_slashes=False) + ): write_pth(egg_path, config=config) continue - print('found egg dir:', egg_path) + print("found egg dir:", egg_path) try: - shutil.move(join(egg_path, 'EGG-INFO'), - egg_path + '-info') + shutil.move(join(egg_path, "EGG-INFO"), egg_path + "-info") except OSError: pass - utils.rm_rf(join(egg_path, 'EGG-INFO')) + utils.rm_rf(join(egg_path, "EGG-INFO")) for fn in os.listdir(egg_path): - if fn == '__pycache__': + if fn == "__pycache__": utils.rm_rf(join(egg_path, fn)) else: # this might be a name-space package @@ -145,34 +189,37 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): # from another installed dependency if exists(join(sp_dir, fn)): try: - utils.copy_into(join(egg_path, fn), - join(sp_dir, fn), config.timeout, - locking=config.locking) + utils.copy_into( + join(egg_path, fn), + join(sp_dir, fn), + config.timeout, + locking=config.locking, + ) utils.rm_rf(join(egg_path, fn)) except OSError as e: fn = basename(str(e).split()[-1]) - raise OSError("Tried to merge folder {egg_path} into {sp_dir}, but {fn}" - " exists in both locations. Please either add " - "build/preserve_egg_dir: True to meta.yaml, or manually " - "remove the file during your install process to avoid " - "this conflict." - .format(egg_path=egg_path, sp_dir=sp_dir, fn=fn)) + raise OSError( + f"Tried to merge folder {egg_path} into {sp_dir}, but {fn}" + " exists in both locations. Please either add " + "build/preserve_egg_dir: True to meta.yaml, or manually " + "remove the file during your install process to avoid " + "this conflict." 
+ ) else: shutil.move(join(egg_path, fn), join(sp_dir, fn)) elif isfile(egg_path): if egg_path not in absfiles: continue - print('found egg:', egg_path) + print("found egg:", egg_path) write_pth(egg_path, config=config) - installer_files = [f for f in absfiles - if f.endswith(f".dist-info{sep}INSTALLER")] + installer_files = [f for f in absfiles if f.endswith(f".dist-info{sep}INSTALLER")] for file in installer_files: - with open(file, 'w') as f: - f.write('conda') + with open(file, "w") as f: + f.write("conda") - utils.rm_rf(join(sp_dir, 'easy-install.pth')) + utils.rm_rf(join(sp_dir, "easy-install.pth")) def rm_py_along_so(prefix): @@ -180,8 +227,8 @@ def rm_py_along_so(prefix): files = list(os.scandir(prefix)) for fn in files: - if fn.is_file() and fn.name.endswith(('.so', '.pyd')): - for ext in '.py', '.pyc', '.pyo': + if fn.is_file() and fn.name.endswith((".so", ".pyd")): + for ext in ".py", ".pyc", ".pyo": name, _ = splitext(fn.path) name = normpath(name + ext) if any(name == normpath(f) for f in files): @@ -196,23 +243,23 @@ def rm_pyo(files, prefix): optimize = 1 .. in setup.cfg in which case we can end up with some stdlib __pycache__ files ending in .opt-N.pyc on Python 3, as well as .pyo files for the - package's own python. """ - re_pyo = re.compile(r'.*(?:\.pyo$|\.opt-[0-9]\.pyc)') + package's own python.""" + re_pyo = re.compile(r".*(?:\.pyo$|\.opt-[0-9]\.pyc)") for fn in files: if re_pyo.match(fn): os.unlink(join(prefix, fn)) def rm_pyc(files, prefix): - re_pyc = re.compile(r'.*(?:\.pyc$)') + re_pyc = re.compile(r".*(?:\.pyc$)") for fn in files: if re_pyc.match(fn): os.unlink(join(prefix, fn)) def rm_share_info_dir(files, prefix): - if 'share/info/dir' in files: - fn = join(prefix, 'share', 'info', 'dir') + if "share/info/dir" in files: + fn = join(prefix, "share", "info", "dir") if isfile(fn): os.unlink(fn) @@ -228,25 +275,31 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): unskipped_files = set(files) - skipped_files for fn in unskipped_files: # omit files in Library/bin, Scripts, and the root prefix - they are not generally imported - if sys.platform == 'win32': - if any([fn.lower().startswith(start) for start in ['library/bin', 'library\\bin', - 'scripts']]): + if on_win: + if any( + [ + fn.lower().startswith(start) + for start in ["library/bin", "library\\bin", "scripts"] + ] + ): continue else: - if fn.startswith('bin'): + if fn.startswith("bin"): continue - cache_prefix = ("__pycache__" + os.sep) - if (fn.endswith(".py") and - dirname(fn) + cache_prefix + basename(fn) + 'c' not in files): + cache_prefix = "__pycache__" + os.sep + if ( + fn.endswith(".py") + and dirname(fn) + cache_prefix + basename(fn) + "c" not in files + ): compile_files.append(fn) if compile_files: if not isfile(python_exe): - print('compiling .pyc files... failed as no python interpreter was found') + print("compiling .pyc files... 
failed as no python interpreter was found") else: - print('compiling .pyc files...') + print("compiling .pyc files...") # We avoid command lines longer than 8190 - if sys.platform == 'win32': + if on_win: limit = 8190 else: limit = 32760 @@ -255,8 +308,8 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): if limit < lower_limit: limit = lower_limit groups = [[]] - args = [python_exe, '-Wi', '-m', 'py_compile'] - args_len = length = len(' '.join(args)) + 1 + args = [python_exe, "-Wi", "-m", "py_compile"] + args_len = length = len(" ".join(args)) + 1 for f in compile_files: length_this = len(f) + 1 if length_this + length > limit: @@ -271,27 +324,40 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): def check_dist_info_version(name, version, files): for f in files: - if f.endswith('.dist-info' + os.sep + 'METADATA'): + if f.endswith(".dist-info" + os.sep + "METADATA"): f_lower = basename(dirname(f).lower()) - if f_lower.startswith(name + '-'): - f_lower, _, _ = f_lower.rpartition('.dist-info') - _, distname, f_lower = f_lower.rpartition(name + '-') + if f_lower.startswith(name + "-"): + f_lower, _, _ = f_lower.rpartition(".dist-info") + _, distname, f_lower = f_lower.rpartition(name + "-") if distname == name and version != f_lower: - print(f"ERROR: Top level dist-info version incorrect (is {f_lower}, should be {version})") + print( + f"ERROR: Top level dist-info version incorrect (is {f_lower}, should be {version})" + ) sys.exit(1) else: return -def post_process(name, version, files, prefix, config, preserve_egg_dir=False, noarch=False, skip_compile_pyc=()): +def post_process( + name, + version, + files, + prefix, + config, + preserve_egg_dir=False, + noarch=False, + skip_compile_pyc=(), +): rm_pyo(files, prefix) if noarch: rm_pyc(files, prefix) else: - python_exe = (config.build_python if isfile(config.build_python) else - config.host_python) - compile_missing_pyc(files, cwd=prefix, python_exe=python_exe, - skip_compile_pyc=skip_compile_pyc) + python_exe = ( + config.build_python if isfile(config.build_python) else config.host_python + ) + compile_missing_pyc( + files, cwd=prefix, python_exe=python_exe, skip_compile_pyc=skip_compile_pyc + ) remove_easy_install_pth(files, prefix, config, preserve_egg_dir=preserve_egg_dir) rm_py_along_so(prefix) rm_share_info_dir(files, prefix) @@ -300,23 +366,23 @@ def post_process(name, version, files, prefix, config, preserve_egg_dir=False, n def find_lib(link, prefix, files, path=None): if link.startswith(prefix): - link = normpath(link[len(prefix) + 1:]) + link = normpath(link[len(prefix) + 1 :]) if not any(link == normpath(w) for w in files): - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") return link - if link.startswith('/'): # but doesn't start with the build prefix + if link.startswith("/"): # but doesn't start with the build prefix return - if link.startswith('@rpath/'): + if link.startswith("@rpath/"): # Assume the rpath already points to lib, so there is no need to # change it. 
return - if '/' not in link or link.startswith('@executable_path/'): + if "/" not in link or link.startswith("@executable_path/"): link = basename(link) file_names = defaultdict(list) for f in files: file_names[basename(f)].append(f) if link not in file_names: - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") if len(file_names[link]) > 1: if path and basename(path) == link: # The link is for the file itself, just use it @@ -325,27 +391,34 @@ def find_lib(link, prefix, files, path=None): # multiple places. md5s = set() for f in file_names[link]: - md5s.add(md5_file(join(prefix, f))) + md5s.add(compute_sum(join(prefix, f), "md5")) if len(md5s) > 1: - sys.exit(f"Error: Found multiple instances of {link}: {file_names[link]}") + sys.exit( + f"Error: Found multiple instances of {link}: {file_names[link]}" + ) else: file_names[link].sort() - print("Found multiple instances of %s (%s). " - "Choosing the first one." % (link, file_names[link])) + print( + f"Found multiple instances of {link} ({file_names[link]}). " + "Choosing the first one." + ) return file_names[link][0] - print("Don't know how to find %s, skipping" % link) + print(f"Don't know how to find {link}, skipping") def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): - link = link_dict['name'] + link = link_dict["name"] if build_prefix != host_prefix and link.startswith(build_prefix): link = link.replace(build_prefix, host_prefix) print(f"Fixing linking of {link} in {path}") - print(".. seems to be linking to a compiler runtime, replacing build prefix with " - "host prefix and") - if not codefile_type(link): - sys.exit("Error: Compiler runtime library in build prefix not found in host prefix %s" - % link) + print( + ".. seems to be linking to a compiler runtime, replacing build prefix with " + "host prefix and" + ) + if not codefile_class(link, skip_symlinks=True): + sys.exit( + f"Error: Compiler runtime library in build prefix not found in host prefix {link}" + ) else: print(f".. fixing linking of {link} in {path} instead") @@ -355,9 +428,9 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): return print(f"Fixing linking of {link} in {path}") - print("New link location is %s" % (link_loc)) + print(f"New link location is {link_loc}") - lib_to_link = relpath(dirname(link_loc), 'lib') + lib_to_link = relpath(dirname(link_loc), "lib") # path_to_lib = utils.relative(path[len(prefix) + 1:]) # e.g., if @@ -377,30 +450,32 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): # @loader_path/path_to_lib/lib_to_link/basename(link), like # @loader_path/../../things/libthings.dylib. - ret = f'@rpath/{lib_to_link}/{basename(link)}' + ret = f"@rpath/{lib_to_link}/{basename(link)}" # XXX: IF the above fails for whatever reason, the below can be used # TODO: This might contain redundant ..'s if link and path are both in # some subdirectory of lib. 
# ret = '@loader_path/%s/%s/%s' % (path_to_lib, lib_to_link, basename(link)) - ret = ret.replace('/./', '/') + ret = ret.replace("/./", "/") return ret -def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): +def mk_relative_osx(path, host_prefix, m, files, rpaths=("lib",)): base_prefix = m.config.build_folder assert base_prefix == dirname(host_prefix) build_prefix = m.config.build_prefix prefix = build_prefix if exists(build_prefix) else host_prefix names = macho.otool(path, prefix) - s = macho.install_name_change(path, prefix, - partial(osx_ch_link, - host_prefix=host_prefix, - build_prefix=build_prefix, - files=files), - dylibs=names) + s = macho.install_name_change( + path, + prefix, + partial( + osx_ch_link, host_prefix=host_prefix, build_prefix=build_prefix, files=files + ), + dylibs=names, + ) if names: existing_rpaths = macho.get_rpaths(path, build_prefix=prefix) @@ -408,16 +483,18 @@ def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): # being found. for rpath in rpaths: # Escape hatch for when you really don't want any rpaths added. - if rpath == '': + if rpath == "": continue - rpath_new = join('@loader_path', - relpath(join(host_prefix, rpath), dirname(path)), - '').replace('/./', '/') + rpath_new = join( + "@loader_path", relpath(join(host_prefix, rpath), dirname(path)), "" + ).replace("/./", "/") macho.add_rpath(path, rpath_new, build_prefix=prefix, verbose=True) full_rpath = join(host_prefix, rpath) for existing_rpath in existing_rpaths: if normpath(existing_rpath) == normpath(full_rpath): - macho.delete_rpath(path, existing_rpath, build_prefix=prefix, verbose=True) + macho.delete_rpath( + path, existing_rpath, build_prefix=prefix, verbose=True + ) for rpath in existing_rpaths: if rpath.startswith(base_prefix) and not rpath.startswith(host_prefix): @@ -428,7 +505,7 @@ def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): assert_relative_osx(path, host_prefix, build_prefix) -''' +""" # Both patchelf and LIEF have bugs in them. Neither can be used on all binaries we have seen. # This code tries each and tries to keep count of which worked between the original binary and # patchelf-patched, LIEF-patched versions. 
@@ -443,7 +520,14 @@ def check_binary(binary, expected=None): print("trying {}".format(binary)) # import pdb; pdb.set_trace() try: - txt = check_output([sys.executable, '-c', 'from ctypes import cdll; cdll.LoadLibrary("' + binary + '")'], timeout=2) + txt = check_output( + [ + sys.executable, + '-c', + 'from ctypes import cdll; cdll.LoadLibrary("' + binary + '")' + ], + timeout=2, + ) # mydll = cdll.LoadLibrary(binary) except Exception as e: print(e) @@ -482,77 +566,78 @@ def check_binary_patchers(elf, prefix, rpath): if original == pelf and works: worksd['patchelf'] += 1 print('\n' + str(worksd) + '\n') -''' +""" -def mk_relative_linux(f, prefix, rpaths=('lib',), method=None): - 'Respects the original values and converts abs to $ORIGIN-relative' +def mk_relative_linux(f, prefix, rpaths=("lib",), method=None): + "Respects the original values and converts abs to $ORIGIN-relative" elf = join(prefix, f) origin = dirname(elf) existing_pe = None - patchelf = external.find_executable('patchelf', prefix) + patchelf = external.find_executable("patchelf", prefix) if not patchelf: - print(f"ERROR :: You should install patchelf, will proceed with LIEF for {elf} (was {method})") - method = 'LIEF' + print( + f"ERROR :: You should install patchelf, will proceed with LIEF for {elf} (was {method})" + ) + method = "LIEF" else: try: - existing_pe = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0] + existing_pe = ( + check_output([patchelf, "--print-rpath", elf]) + .decode("utf-8") + .splitlines()[0] + ) except CalledProcessError: - if method == 'patchelf': - print("ERROR :: `patchelf --print-rpath` failed for {}, but patchelf was specified".format( - elf)) - elif method != 'LIEF': - print("WARNING :: `patchelf --print-rpath` failed for {}, will proceed with LIEF (was {})".format( - elf, method)) - method = 'LIEF' + if method == "patchelf": + print( + f"ERROR :: `patchelf --print-rpath` failed for {elf}, but patchelf was specified" + ) + elif method != "LIEF": + print( + f"WARNING :: `patchelf --print-rpath` failed for {elf}, will proceed with LIEF (was {method})" + ) + method = "LIEF" else: existing_pe = existing_pe.split(os.pathsep) existing = existing_pe if have_lief: existing2, _, _ = get_rpaths_raw(elf) if existing_pe and existing_pe != existing2: - print('WARNING :: get_rpaths_raw()={} and patchelf={} disagree for {} :: '.format( - existing2, existing_pe, elf)) + print( + f"WARNING :: get_rpaths_raw()={existing2} and patchelf={existing_pe} disagree for {elf} :: " + ) # Use LIEF if method is LIEF to get the initial value? - if method == 'LIEF': + if method == "LIEF": existing = existing2 new = [] for old in existing: - if old.startswith('$ORIGIN'): + if old.startswith("$ORIGIN"): new.append(old) - elif old.startswith('/'): + elif old.startswith("/"): # Test if this absolute path is outside of prefix. That is fatal. rp = relpath(old, prefix) - if rp.startswith('..' + os.sep): - print(f'Warning: rpath {old} is outside prefix {prefix} (removing it)') + if rp.startswith(".." + os.sep): + print(f"Warning: rpath {old} is outside prefix {prefix} (removing it)") else: - rp = '$ORIGIN/' + relpath(old, origin) + rp = "$ORIGIN/" + relpath(old, origin) if rp not in new: new.append(rp) # Ensure that the asked-for paths are also in new. 
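Reviewer note: the loop over existing rpaths in `mk_relative_linux` above keeps `$ORIGIN` entries, drops absolute entries that fall outside the prefix, and rewrites absolute entries inside the prefix relative to the binary's own directory. A minimal sketch of that conversion under those assumptions (names are illustrative, not the conda-build API):

import os
from os.path import dirname, relpath

def origin_relative_rpaths(existing, prefix, elf_path):
    """Re-express absolute rpaths under `prefix` as $ORIGIN-relative entries; drop ones outside it."""
    origin = dirname(elf_path)
    new = []
    for old in existing:
        if old.startswith("$ORIGIN"):
            rp = old  # already relative to the binary itself
        elif old.startswith("/"):
            if relpath(old, prefix).startswith(".." + os.sep):
                continue  # absolute path outside the prefix: not relocatable, drop it
            rp = "$ORIGIN/" + relpath(old, origin)
        else:
            continue  # other forms are ignored in this sketch
        if rp not in new:
            new.append(rp)
    return ":".join(new)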
for rpath in rpaths: - if rpath != '': - if not rpath.startswith('/'): - # IMHO utils.relative shouldn't exist, but I am too paranoid to remove - # it, so instead, make sure that what I think it should be replaced by - # gives the same result and assert if not. Yeah, I am a chicken. - rel_ours = normpath(utils.relative(f, rpath)) - rel_stdlib = normpath(relpath(rpath, dirname(f))) - if not rel_ours == rel_stdlib: - raise ValueError('utils.relative {} and relpath {} disagree for {}, {}'.format( - rel_ours, rel_stdlib, f, rpath)) - rpath = '$ORIGIN/' + rel_stdlib + if rpath != "": + if not rpath.startswith("/"): + rpath = "$ORIGIN/" + normpath(relpath(rpath, dirname(f))) if rpath not in new: new.append(rpath) - rpath = ':'.join(new) + rpath = ":".join(new) # check_binary_patchers(elf, prefix, rpath) - if not patchelf or (method and method.upper() == 'LIEF'): - set_rpath(old_matching='*', new_rpath=rpath, file=elf) + if not patchelf or (method and method.upper() == "LIEF"): + set_rpath(old_matching="*", new_rpath=rpath, file=elf) else: - call([patchelf, '--force-rpath', '--set-rpath', rpath, elf]) + call([patchelf, "--force-rpath", "--set-rpath", rpath, elf]) def assert_relative_osx(path, host_prefix, build_prefix): @@ -560,32 +645,70 @@ def assert_relative_osx(path, host_prefix, build_prefix): for name in macho.get_dylibs(path, tools_prefix): for prefix in (host_prefix, build_prefix): if prefix and name.startswith(prefix): - raise RuntimeError("library at %s appears to have an absolute path embedded" % path) - - -def determine_package_nature(pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls): - run_exports = None - lib_prefix = pkg.name.startswith('lib') - codefiles = get_package_obj_files(pkg, prefix) - # get_package_obj_files already filters by extension and I'm not sure we need two. - dsos = [f for f in codefiles for ext in ('.dylib', '.so', '.dll', '.pyd') if ext in f] - # TODO :: Is this package not in a channel somewhere at this point? It would be good not to be special - # casing like this. Clearly we aren't able to get run_exports for starters and that's not good - if not isinstance(pkg, FakeDist): - # we don't care about the actual run_exports value, just whether or not run_exports are present. - json_file = os.path.join(prefix, 'conda-meta', pkg.dist_name + '.json') - import json - assert os.path.isfile(json_file), f"conda-meta :: Not a file: {json_file}" - json_info = json.loads(open(json_file).read()) - epd = json_info['extracted_package_dir'] - run_exports_json = os.path.join(epd, 'info', 'run_exports.json') - if os.path.isfile(run_exports_json): - run_exports = json.loads(open(run_exports_json).read()) - return (dsos, run_exports, lib_prefix) - - -def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls): - ''' + raise RuntimeError( + f"library at {path} appears to have an absolute path embedded" + ) + + +def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: + return { + file + for file in prec["files"] + if codefile_class(Path(prefix, file), skip_symlinks=True) + # codefile_class already filters by extension/binary type, do we need this second filter? 
+ for ext in (".dylib", ".so", ".dll", ".pyd") + if ext in file + } + + +def get_run_exports( + prec: PrefixRecord, + prefix: str | os.PathLike | Path, +) -> tuple[str, ...]: + json_file = Path( + prefix, + "conda-meta", + f"{prec.name}-{prec.version}-{prec.build}.json", + ) + try: + json_info = json.loads(json_file.read_text()) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + # raise CondaBuildException(f"Not a file: {json_file}") + # is this a "fake" PrefixRecord? + # i.e. this is the package being built and hasn't been "installed" to disk? + return () + + run_exports_json = Path( + json_info["extracted_package_dir"], + "info", + "run_exports.json", + ) + try: + return tuple(json.loads(run_exports_json.read_text())) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + return () + + +def library_nature( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> Literal[ + "interpreter (Python)" + | "interpreter (R)" + | "run-exports library" + | "dso library" + | "plugin library (Python,R)" + | "plugin library (Python)" + | "plugin library (R)" + | "interpreted library (Python,R)" + | "interpreted library (Python)" + | "interpreted library (R)" + | "non-library" +]: + """ Result :: "non-library", "interpreted library (Python|R|Python,R)", "plugin library (Python|R|Python,R)", @@ -594,74 +717,44 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url "interpreter (R)" "interpreter (Python)" .. in that order, i.e. if have both dsos and run_exports, it's a run_exports_library. - ''' - dsos, run_exports, _ = determine_package_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) - if pkg.name == 'python': + """ + if prec.name == "python": return "interpreter (Python)" - elif pkg.name == 'r-base': + elif prec.name == "r-base": return "interpreter (R)" - if run_exports: + elif get_run_exports(prec, prefix): return "run-exports library" - elif len(dsos): + elif dsos := get_dsos(prec, prefix): # If all DSOs are under site-packages or R/lib/ - python_dsos = [dso for dso in dsos if 'site-packages' in dso] - r_dsos = [dso for dso in dsos if 'lib/R/library' in dso] - dsos_without_plugins = [dso for dso in dsos if dso not in r_dsos + python_dsos] - if len(dsos_without_plugins): + python_dsos = {dso for dso in dsos if "site-packages" in dso} + r_dsos = {dso for dso in dsos if "lib/R/library" in dso} + if dsos - python_dsos - r_dsos: return "dso library" - else: - if python_dsos and r_dsos: - return "plugin library (Python,R)" - elif python_dsos: - return "plugin library (Python)" - elif r_dsos: - return "plugin library (R)" + elif python_dsos and r_dsos: + return "plugin library (Python,R)" + elif python_dsos: + return "plugin library (Python)" + elif r_dsos: + return "plugin library (R)" else: - files = get_package_files(pkg, prefix) - python_files = [f for f in files if 'site-packages' in f] - r_files = [f for f in files if 'lib/R/library' in f] + python_files = {file for file in prec["files"] if "site-packages" in file} + r_files = {file for file in prec["files"] if "lib/R/library" in file} if python_files and r_files: return "interpreted library (Python,R)" elif python_files: return "interpreted library (Python)" elif r_files: return "interpreted library (R)" - return "non-library" -def dists_from_names(names, prefix): - results = [] - from conda_build.utils import 
linked_data_no_multichannels - pkgs = linked_data_no_multichannels(prefix) - for name in names: - for pkg in pkgs: - if pkg.quad[0] == name: - results.append(pkg) - return results - - -class FakeDist: - def __init__(self, name, version, build_number, build_str, channel, files): - self.name = name - self.quad = [name] - self.version = version - self.build_number = build_number - self.build_string = build_str - self.channel = channel - self.files = files - - def get(self, name): - if name == 'files': - return self.files - - # This is really just a small, fixed sysroot and it is rooted at ''. `libcrypto.0.9.8.dylib` should not be in it IMHO. -DEFAULT_MAC_WHITELIST = ['/opt/X11/', - '/usr/lib/libSystem.B.dylib', - '/usr/lib/libcrypto.0.9.8.dylib', - '/usr/lib/libobjc.A.dylib', -""" +DEFAULT_MAC_WHITELIST = [ + "/opt/X11/", + "/usr/lib/libSystem.B.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libobjc.A.dylib", + """ '/System/Library/Frameworks/Accelerate.framework/*', '/System/Library/Frameworks/AGL.framework/*', '/System/Library/Frameworks/AppKit.framework/*', @@ -699,50 +792,64 @@ def get(self, name): '/System/Library/Frameworks/StoreKit.framework/*', '/System/Library/Frameworks/SystemConfiguration.framework/*', '/System/Library/Frameworks/WebKit.framework/*' -""" - ] +""", +] # Should contain the System32/SysWOW64 DLLs present on a clean installation of the # oldest version of Windows that we support (or are currently) building packages for. -DEFAULT_WIN_WHITELIST = ['**/ADVAPI32.dll', - '**/bcrypt.dll', - '**/COMCTL32.dll', - '**/COMDLG32.dll', - '**/CRYPT32.dll', - '**/dbghelp.dll', - '**/GDI32.dll', - '**/IMM32.dll', - '**/KERNEL32.dll', - '**/NETAPI32.dll', - '**/ole32.dll', - '**/OLEAUT32.dll', - '**/PSAPI.DLL', - '**/RPCRT4.dll', - '**/SHELL32.dll', - '**/USER32.dll', - '**/USERENV.dll', - '**/WINHTTP.dll', - '**/WS2_32.dll', - '**/ntdll.dll', - '**/msvcrt.dll'] - - -def _collect_needed_dsos(sysroots_files, files, run_prefix, sysroot_substitution, build_prefix, build_prefix_substitution): +DEFAULT_WIN_WHITELIST = [ + "**/ADVAPI32.dll", + "**/bcrypt.dll", + "**/COMCTL32.dll", + "**/COMDLG32.dll", + "**/CRYPT32.dll", + "**/dbghelp.dll", + "**/GDI32.dll", + "**/IMM32.dll", + "**/KERNEL32.dll", + "**/NETAPI32.dll", + "**/ole32.dll", + "**/OLEAUT32.dll", + "**/PSAPI.DLL", + "**/RPCRT4.dll", + "**/SHELL32.dll", + "**/USER32.dll", + "**/USERENV.dll", + "**/WINHTTP.dll", + "**/WS2_32.dll", + "**/ntdll.dll", + "**/msvcrt.dll", +] + + +def _collect_needed_dsos( + sysroots_files, + files, + run_prefix, + sysroot_substitution, + build_prefix, + build_prefix_substitution, +): all_needed_dsos = set() needed_dsos_for_file = dict() - sysroots = '' + sysroots = "" if sysroots_files: sysroots = list(sysroots_files.keys())[0] for f in files: path = join(run_prefix, f) - if not codefile_type(path): + if not codefile_class(path, skip_symlinks=True): continue - build_prefix = build_prefix.replace(os.sep, '/') - run_prefix = run_prefix.replace(os.sep, '/') - needed = get_linkages_memoized(path, resolve_filenames=True, recurse=False, - sysroot=sysroots, envroot=run_prefix) + build_prefix = build_prefix.replace(os.sep, "/") + run_prefix = run_prefix.replace(os.sep, "/") + needed = get_linkages_memoized( + path, + resolve_filenames=True, + recurse=False, + sysroot=sysroots, + envroot=run_prefix, + ) for lib, res in needed.items(): - resolved = res['resolved'].replace(os.sep, '/') + resolved = res["resolved"].replace(os.sep, "/") for sysroot, sysroot_files in sysroots_files.items(): if 
resolved.startswith(sysroot): resolved = resolved.replace(sysroot, sysroot_substitution) @@ -753,17 +860,27 @@ def _collect_needed_dsos(sysroots_files, files, run_prefix, sysroot_substitution if build_prefix != run_prefix and resolved.startswith(build_prefix): resolved = resolved.replace(build_prefix, build_prefix_substitution) if resolved.startswith(run_prefix): - resolved = relpath(resolved, run_prefix).replace(os.sep, '/') + resolved = relpath(resolved, run_prefix).replace(os.sep, "/") # If resolved still starts with '$RPATH' then that means we will either find it in # the whitelist or it will present as an error later. - res['resolved'] = resolved + res["resolved"] = resolved needed_dsos_for_file[f] = needed - all_needed_dsos = all_needed_dsos.union({info['resolved'] for f, info in needed.items()}) + all_needed_dsos = all_needed_dsos.union( + {info["resolved"] for f, info in needed.items()} + ) return all_needed_dsos, needed_dsos_for_file -def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, - sysroot_substitution, enable_static): +def _map_file_to_package( + files, + run_prefix, + build_prefix, + all_needed_dsos, + pkg_vendored_dist, + ignore_list_syms, + sysroot_substitution, + enable_static, +): # Form a mapping of file => package prefix_owners = {} @@ -780,24 +897,31 @@ def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_v for subdir2, _, filez in os.walk(prefix): for file in filez: fp = join(subdir2, file) - dynamic_lib = any(fnmatch(fp, ext) for ext in ('*.so*', '*.dylib*', '*.dll')) and \ - codefile_type(fp, skip_symlinks=False) is not None - static_lib = any(fnmatch(fp, ext) for ext in ('*.a', '*.lib')) + dynamic_lib = any( + fnmatch(fp, ext) for ext in ("*.so*", "*.dylib*", "*.dll") + ) and codefile_class(fp, skip_symlinks=False) + static_lib = any(fnmatch(fp, ext) for ext in ("*.a", "*.lib")) # Looking at all the files is very slow. if not dynamic_lib and not static_lib: continue - rp = normpath(relpath(fp, prefix)).replace('\\', '/') - if dynamic_lib and not any(rp.lower() == w for w in all_needed_dsos_lower): + rp = normpath(relpath(fp, prefix)).replace("\\", "/") + if dynamic_lib and not any( + rp.lower() == w for w in all_needed_dsos_lower + ): continue if any(rp == normpath(w) for w in all_lib_exports[prefix]): continue - rp_po = rp.replace('\\', '/') - owners = prefix_owners[prefix][rp_po] if rp_po in prefix_owners[prefix] else [] + rp_po = rp.replace("\\", "/") + owners = ( + prefix_owners[prefix][rp_po] + if rp_po in prefix_owners[prefix] + else [] + ) # Self-vendoring, not such a big deal but may as well report it? 
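Reviewer note: the ownership bookkeeping in `_map_file_to_package` ultimately asks, for each candidate library path, which installed package's file list contains it (via `which_package`). A from-scratch approximation that walks the conda-meta records instead, purely to illustrate the lookup rather than how conda-build implements it:

import json
from pathlib import Path

def owners_of(relative_path, prefix):
    """Yield names of installed packages whose recorded file list contains `relative_path`."""
    for meta in Path(prefix, "conda-meta").glob("*.json"):
        record = json.loads(meta.read_text())
        if relative_path in record.get("files", ()):
            yield record["name"]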
if not len(owners): if any(rp == normpath(w) for w in files): owners.append(pkg_vendored_dist) - new_pkgs = list(which_package(rp, prefix, avoid_canonical_channel_name=True)) + new_pkgs = list(which_package(rp, prefix)) # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners # and not any([fnmatch(new_pkg.name, i) for i in ignore_for_statics])] @@ -806,41 +930,37 @@ def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_v owners.append(new_pkg) prefix_owners[prefix][rp_po] = owners if len(prefix_owners[prefix][rp_po]): - exports = {e for e in get_exports_memoized(fp, enable_static=enable_static) if not - any(fnmatch(e, pattern) for pattern in ignore_list_syms)} + exports = { + e + for e in get_exports_memoized( + fp, enable_static=enable_static + ) + if not any( + fnmatch(e, pattern) for pattern in ignore_list_syms + ) + } all_lib_exports[prefix][rp_po] = exports - # Check codefile_type to filter out linker scripts. + # Check codefile_class to filter out linker scripts. if dynamic_lib: contains_dsos[prefix_owners[prefix][rp_po][0]] = True elif static_lib: if sysroot_substitution in fp: - if (prefix_owners[prefix][rp_po][0].name.startswith('gcc_impl_linux') or - prefix_owners[prefix][rp_po][0].name == 'llvm'): + if ( + prefix_owners[prefix][rp_po][0].name.startswith( + "gcc_impl_linux" + ) + or prefix_owners[prefix][rp_po][0].name == "llvm" + ): continue - print(f"sysroot in {fp}, owner is {prefix_owners[prefix][rp_po][0]}") + print( + f"sysroot in {fp}, owner is {prefix_owners[prefix][rp_po][0]}" + ) # Hmm, not right, muddies the prefixes again. contains_static_libs[prefix_owners[prefix][rp_po][0]] = True return prefix_owners, contains_dsos, contains_static_libs, all_lib_exports -def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, files): - pkg_vendoring_name = pkg_name - pkg_vendoring_version = str(pkg_version) - pkg_vendoring_build_str = build_str - pkg_vendoring_build_number = build_number - pkg_vendoring_key = '-'.join([pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_str]) - - return FakeDist(pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_number, - pkg_vendoring_build_str, - channel, - files), pkg_vendoring_key - - def _print_msg(errors, text, verbose): if text.startswith(" ERROR"): errors.append(text) @@ -849,19 +969,35 @@ def _print_msg(errors, text, verbose): def caseless_sepless_fnmatch(paths, pat): - pat = pat.replace('\\', '/') + pat = pat.replace("\\", "/") match = re.compile("(?i)" + fnmatch_translate(pat)).match - matches = [path for path in paths if (path.replace('\\', '/') == pat) or match(path.replace('\\', '/'))] + matches = [ + path + for path in paths + if (path.replace("\\", "/") == pat) or match(path.replace("\\", "/")) + ] return matches -def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_files, msg_prelude, info_prelude, - sysroot_prefix, sysroot_substitution, subdir, verbose): +def _lookup_in_sysroots_and_whitelist( + errors, + whitelist, + needed_dso, + sysroots_files, + msg_prelude, + info_prelude, + sysroot_prefix, + sysroot_substitution, + subdir, + verbose, +): # A system or ignored dependency. We should be able to find it in one of the CDT or # compiler packages on linux or in a sysroot folder on other OSes. These usually # start with '$RPATH/' which indicates pyldd did not find them, so remove that now. 
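Reviewer note: `caseless_sepless_fnmatch`, defined just above, normalizes backslashes to forward slashes and compiles the glob case-insensitively, which is what lets the Windows whitelist entries match regardless of on-disk casing. A rough standalone equivalent plus a usage example (illustrative only):

import re
from fnmatch import translate

def caseless_match(paths, pattern):
    """Match paths against a glob pattern, ignoring case and backslash/slash differences."""
    rx = re.compile("(?i)" + translate(pattern.replace("\\", "/")))
    return [p for p in paths if rx.match(p.replace("\\", "/"))]

assert caseless_match(["C:/Windows/System32/kernel32.dll"], "**/KERNEL32.dll")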
if needed_dso.startswith(sysroot_substitution): - replacements = [sysroot_substitution] + [sysroot for sysroot, _ in sysroots_files.items()] + replacements = [sysroot_substitution] + [ + sysroot for sysroot, _ in sysroots_files.items() + ] else: replacements = [needed_dso] in_whitelist = False @@ -870,20 +1006,25 @@ def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_fi # Check if we have a CDT package or a file in a sysroot. sysroot_files = [] for sysroot, files in sysroots_files.items(): - sysroot_os = sysroot.replace('\\', os.sep) + sysroot_os = sysroot.replace("\\", os.sep) if needed_dso.startswith(sysroot_substitution): # Do we want to do this replace? - sysroot_files.append(needed_dso.replace(sysroot_substitution, sysroot_os)) + sysroot_files.append( + needed_dso.replace(sysroot_substitution, sysroot_os) + ) else: found = caseless_sepless_fnmatch(files, needed_dso[1:]) sysroot_files.extend(found) if len(sysroot_files): in_sysroots = True - if subdir.startswith('osx-') or 'win' in subdir: + if subdir.startswith("osx-") or "win" in subdir: in_prefix_dso = sysroot_files[0] n_dso_p = f"Needed DSO {in_prefix_dso}" - _print_msg(errors, '{}: {} found in $SYSROOT'. - format(info_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in $SYSROOT", + verbose=verbose, + ) else: # Removing sysroot_prefix is only for Linux, though we could # use CONDA_BUILD_SYSROOT for macOS. We should figure out what to do about @@ -892,185 +1033,310 @@ def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_fi for idx in range(len(sysroot_files)): # in_prefix_dso = normpath(sysroot_files[idx].replace( # sysroot_prefix + os.sep, '')) - in_prefix_dso = sysroot_files[idx][len(sysroot_prefix) + 1:] + in_prefix_dso = sysroot_files[idx][len(sysroot_prefix) + 1 :] n_dso_p = f"Needed DSO {in_prefix_dso}" _pkgs = list(which_package(in_prefix_dso, sysroot_prefix)) if len(_pkgs) > 0: pkgs.extend(_pkgs) break if len(pkgs): - _print_msg(errors, '{}: {} found in CDT/compiler package {}'. - format(info_prelude, n_dso_p, pkgs[0]), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in CDT/compiler package {pkgs[0]}", + verbose=verbose, + ) else: - _print_msg(errors, '{}: {} not found in any CDT/compiler package,' - ' nor the whitelist?!'. - format(msg_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{msg_prelude}: {n_dso_p} not found in any CDT/compiler package," + " nor the whitelist?!", + verbose=verbose, + ) if not in_sysroots: # It takes a very long time to glob in C:/Windows so we do not do that. for replacement in replacements: - needed_dso_w = needed_dso.replace(sysroot_substitution, replacement + '/') + needed_dso_w = needed_dso.replace(sysroot_substitution, replacement + "/") # We should pass in multiple paths at once to this, but the code isn't structured for that. - in_whitelist = any([caseless_sepless_fnmatch([needed_dso_w], w) for w in whitelist]) + in_whitelist = any( + [caseless_sepless_fnmatch([needed_dso_w], w) for w in whitelist] + ) if in_whitelist: n_dso_p = f"Needed DSO {needed_dso_w}" - _print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in the whitelist", + verbose=verbose, + ) break if not in_whitelist and not in_sysroots: - _print_msg(errors, "{}: {} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" - ".. is this binary repackaging?". 
- format(msg_prelude, needed_dso), verbose=verbose) - - -def _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, - warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used): + _print_msg( + errors, + f"{msg_prelude}: {needed_dso} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" + ".. is this binary repackaging?", + verbose=verbose, + ) + + +def _lookup_in_prefix_packages( + errors, + needed_dso, + files, + run_prefix, + whitelist, + info_prelude, + msg_prelude, + warn_prelude, + verbose, + requirements_run, + lib_packages, + lib_packages_used, +): in_prefix_dso = normpath(needed_dso) - n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace('\\', '/')) + n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace("\\", "/")) and_also = " (and also in this package)" if in_prefix_dso in files else "" - pkgs = list(which_package(in_prefix_dso, run_prefix, avoid_canonical_channel_name=True)) - in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in requirements_run] + precs = list(which_package(in_prefix_dso, run_prefix)) + precs_in_reqs = [prec for prec in precs if prec.name in requirements_run] # TODO :: metadata build/inherit_child_run_exports (for vc, mro-base-impl). - for pkg in in_pkgs_in_run_reqs: - if pkg in lib_packages: - lib_packages_used.add(pkg) + for prec in precs_in_reqs: + if prec in lib_packages: + lib_packages_used.add(prec) in_whitelist = any([fnmatch(in_prefix_dso, w) for w in whitelist]) - if len(in_pkgs_in_run_reqs) == 1: - _print_msg(errors, '{}: {} found in {}{}'.format(info_prelude, - n_dso_p, - in_pkgs_in_run_reqs[0], - and_also), verbose=verbose) + if len(precs_in_reqs) == 1: + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in {precs_in_reqs[0]}{and_also}", + verbose=verbose, + ) elif in_whitelist: - _print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p), verbose=verbose) - elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: - _print_msg(errors, '{}: {} found in {}{}'.format(msg_prelude, - n_dso_p, - [p.quad[0] for p in pkgs], - and_also), verbose=verbose) - _print_msg(errors, '{}: .. but {} not in reqs/run, (i.e. it is overlinking)' - ' (likely) or a missing dependency (less likely)'. - format(msg_prelude, [p.quad[0] for p in pkgs]), verbose=verbose) - elif len(in_pkgs_in_run_reqs) > 1: - _print_msg(errors, '{}: {} found in multiple packages in run/reqs: {}{}' - .format(warn_prelude, - in_prefix_dso, - in_pkgs_in_run_reqs, - and_also), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in the whitelist", + verbose=verbose, + ) + elif len(precs_in_reqs) == 0 and len(precs) > 0: + _print_msg( + errors, + f"{msg_prelude}: {n_dso_p} found in {[str(prec) for prec in precs]}{and_also}", + verbose=verbose, + ) + _print_msg( + errors, + f"{msg_prelude}: .. but {[str(prec) for prec in precs]} not in reqs/run, " + "(i.e. 
it is overlinking) (likely) or a missing dependency (less likely)", + verbose=verbose, + ) + elif len(precs_in_reqs) > 1: + _print_msg( + errors, + f"{warn_prelude}: {in_prefix_dso} found in multiple packages in run/reqs: " + f"{[str(prec) for prec in precs_in_reqs]}{and_also}", + verbose=verbose, + ) else: if not any(in_prefix_dso == normpath(w) for w in files): - _print_msg(errors, '{}: {} not found in any packages'.format(msg_prelude, - in_prefix_dso), verbose=verbose) + _print_msg( + errors, + f"{msg_prelude}: {in_prefix_dso} not found in any packages", + verbose=verbose, + ) elif verbose: - _print_msg(errors, '{}: {} found in this package'.format(info_prelude, - in_prefix_dso), verbose=verbose) - - -def _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, - error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, - lib_packages_used, whitelist, sysroots, sysroot_prefix, sysroot_substitution, subdir): + _print_msg( + errors, + f"{info_prelude}: {in_prefix_dso} found in this package", + verbose=verbose, + ) + + +def _show_linking_messages( + files, + errors, + needed_dsos_for_file, + build_prefix, + run_prefix, + pkg_name, + error_overlinking, + runpath_whitelist, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + whitelist, + sysroots, + sysroot_prefix, + sysroot_substitution, + subdir, +): if len(sysroots): for sysroot, sr_files in sysroots.items(): - _print_msg(errors, " INFO: sysroot: '{}' files: '{}'".format(sysroot, - sorted(list(sr_files), reverse=True)[1:5]), - verbose=verbose) + _print_msg( + errors, + f" INFO: sysroot: '{sysroot}' files: '{sorted(list(sr_files), reverse=True)[1:5]}'", + verbose=verbose, + ) for f in files: path = join(run_prefix, f) - filetype = codefile_type(path) - if not filetype or filetype not in filetypes_for_platform[subdir.split('-')[0]]: + codefile = codefile_class(path, skip_symlinks=True) + if codefile not in filetypes_for_platform[subdir.split("-")[0]]: continue - warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, '/')) - err_prelude = " ERROR ({},{})".format(pkg_name, f.replace(os.sep, '/')) - info_prelude = " INFO ({},{})".format(pkg_name, f.replace(os.sep, '/')) + warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, "/")) + err_prelude = " ERROR ({},{})".format(pkg_name, f.replace(os.sep, "/")) + info_prelude = " INFO ({},{})".format(pkg_name, f.replace(os.sep, "/")) msg_prelude = err_prelude if error_overlinking else warn_prelude # TODO :: Determine this much earlier, storing in needed_dsos_for_file in _collect_needed_dsos() try: runpaths, _, _ = get_runpaths_raw(path) except: - _print_msg(errors, f'{warn_prelude}: pyldd.py failed to process', - verbose=verbose) + _print_msg( + errors, f"{warn_prelude}: pyldd.py failed to process", verbose=verbose + ) continue - if runpaths and not (runpath_whitelist or - any(fnmatch(f, w) for w in runpath_whitelist)): - _print_msg(errors, '{}: runpaths {} found in {}'.format(msg_prelude, - runpaths, - path), verbose=verbose) + if runpaths and not ( + runpath_whitelist or any(fnmatch(f, w) for w in runpath_whitelist) + ): + _print_msg( + errors, + f"{msg_prelude}: runpaths {runpaths} found in {path}", + verbose=verbose, + ) needed = needed_dsos_for_file[f] for needed_dso, needed_dso_info in needed.items(): - needed_dso = needed_dso.replace('/', os.sep) + needed_dso = needed_dso.replace("/", os.sep) # Should always be the case, even when we fail to resolve the original value is stored here # as it 
is still a best attempt and informative. - if 'resolved' in needed_dso_info: - needed_dso = needed_dso_info['resolved'] - if not needed_dso.startswith(os.sep) and not needed_dso.startswith('$'): - _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, - warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used) - elif needed_dso.startswith('$PATH'): - _print_msg(errors, "{}: {} found in build prefix; should never happen".format( - err_prelude, needed_dso), verbose=verbose) + if "resolved" in needed_dso_info: + needed_dso = needed_dso_info["resolved"] + if not needed_dso.startswith(os.sep) and not needed_dso.startswith("$"): + _lookup_in_prefix_packages( + errors, + needed_dso, + files, + run_prefix, + whitelist, + info_prelude, + msg_prelude, + warn_prelude, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + ) + elif needed_dso.startswith("$PATH"): + _print_msg( + errors, + f"{err_prelude}: {needed_dso} found in build prefix; should never happen", + verbose=verbose, + ) else: - _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots, msg_prelude, - info_prelude, sysroot_prefix, sysroot_substitution, - subdir, verbose) - - -def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdir, - ignore_run_exports, - requirements_run, requirements_build, requirements_host, - run_prefix, build_prefix, - missing_dso_whitelist, runpath_whitelist, - error_overlinking, error_overdepending, verbose, - exception_on_error, files, bldpkgs_dirs, output_folder, channel_urls, - enable_static=False, - variants={}): + _lookup_in_sysroots_and_whitelist( + errors, + whitelist, + needed_dso, + sysroots, + msg_prelude, + info_prelude, + sysroot_prefix, + sysroot_substitution, + subdir, + verbose, + ) + + +def check_overlinking_impl( + pkg_name: str, + pkg_version: str, + build_str: str, + build_number: int, + subdir: str, + ignore_run_exports, + requirements_run, + requirements_build, + requirements_host, + run_prefix, + build_prefix, + missing_dso_whitelist, + runpath_whitelist, + error_overlinking, + error_overdepending, + verbose, + exception_on_error, + files, + bldpkgs_dirs, + output_folder, + channel_urls, + enable_static=False, + variants={}, +): verbose = True errors = [] files_to_inspect = [] filesu = [] - for f in files: - path = join(run_prefix, f) - filetype = codefile_type(path) - if filetype and filetype in filetypes_for_platform[subdir.split('-')[0]]: - files_to_inspect.append(f) - filesu.append(f.replace('\\', '/')) + for file in files: + path = join(run_prefix, file) + codefile = codefile_class(path, skip_symlinks=True) + if codefile in filetypes_for_platform[subdir.split("-")[0]]: + files_to_inspect.append(file) + filesu.append(file.replace("\\", "/")) if not files_to_inspect: - return dict() + return {} - sysroot_substitution = '$SYSROOT' - build_prefix_substitution = '$PATH' + sysroot_substitution = "$SYSROOT" + build_prefix_substitution = "$PATH" # Used to detect overlinking (finally) - requirements_run = [req.split(' ')[0] for req in requirements_run] - packages = dists_from_names(requirements_run, run_prefix) - local_channel = dirname(bldpkgs_dirs).replace('\\', '/') if utils.on_win else dirname(bldpkgs_dirs)[1:] - pkg_vendored_dist, pkg_vendoring_key = _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, - local_channel, files) - packages.append(pkg_vendored_dist) + requirements_run = [req.split(" ")[0] for req in requirements_run] + pd = 
PrefixData(run_prefix) + precs = [prec for req in requirements_run if (prec := pd.get(req, None))] + local_channel = ( + dirname(bldpkgs_dirs).replace("\\", "/") + if utils.on_win + else dirname(bldpkgs_dirs)[1:] + ) + pkg_vendored_dist = PrefixRecord( + name=pkg_name, + version=str(pkg_version), + build=build_str, + build_number=build_number, + channel=local_channel, + files=files, + ) + pkg_vendoring_key = f"{pkg_name}-{pkg_version}-{build_str}" + precs.append(pkg_vendored_dist) ignore_list = utils.ensure_list(ignore_run_exports) - if subdir.startswith('linux'): - ignore_list.append('libgcc-ng') - - package_nature = {package: library_nature(package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) - for package in packages} - lib_packages = {package for package in packages - if package.quad[0] not in ignore_list and - [package] != 'non-library'} + if subdir.startswith("linux"): + ignore_list.append("libgcc-ng") + + package_nature = {prec: library_nature(prec, run_prefix) for prec in precs} + lib_packages = { + prec + for prec, nature in package_nature.items() + if prec.name not in ignore_list and nature != "non-library" + } lib_packages_used = {pkg_vendored_dist} - ignore_list_syms = ['main', '_main', '*get_pc_thunk*', '___clang_call_terminate', '_timeout'] + ignore_list_syms = [ + "main", + "_main", + "*get_pc_thunk*", + "___clang_call_terminate", + "_timeout", + ] # ignore_for_statics = ['gcc_impl_linux*', 'compiler-rt*', 'llvm-openmp*', 'gfortran_osx*'] # sysroots and whitelists are similar, but the subtle distinctions are important. - CONDA_BUILD_SYSROOT = variants.get('CONDA_BUILD_SYSROOT', None) + CONDA_BUILD_SYSROOT = variants.get("CONDA_BUILD_SYSROOT", None) if CONDA_BUILD_SYSROOT and os.path.exists(CONDA_BUILD_SYSROOT): # When on macOS and CBS not set, sysroots should probably be '/' # is everything in the sysroot allowed? I suppose so! - sysroot_prefix = '' + sysroot_prefix = "" sysroots = [CONDA_BUILD_SYSROOT] else: # The linux case. sysroot_prefix = build_prefix - sysroots = [sysroot + os.sep for sysroot in utils.glob(join(sysroot_prefix, '**', 'sysroot'))] + sysroots = [ + sysroot + os.sep + for sysroot in utils.glob(join(sysroot_prefix, "**", "sysroot")) + ] whitelist = [] vendoring_record = dict() # When build_is_host is True we perform file existence checks for files in the sysroot (e.g. C:\Windows) @@ -1078,20 +1344,20 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi # create some packages for the Windows System DLLs as an alternative?) build_is_host = False if not len(sysroots): - if subdir.startswith('osx-'): + if subdir.startswith("osx-"): # This is a bit confused! A sysroot shouldn't contain /usr/lib (it's the bit before that) # what we are really specifying here are subtrees of sysroots to search in and it may be # better to store each element of this as a tuple with a string and a nested tuple, e.g. # [('/', ('/usr/lib', '/opt/X11', '/System/Library/Frameworks'))] # Here we mean that we have a sysroot at '/' (could be a tokenized value like '$SYSROOT'?) # .. and in that sysroot there are 3 suddirs in which we may search for DSOs. 
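Reviewer note: on the Linux branch above, sysroots are discovered by globbing for any `sysroot` directory shipped inside the build prefix (typically by the compiler packages), and each hit is kept with a trailing separator. A standalone equivalent of that discovery step, assuming a plain recursive glob is close enough for illustration:

from glob import glob
from os.path import join

def find_sysroots(build_prefix):
    """Return candidate sysroot directories bundled inside the build prefix."""
    return [path + "/" for path in glob(join(build_prefix, "**", "sysroot"), recursive=True)]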
- sysroots = ['/usr/lib', '/opt/X11', '/System/Library/Frameworks'] + sysroots = ["/usr/lib", "/opt/X11", "/System/Library/Frameworks"] whitelist = DEFAULT_MAC_WHITELIST - build_is_host = True if sys.platform == 'darwin' else False - elif subdir.startswith('win'): - sysroots = ['C:/Windows'] + build_is_host = True if on_mac else False + elif subdir.startswith("win"): + sysroots = ["C:/Windows"] whitelist = DEFAULT_WIN_WHITELIST - build_is_host = True if sys.platform == 'win-32' else False + build_is_host = True if on_win else False whitelist += missing_dso_whitelist or [] @@ -1099,24 +1365,31 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi # the first sysroot is more important than others. sysroots_files = dict() for sysroot in sysroots: - from conda_build.utils import prefix_files - srs = sysroot if sysroot.endswith('/') else sysroot + '/' + srs = sysroot if sysroot.endswith("/") else sysroot + "/" sysroot_files = prefix_files(sysroot) - sysroot_files = [p.replace('\\', '/') for p in sysroot_files] + sysroot_files = [p.replace("\\", "/") for p in sysroot_files] sysroots_files[srs] = sysroot_files - if subdir.startswith('osx-'): + if subdir.startswith("osx-"): orig_sysroot_files = copy(sysroot_files) sysroot_files = [] for f in orig_sysroot_files: replaced = f - if f.endswith('.tbd'): + if f.endswith(".tbd"): # For now, look up the line containing: # install-name: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation - with open(os.path.join(sysroot, f), 'rb') as tbd_fh: - lines = [line for line in tbd_fh.read().decode('utf-8').splitlines() if line.startswith('install-name:')] + with open(os.path.join(sysroot, f), "rb") as tbd_fh: + lines = [ + line + for line in tbd_fh.read().decode("utf-8").splitlines() + if line.startswith("install-name:") + ] if lines: - install_names = [re.match(r'^install-name:\s+(.*)$', line) for line in lines] - install_names = [insname.groups(1)[0] for insname in install_names] + install_names = [ + re.match(r"^install-name:\s+(.*)$", line) for line in lines + ] + install_names = [ + insname.groups(1)[0] for insname in install_names + ] replaced = install_names[0][1:] if replaced.endswith("'"): # Some SDKs have install name surrounded by single qoutes @@ -1125,38 +1398,60 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi diffs = set(orig_sysroot_files) - set(sysroot_files) if diffs: log = utils.get_logger(__name__) - log.warning("Partially parsed some '.tbd' files in sysroot {}, pretending .tbds are their install-names\n" - "Adding support to 'conda-build' for parsing these in 'liefldd.py' would be easy and useful:\n" - "{} ..." 
- .format(sysroot, list(diffs)[1:3])) + log.warning( + "Partially parsed some '.tbd' files in sysroot %s, pretending .tbds are their install-names\n" + "Adding support to 'conda-build' for parsing these in 'liefldd.py' would be easy and useful:\n" + "%s...", + sysroot, + list(diffs)[1:3], + ) sysroots_files[srs] = sysroot_files - sysroots_files = OrderedDict(sorted(sysroots_files.items(), key=lambda x: -len(x[1]))) - - all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos(sysroots_files, files, run_prefix, - sysroot_substitution, - build_prefix, build_prefix_substitution) + sysroots_files = OrderedDict( + sorted(sysroots_files.items(), key=lambda x: -len(x[1])) + ) + + all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos( + sysroots_files, + files, + run_prefix, + sysroot_substitution, + build_prefix, + build_prefix_substitution, + ) prefix_owners, _, _, all_lib_exports = _map_file_to_package( - files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, - sysroot_substitution, enable_static) + files, + run_prefix, + build_prefix, + all_needed_dsos, + pkg_vendored_dist, + ignore_list_syms, + sysroot_substitution, + enable_static, + ) for f in files_to_inspect: needed = needed_dsos_for_file[f] for needed_dso, needed_dso_info in needed.items(): orig = needed_dso - resolved = needed_dso_info['resolved'] - if (not resolved.startswith('/') and - not resolved.startswith(sysroot_substitution) and - not resolved.startswith(build_prefix_substitution) and - resolved.lower() not in [o.lower() for o in prefix_owners[run_prefix]] and - resolved not in filesu): + resolved = needed_dso_info["resolved"] + if ( + not resolved.startswith("/") + and not resolved.startswith(sysroot_substitution) + and not resolved.startswith(build_prefix_substitution) + and resolved.lower() + not in [o.lower() for o in prefix_owners[run_prefix]] + and resolved not in filesu + ): in_whitelist = False if not build_is_host: - in_whitelist = any([caseless_sepless_fnmatch([orig], w) for w in whitelist]) + in_whitelist = any( + [caseless_sepless_fnmatch([orig], w) for w in whitelist] + ) if not in_whitelist: if resolved in prefix_owners[build_prefix]: print(f" ERROR :: {needed_dso} in prefix_owners[build_prefix]") - elif not needed_dso.startswith('$PATH'): + elif not needed_dso.startswith("$PATH"): # DSOs with '$RPATH' in them at this stage are 'unresolved'. 
Though instead of # letting them through through like this, I should detect that they were not # resolved and change them back to how they were stored in the consumer DSO/elf @@ -1168,41 +1463,74 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi # f, rpaths, needed_dso)) pass - _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, - error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, - lib_packages_used, whitelist, sysroots_files, sysroot_prefix, sysroot_substitution, subdir) + _show_linking_messages( + files, + errors, + needed_dsos_for_file, + build_prefix, + run_prefix, + pkg_name, + error_overlinking, + runpath_whitelist, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + whitelist, + sysroots_files, + sysroot_prefix, + sysroot_substitution, + subdir, + ) if lib_packages_used != lib_packages: info_prelude = f" INFO ({pkg_name})" warn_prelude = f"WARNING ({pkg_name})" err_prelude = f" ERROR ({pkg_name})" for lib in lib_packages - lib_packages_used: - if package_nature[lib] in ('run-exports library', 'dso library'): + if package_nature[lib] in ("run-exports library", "dso library"): msg_prelude = err_prelude if error_overdepending else warn_prelude - elif package_nature[lib] == 'plugin library': + elif package_nature[lib] == "plugin library": msg_prelude = info_prelude else: msg_prelude = warn_prelude found_interpreted_and_interpreter = False - if 'interpreter' in package_nature[lib] and 'interpreted' in package_nature[pkg_vendored_dist]: + if ( + "interpreter" in package_nature[lib] + and "interpreted" in package_nature[pkg_vendored_dist] + ): found_interpreted_and_interpreter = True if found_interpreted_and_interpreter: - _print_msg(errors, "{}: Interpreted package '{}' is interpreted by '{}'".format( - info_prelude, pkg_vendored_dist.name, lib.name), verbose=verbose) - elif package_nature[lib] != 'non-library': - _print_msg(errors, "{}: {} package {} in requirements/run but it is not used " - "(i.e. it is overdepending or perhaps statically linked? " - "If that is what you want then add it to `build/ignore_run_exports`)" - .format(msg_prelude, package_nature[lib], lib), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: Interpreted package '{pkg_vendored_dist.name}' is interpreted by '{lib.name}'", + verbose=verbose, + ) + elif package_nature[lib] != "non-library": + _print_msg( + errors, + f"{msg_prelude}: {package_nature[lib]} package {lib} in requirements/run but it is not used " + "(i.e. it is overdepending or perhaps statically linked? 
" + "If that is what you want then add it to `build/ignore_run_exports`)", + verbose=verbose, + ) if len(errors): if exception_on_error: - runpaths_errors = [error for error in errors if re.match(r".*runpaths.*found in.*", error)] + runpaths_errors = [ + error for error in errors if re.match(r".*runpaths.*found in.*", error) + ] if len(runpaths_errors): raise RunPathError(runpaths_errors) - overlinking_errors = [error for error in errors if re.match(r".*(overlinking|not found in|did not find).*", error)] + overlinking_errors = [ + error + for error in errors + if re.match(r".*(overlinking|not found in|did not find).*", error) + ] if len(overlinking_errors): raise OverLinkingError(overlinking_errors) - overdepending_errors = [error for error in errors if "overdepending" in error] + overdepending_errors = [ + error for error in errors if "overdepending" in error + ] if len(overdepending_errors): raise OverDependingError(overdepending_errors) else: @@ -1215,53 +1543,62 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi return dict() -def check_overlinking(m, files, host_prefix=None): - if not host_prefix: - host_prefix = m.config.host_prefix - - overlinking_ignore_patterns = m.meta.get("build", {}).get("overlinking_ignore_patterns") - if overlinking_ignore_patterns: - files = [f for f in files if not any([fnmatch(f, p) for p in overlinking_ignore_patterns])] - return check_overlinking_impl(m.get_value('package/name'), - m.get_value('package/version'), - m.get_value('build/string'), - m.get_value('build/number'), - m.config.target_subdir, - m.get_value('build/ignore_run_exports'), - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('run', [])], - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('build', [])], - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('host', [])], - host_prefix, - m.config.build_prefix, - m.meta.get('build', {}).get('missing_dso_whitelist', []), - m.meta.get('build', {}).get('runpath_whitelist', []), - m.config.error_overlinking, - m.config.error_overdepending, - m.config.verbose, - True, - files, - m.config.bldpkgs_dir, - m.config.output_folder, - list(m.config.channel_urls) + ['local'], - m.config.enable_static, - m.config.variant) +def check_overlinking(m: MetaData, files, host_prefix=None): + patterns = m.get_value("build/overlinking_ignore_patterns", []) + files = [ + file + for file in files + if not any([fnmatch(file, pattern) for pattern in patterns]) + ] + return check_overlinking_impl( + m.name(), + m.version(), + m.build_id(), + m.build_number(), + m.config.target_subdir, + m.get_value("build/ignore_run_exports"), + [req.split(" ")[0] for req in m.get_value("requirements/run", [])], + [req.split(" ")[0] for req in m.get_value("requirements/build", [])], + [req.split(" ")[0] for req in m.get_value("requirements/host", [])], + host_prefix or m.config.host_prefix, + m.config.build_prefix, + m.get_value("build/missing_dso_whitelist", []), + m.get_value("build/runpath_whitelist", []), + m.config.error_overlinking, + m.config.error_overdepending, + m.config.verbose, + True, + files, + m.config.bldpkgs_dir, + m.config.output_folder, + [*m.config.channel_urls, "local"], + m.config.enable_static, + m.config.variant, + ) def post_process_shared_lib(m, f, files, host_prefix=None): if not host_prefix: host_prefix = m.config.host_prefix path = join(host_prefix, f) - codefile_t = codefile_type(path) - if not codefile_t or path.endswith('.debug'): + codefile = codefile_class(path, 
skip_symlinks=True) + if not codefile or path.endswith(".debug"): return - rpaths = m.get_value('build/rpaths', ['lib']) - if codefile_t == 'elffile': - mk_relative_linux(f, host_prefix, rpaths=rpaths, - method=m.get_value('build/rpaths_patcher', None)) - elif codefile_t == 'machofile': - if m.config.host_platform != 'osx': + rpaths = m.get_value("build/rpaths", ["lib"]) + if codefile == elffile: + mk_relative_linux( + f, + host_prefix, + rpaths=rpaths, + method=m.get_value("build/rpaths_patcher", None), + ) + elif codefile == machofile: + if m.config.host_platform != "osx": log = utils.get_logger(__name__) - log.warn("Found Mach-O file but patching is only supported on macOS, skipping: %s", path) + log.warn( + "Found Mach-O file but patching is only supported on macOS, skipping: %s", + path, + ) return mk_relative_osx(path, host_prefix, m, files=files, rpaths=rpaths) @@ -1281,7 +1618,14 @@ def fix_permissions(files, prefix): if old_mode & stat.S_IXUSR: new_mode = new_mode | stat.S_IXGRP | stat.S_IXOTH # ensure user and group can write and all can read - new_mode = new_mode | stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # noqa + new_mode = ( + new_mode + | stat.S_IWUSR + | stat.S_IWGRP + | stat.S_IRUSR + | stat.S_IRGRP + | stat.S_IROTH + ) # noqa if old_mode != new_mode: try: lchmod(path, new_mode) @@ -1290,8 +1634,67 @@ def fix_permissions(files, prefix): log.warn(str(e)) +def check_menuinst_json(files, prefix) -> None: + """ + Check that Menu/*.json files are valid menuinst v2 JSON documents, + as defined by the CEP-11 schema. This JSON schema is part of the `menuinst` + package. + + Validation can fail if the menu/*.json file is not valid JSON, or if it doesn't + comply with the menuinst schema. + + We validate at build-time so we don't have to validate at install-time, saving + `conda` a few dependencies. 
+ """ + json_files = fnmatch_filter(files, "[Mm][Ee][Nn][Uu][/\\]*.[Jj][Ss][Oo][Nn]") + if not json_files: + return + + print("Validating Menu/*.json files") + log = utils.get_logger(__name__, dedupe=False) + try: + import jsonschema + from menuinst.utils import data_path + except ImportError as exc: + log.warning( + "Found 'Menu/*.json' files but couldn't validate: %s", + ", ".join(json_files), + exc_info=exc, + ) + return + + try: + schema_path = data_path("menuinst.schema.json") + with open(schema_path) as f: + schema = json.load(f) + ValidatorClass = jsonschema.validators.validator_for(schema) + validator = ValidatorClass(schema) + except (jsonschema.SchemaError, json.JSONDecodeError, OSError) as exc: + log.warning("'%s' is not a valid menuinst schema", schema_path, exc_info=exc) + return + + for json_file in json_files: + try: + with open(join(prefix, json_file)) as f: + text = f.read() + if "$schema" not in text: + log.warning( + "menuinst v1 JSON document '%s' won't be validated.", json_file + ) + continue + validator.validate(json.loads(text)) + except (jsonschema.ValidationError, json.JSONDecodeError, OSError) as exc: + log.warning( + "'%s' is not a valid menuinst JSON document!", + json_file, + exc_info=exc, + ) + else: + log.info("'%s' is a valid menuinst JSON document", json_file) + + def post_build(m, files, build_python, host_prefix=None, is_already_linked=False): - print('number of files:', len(files)) + print("number of files:", len(files)) if not host_prefix: host_prefix = m.config.host_prefix @@ -1300,63 +1703,73 @@ def post_build(m, files, build_python, host_prefix=None, is_already_linked=False for f in files: make_hardlink_copy(f, host_prefix) - if not m.config.target_subdir.startswith('win'): + if not m.config.target_subdir.startswith("win"): binary_relocation = m.binary_relocation() if not binary_relocation: print("Skipping binary relocation logic") - osx_is_app = (m.config.target_subdir.startswith('osx-') and - bool(m.get_value('build/osx_is_app', False))) + osx_is_app = m.config.target_subdir.startswith("osx-") and bool( + m.get_value("build/osx_is_app", False) + ) check_symlinks(files, host_prefix, m.config.croot) prefix_files = utils.prefix_files(host_prefix) for f in files: - if f.startswith('bin/'): - fix_shebang(f, prefix=host_prefix, build_python=build_python, - osx_is_app=osx_is_app) - if binary_relocation is True or (isinstance(binary_relocation, list) and - f in binary_relocation): + if f.startswith("bin/"): + fix_shebang( + f, + prefix=host_prefix, + build_python=build_python, + osx_is_app=osx_is_app, + ) + if binary_relocation is True or ( + isinstance(binary_relocation, list) and f in binary_relocation + ): post_process_shared_lib(m, f, prefix_files, host_prefix) check_overlinking(m, files, host_prefix) + check_menuinst_json(files, host_prefix) def check_symlinks(files, prefix, croot): - if readlink is False: - return # Not on Unix system msgs = [] real_build_prefix = realpath(prefix) for f in files: path = join(real_build_prefix, f) if islink(path): - link_path = readlink(path) + link_path = os.readlink(path) real_link_path = realpath(path) # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS # If condition exists, then copy the file rather than symlink it. 
- if (not dirname(link_path) == dirname(real_link_path) and - codefile_type(f)): + if not dirname(link_path) == dirname(real_link_path) and codefile_class( + f, skip_symlinks=True + ): os.remove(path) utils.copy_into(real_link_path, path) elif real_link_path.startswith(real_build_prefix): # If the path is in the build prefix, this is fine, but # the link needs to be relative relative_path = relpath(real_link_path, dirname(path)) - if not link_path.startswith('.') and link_path != relative_path: + if not link_path.startswith(".") and link_path != relative_path: # Don't change the link structure if it is already a # relative link. It's possible that ..'s later in the path # can result in a broken link still, but we'll assume that # such crazy things don't happen. - print(f"Making absolute symlink relative ({f} -> {link_path} :-> {relative_path})") + print( + f"Making absolute symlink relative ({f} -> {link_path} :-> {relative_path})" + ) os.unlink(path) os.symlink(relative_path, path) else: # Symlinks to absolute paths on the system (like /usr) are fine. if real_link_path.startswith(croot): - msgs.append("%s is a symlink to a path that may not " - "exist after the build is completed (%s)" % (f, link_path)) + msgs.append( + f"{f} is a symlink to a path that may not " + f"exist after the build is completed ({link_path})" + ) if msgs: for msg in msgs: - print("Error: %s" % msg, file=sys.stderr) + print(f"Error: {msg}", file=sys.stderr) sys.exit(1) @@ -1380,15 +1793,21 @@ def make_hardlink_copy(path, prefix): def get_build_metadata(m): src_dir = m.config.work_dir - if exists(join(src_dir, '__conda_version__.txt')): - raise ValueError("support for __conda_version__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") - if exists(join(src_dir, '__conda_buildnum__.txt')): - raise ValueError("support for __conda_buildnum__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") - if exists(join(src_dir, '__conda_buildstr__.txt')): - raise ValueError("support for __conda_buildstr__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") + if exists(join(src_dir, "__conda_version__.txt")): + raise ValueError( + "support for __conda_version__ has been removed as of Conda-build 3.0." + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) + if exists(join(src_dir, "__conda_buildnum__.txt")): + raise ValueError( + "support for __conda_buildnum__ has been removed as of Conda-build 3.0." + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) + if exists(join(src_dir, "__conda_buildstr__.txt")): + raise ValueError( + "support for __conda_buildstr__ has been removed as of Conda-build 3.0." 
+ "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) diff --git a/conda_build/render.py b/conda_build/render.py index e89103e2a5..cc3bcd87c0 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -1,37 +1,57 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict, defaultdict -from functools import lru_cache +from __future__ import annotations + import json import os -from os.path import isdir, isfile, abspath import random import re -import shutil import string import subprocess import sys import tarfile -import tempfile +from collections import OrderedDict, defaultdict +from contextlib import contextmanager +from functools import lru_cache +from os.path import ( + isabs, + isdir, + isfile, + join, + normpath, +) +from pathlib import Path +from typing import TYPE_CHECKING import yaml - -from .conda_interface import (UnsatisfiableError, ProgressiveFetchExtract, - TemporaryDirectory) -from .conda_interface import execute_actions -from .conda_interface import pkgs_dirs -from .conda_interface import conda_43 -from .conda_interface import specs_from_url -from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 - -from conda_build import exceptions, utils, environ -from conda_build.metadata import MetaData, combine_top_level_metadata_with_output -import conda_build.source as source -from conda_build.variants import (get_package_variants, list_of_dicts_to_dict_of_lists, - filter_by_key_value) -from conda_build.exceptions import DependencyNeedsBuildingError -from conda_build.index import get_build_index -# from conda_build.jinja_context import pin_subpackage_against_outputs +from conda.base.context import context +from conda.cli.common import specs_from_url +from conda.core.package_cache_data import ProgressiveFetchExtract +from conda.exceptions import UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder + +from . import environ, exceptions, source, utils +from .exceptions import DependencyNeedsBuildingError +from .index import get_build_index +from .metadata import MetaData, MetaDataTuple, combine_top_level_metadata_with_output +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + package_record_to_requirement, +) +from .variants import ( + filter_by_key_value, + get_package_variants, + list_of_dicts_to_dict_of_lists, +) + +if TYPE_CHECKING: + import os + from typing import Any, Iterable, Iterator + + from .config import Config def odict_representer(dumper, data): @@ -43,13 +63,13 @@ def odict_representer(dumper, data): yaml.add_representer(OrderedDict, odict_representer) -def bldpkg_path(m): - ''' +def bldpkg_path(m: MetaData) -> str: + """ Returns path to built package's tarball given its ``Metadata``. 
- ''' - subdir = 'noarch' if m.noarch or m.noarch_python else m.config.host_subdir + """ + subdir = "noarch" if m.noarch or m.noarch_python else m.config.host_subdir - if not hasattr(m, 'type'): + if not hasattr(m, "type"): if m.config.conda_pkg_format == "2": pkg_type = "conda_v2" else: @@ -59,25 +79,20 @@ def bldpkg_path(m): # the default case will switch over to conda_v2 at some point if pkg_type == "conda": - path = os.path.join(m.config.output_folder, subdir, f'{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}') + path = join( + m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}" + ) elif pkg_type == "conda_v2": - path = os.path.join(m.config.output_folder, subdir, f'{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}') + path = join( + m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}" + ) else: - path = f'{m.type} file for {m.name()} in: {os.path.join(m.config.output_folder, subdir)}' + path = ( + f"{m.type} file for {m.name()} in: {join(m.config.output_folder, subdir)}" + ) return path -def actions_to_pins(actions): - specs = [] - if conda_43: - spec_name = lambda x: x.dist_name - else: - spec_name = lambda x: str(x) - if 'LINK' in actions: - specs = [' '.join(spec_name(spec).split()[0].rsplit('-', 2)) for spec in actions['LINK']] - return specs - - def _categorize_deps(m, specs, exclude_pattern, variant): subpackages = [] dependencies = [] @@ -88,79 +103,97 @@ def _categorize_deps(m, specs, exclude_pattern, variant): if not exclude_pattern or not exclude_pattern.match(spec): is_subpackage = False spec_name = spec.split()[0] - for entry in m.get_section('outputs'): - name = entry.get('name') + for entry in m.get_section("outputs"): + name = entry.get("name") if name == spec_name: - subpackages.append(' '.join((name, m.version()))) + subpackages.append(" ".join((name, m.version()))) is_subpackage = True if not is_subpackage: dependencies.append(spec) # fill in variant version iff no version at all is provided for key, value in variant.items(): # for sake of comparison, ignore dashes and underscores - if (dash_or_under.sub("", key) == dash_or_under.sub("", spec_name) and - not re.search(r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name, spec)): + if dash_or_under.sub("", key) == dash_or_under.sub( + "", spec_name + ) and not re.search(rf"{spec_name}\s+[0-9a-zA-Z\_\.\<\>\=\*]", spec): dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) return subpackages, dependencies, pass_through_deps -def get_env_dependencies(m, env, variant, exclude_pattern=None, - permit_unsatisfiable_variants=False, - merge_build_host_on_same_platform=True): +def get_env_dependencies( + m: MetaData, + env, + variant, + exclude_pattern=None, + permit_unsatisfiable_variants=False, + merge_build_host_on_same_platform=True, +): specs = m.get_depends_top_and_out(env) # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x - if env in ('build', 'host'): + if env in ("build", "host"): no_xx_specs = [] for spec in specs: - if ' x.x' in spec: + if " x.x" in spec: pkg_name = spec.split()[0] - no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, "")))) + no_xx_specs.append(" ".join((pkg_name, variant.get(pkg_name, "")))) else: no_xx_specs.append(spec) specs = no_xx_specs - subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant) + subpackages, dependencies, pass_through_deps = _categorize_deps( + m, specs, exclude_pattern, variant + ) dependencies = 
set(dependencies) unsat = None - random_string = ''.join(random.choice(string.ascii_uppercase + string.digits) - for _ in range(10)) + random_string = "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(10) + ) with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir: try: - actions = environ.get_install_actions(tmpdir, tuple(dependencies), env, - subdir=getattr(m.config, f'{env}_subdir'), - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + precs = environ.get_package_records( + tmpdir, + tuple(dependencies), + env, + subdir=getattr(m.config, f"{env}_subdir"), + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) except (UnsatisfiableError, DependencyNeedsBuildingError) as e: # we'll get here if the environment is unsatisfiable - if hasattr(e, 'packages'): - unsat = ', '.join(e.packages) + if hasattr(e, "packages"): + unsat = ", ".join(e.packages) else: unsat = e.message if permit_unsatisfiable_variants: - actions = {} + precs = [] else: raise - specs = actions_to_pins(actions) - return (utils.ensure_list((specs + subpackages + pass_through_deps) or - m.meta.get('requirements', {}).get(env, [])), - actions, unsat) + specs = [package_record_to_requirement(prec) for prec in precs] + return ( + utils.ensure_list( + (specs + subpackages + pass_through_deps) + or m.get_value(f"requirements/{env}", []) + ), + precs, + unsat, + ) def strip_channel(spec_str): - if hasattr(spec_str, 'decode'): + if hasattr(spec_str, "decode"): spec_str = spec_str.decode() - if ':' in spec_str: + if ":" in spec_str: spec_str = spec_str.split("::")[-1] return spec_str @@ -168,23 +201,30 @@ def strip_channel(spec_str): def get_pin_from_build(m, dep, build_dep_versions): dep_split = dep.split() dep_name = dep_split[0] - build = '' + build = "" if len(dep_split) >= 3: build = dep_split[2] pin = None version = build_dep_versions.get(dep_name) or m.config.variant.get(dep_name) - if (version and dep_name in m.config.variant.get('pin_run_as_build', {}) and - not (dep_name == 'python' and (m.noarch or m.noarch_python)) and - dep_name in build_dep_versions): - pin_cfg = m.config.variant['pin_run_as_build'][dep_name] + if ( + version + and dep_name in m.config.variant.get("pin_run_as_build", {}) + and not (dep_name == "python" and (m.noarch or m.noarch_python)) + and dep_name in build_dep_versions + ): + pin_cfg = m.config.variant["pin_run_as_build"][dep_name] if isinstance(pin_cfg, str): # if pin arg is a single 'x.x', use the same value for min and max pin_cfg = dict(min_pin=pin_cfg, max_pin=pin_cfg) pin = utils.apply_pin_expressions(version.split()[0], **pin_cfg) - elif dep.startswith('numpy') and 'x.x' in dep: + elif dep.startswith("numpy") and "x.x" in dep: if not build_dep_versions.get(dep_name): - raise ValueError("numpy x.x specified, but numpy not in build requirements.") - pin = utils.apply_pin_expressions(version.split()[0], min_pin='x.x', max_pin='x.x') + raise ValueError( + "numpy x.x specified, but numpy not in build requirements." 
+ ) + pin = utils.apply_pin_expressions( + version.split()[0], min_pin="x.x", max_pin="x.x" + ) if pin: dep = " ".join((dep_name, pin, build)).strip() return dep @@ -194,57 +234,72 @@ def _filter_run_exports(specs, ignore_list): filtered_specs = {} for agent, specs_list in specs.items(): for spec in specs_list: - if hasattr(spec, 'decode'): + if hasattr(spec, "decode"): spec = spec.decode() - if not any((ignore_spec == '*' or spec == ignore_spec or - spec.startswith(ignore_spec + ' ')) for ignore_spec in ignore_list): + if not any( + ( + ignore_spec == "*" + or spec == ignore_spec + or spec.startswith(ignore_spec + " ") + ) + for ignore_spec in ignore_list + ): filtered_specs[agent] = filtered_specs.get(agent, []) + [spec] return filtered_specs -def find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False): - _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs) - pkg_loc = None - for pkgs_dir in _pkgs_dirs: - pkg_dir = os.path.join(pkgs_dir, pkg_dist) - pkg_file = os.path.join(pkgs_dir, pkg_dist + CONDA_PACKAGE_EXTENSION_V1) - if not files_only and os.path.isdir(pkg_dir): - pkg_loc = pkg_dir - break - elif os.path.isfile(pkg_file): - pkg_loc = pkg_file - break - elif files_only and os.path.isdir(pkg_dir): - pkg_loc = pkg_file - # create the tarball on demand. This is so that testing on archives works. - with tarfile.open(pkg_file, 'w:bz2') as archive: - for entry in os.listdir(pkg_dir): - archive.add(os.path.join(pkg_dir, entry), arcname=entry) - pkg_subdir = os.path.join(m.config.croot, m.config.host_subdir) - pkg_loc = os.path.join(pkg_subdir, os.path.basename(pkg_file)) - shutil.move(pkg_file, pkg_loc) - return pkg_loc +def find_pkg_dir_or_file_in_pkgs_dirs( + distribution: str, m: MetaData, files_only: bool = False +) -> str | None: + for cache in map(Path, (*context.pkgs_dirs, *m.config.bldpkgs_dirs)): + package = cache / (distribution + CONDA_PACKAGE_EXTENSION_V1) + if package.is_file(): + return str(package) + + directory = cache / distribution + if directory.is_dir(): + if not files_only: + return str(directory) + + # get the package's subdir + try: + subdir = json.loads((directory / "info" / "index.json").read_text())[ + "subdir" + ] + except (FileNotFoundError, KeyError): + subdir = m.config.host_subdir + + # create the tarball on demand so testing on archives works + package = Path( + m.config.croot, subdir, distribution + CONDA_PACKAGE_EXTENSION_V1 + ) + with tarfile.open(package, "w:bz2") as archive: + for entry in directory.iterdir(): + archive.add(entry, arcname=entry.name) + + return str(package) + return None @lru_cache(maxsize=None) def _read_specs_from_package(pkg_loc, pkg_dist): specs = {} - if pkg_loc and os.path.isdir(pkg_loc): - downstream_file = os.path.join(pkg_loc, 'info/run_exports') - if os.path.isfile(downstream_file): + if pkg_loc and isdir(pkg_loc): + downstream_file = join(pkg_loc, "info/run_exports") + if isfile(downstream_file): with open(downstream_file) as f: - specs = {'weak': [spec.rstrip() for spec in f.readlines()]} + specs = {"weak": [spec.rstrip() for spec in f.readlines()]} # a later attempt: record more info in the yaml file, to support "strong" run exports - elif os.path.isfile(downstream_file + '.yaml'): - with open(downstream_file + '.yaml') as f: + elif isfile(downstream_file + ".yaml"): + with open(downstream_file + ".yaml") as f: specs = yaml.safe_load(f) - elif os.path.isfile(downstream_file + '.json'): - with open(downstream_file + '.json') as f: + elif isfile(downstream_file + ".json"): + with open(downstream_file + ".json") as f: 
specs = json.load(f) - if not specs and pkg_loc and os.path.isfile(pkg_loc): + if not specs and pkg_loc and isfile(pkg_loc): # switching to json for consistency in conda-build 4 - specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml') - specs_json = utils.package_has_file(pkg_loc, 'info/run_exports.json') + specs_yaml = utils.package_has_file(pkg_loc, "info/run_exports.yaml") + specs_json = utils.package_has_file(pkg_loc, "info/run_exports.json") if hasattr(specs_json, "decode"): specs_json = specs_json.decode("utf-8") @@ -253,7 +308,7 @@ def _read_specs_from_package(pkg_loc, pkg_dist): elif specs_yaml: specs = yaml.safe_load(specs_yaml) else: - legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports') + legacy_specs = utils.package_has_file(pkg_loc, "info/run_exports") # exclude packages pinning themselves (makes no sense) if legacy_specs: weak_specs = set() @@ -262,100 +317,117 @@ def _read_specs_from_package(pkg_loc, pkg_dist): for spec in legacy_specs.splitlines(): if hasattr(spec, "decode"): spec = spec.decode("utf-8") - if not spec.startswith(pkg_dist.rsplit('-', 2)[0]): + if not spec.startswith(pkg_dist.rsplit("-", 2)[0]): weak_specs.add(spec.rstrip()) - specs = {'weak': sorted(list(weak_specs))} + specs = {"weak": sorted(list(weak_specs))} return specs -def execute_download_actions(m, actions, env, package_subset=None, require_files=False): - index, _, _ = get_build_index(getattr(m.config, f'{env}_subdir'), bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout) +def execute_download_actions(m, precs, env, package_subset=None, require_files=False): + subdir = getattr(m.config, f"{env}_subdir") + index, _, _ = get_build_index( + subdir=subdir, + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + clear_cache=False, + omit_defaults=False, + channel_urls=m.config.channel_urls, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + ) # this should be just downloading packages. We don't need to extract them - - download_actions = {k: v for k, v in actions.items() if k in ('FETCH', 'EXTRACT', 'PREFIX')} - if 'FETCH' in actions or 'EXTRACT' in actions: - # this is to force the download - execute_actions(download_actions, index, verbose=m.config.debug) + # NOTE: The following commented execute_actions is defunct + # (FETCH/EXTRACT were replaced by PROGRESSIVEFETCHEXTRACT). 
+ # + # download_actions = { + # k: v for k, v in actions.items() if k in (FETCH, EXTRACT, PREFIX) + # } + # if FETCH in actions or EXTRACT in actions: + # # this is to force the download + # execute_actions(download_actions, index, verbose=m.config.debug) pkg_files = {} - packages = actions.get('LINK', []) - package_subset = utils.ensure_list(package_subset) + if isinstance(package_subset, PackageRecord): + package_subset = [package_subset] + else: + package_subset = utils.ensure_list(package_subset) selected_packages = set() if package_subset: for pkg in package_subset: - if hasattr(pkg, 'name'): - if pkg in packages: - selected_packages.add(pkg) + if isinstance(pkg, PackageRecord): + prec = pkg + if prec in precs: + selected_packages.add(prec) else: pkg_name = pkg.split()[0] - for link_pkg in packages: - if pkg_name == link_pkg.name: - selected_packages.add(link_pkg) + for link_prec in precs: + if pkg_name == link_prec.name: + selected_packages.add(link_prec) break - packages = selected_packages + precs = selected_packages - for pkg in packages: - if hasattr(pkg, 'dist_name'): - pkg_dist = pkg.dist_name - else: - pkg = strip_channel(pkg) - pkg_dist = pkg.split(' ')[0] - pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files) + for prec in precs: + pkg_dist = "-".join((prec.name, prec.version, prec.build)) + pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs( + pkg_dist, m, files_only=require_files + ) # ran through all pkgs_dirs, and did not find package or folder. Download it. # TODO: this is a vile hack reaching into conda's internals. Replace with # proper conda API when available. - if not pkg_loc and conda_43: - try: - pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0] - # the conda 4.4 API uses a single `link_prefs` kwarg - # whereas conda 4.3 used `index` and `link_dists` kwargs - pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],)) - except TypeError: - # TypeError: __init__() got an unexpected keyword argument 'link_prefs' - pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index) + if not pkg_loc: + link_prec = [ + rec + for rec in index + if (rec.name, rec.version, rec.build) + == (prec.name, prec.version, prec.build) + ][0] + pfe = ProgressiveFetchExtract(link_prefs=(link_prec,)) with utils.LoggingContext(): pfe.execute() - for pkg_dir in pkgs_dirs: - _loc = os.path.join(pkg_dir, index[pkg].fn) - if os.path.isfile(_loc): + for pkg_dir in context.pkgs_dirs: + _loc = join(pkg_dir, prec.fn) + if isfile(_loc): pkg_loc = _loc break - pkg_files[pkg] = pkg_loc, pkg_dist + pkg_files[prec] = pkg_loc, pkg_dist return pkg_files -def get_upstream_pins(m, actions, env): +def get_upstream_pins(m: MetaData, precs, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. 
Return these additional specs.""" + env_specs = m.get_value(f"requirements/{env}", []) + explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] + precs = [prec for prec in precs if prec.name in explicit_specs] - env_specs = m.meta.get('requirements', {}).get(env, []) - explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else [] - linked_packages = actions.get('LINK', []) - linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs] - - ignore_pkgs_list = utils.ensure_list(m.get_value('build/ignore_run_exports_from')) - ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports')) + ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) + ignore_list = utils.ensure_list(m.get_value("build/ignore_run_exports")) additional_specs = {} - for pkg in linked_packages: - if any(pkg.name in req.split(' ')[0] for req in ignore_pkgs_list): + for prec in precs: + if any(prec.name in req.split(" ")[0] for req in ignore_pkgs_list): continue run_exports = None if m.config.use_channeldata: - channeldata = utils.download_channeldata(pkg.channel) + channeldata = utils.download_channeldata(prec.channel) # only use channeldata if requested, channeldata exists and contains # a packages key, otherwise use run_exports from the packages themselves - if 'packages' in channeldata: - pkg_data = channeldata['packages'].get(pkg.name, {}) - run_exports = pkg_data.get('run_exports', {}).get(pkg.version, {}) + if "packages" in channeldata: + pkg_data = channeldata["packages"].get(prec.name, {}) + run_exports = pkg_data.get("run_exports", {}).get(prec.version, {}) if run_exports is None: - loc, dist = execute_download_actions(m, actions, env=env, package_subset=pkg)[pkg] + loc, dist = execute_download_actions( + m, + precs, + env=env, + package_subset=[prec], + )[prec] run_exports = _read_specs_from_package(loc, dist) specs = _filter_run_exports(run_exports, ignore_list) if specs: @@ -363,88 +435,121 @@ def get_upstream_pins(m, actions, env): return additional_specs -def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern): - deps, actions, unsat = get_env_dependencies(m, env, m.config.variant, - exclude_pattern, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) +def _read_upstream_pin_files( + m: MetaData, + env, + permit_unsatisfiable_variants, + exclude_pattern, +): + deps, precs, unsat = get_env_dependencies( + m, + env, + m.config.variant, + exclude_pattern, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) # extend host deps with strong build run exports. This is important for things like # vc feature activation to work correctly in the host env. - extra_run_specs = get_upstream_pins(m, actions, env) - return list(set(deps)) or m.meta.get('requirements', {}).get(env, []), unsat, extra_run_specs + extra_run_specs = get_upstream_pins(m, precs, env) + return ( + list(set(deps)) or m.get_value(f"requirements/{env}", []), + unsat, + extra_run_specs, + ) -def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern): +def add_upstream_pins(m: MetaData, permit_unsatisfiable_variants, exclude_pattern): """Applies run_exports from any build deps to host and run sections""" # if we have host deps, they're more important than the build deps. 
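For context on the specs flowing through get_upstream_pins() and add_upstream_pins() below: the run_exports data read from a package is a small mapping keyed by export strength. A representative, hypothetical value (package names and pins are made up; only the keys mirror what the surrounding code consumes):

# Hypothetical shape of the mapping returned by _read_specs_from_package().
run_exports = {
    "weak": ["libexample >=1.2.13,<2.0a0"],   # host deps exported into run
    "strong": ["vc >=14.1,<15"],              # build deps exported into host and run
    "weak_constrains": [],                    # become run_constrained entries
    "strong_constrains": [],
    "noarch": ["python"],                     # used when the output is noarch
}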
- requirements = m.meta.get('requirements', {}) - build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build', - permit_unsatisfiable_variants, exclude_pattern) + requirements = m.get_section("requirements") + build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files( + m, "build", permit_unsatisfiable_variants, exclude_pattern + ) # is there a 'host' section? if m.is_cross: # this must come before we read upstream pins, because it will enforce things # like vc version from the compiler. - host_reqs = utils.ensure_list(m.get_value('requirements/host')) + host_reqs = utils.ensure_list(m.get_value("requirements/host")) # ensure host_reqs is present, so in-place modification below is actually in-place - requirements = m.meta.setdefault('requirements', {}) - requirements['host'] = host_reqs + requirements = m.meta.setdefault("requirements", {}) + requirements["host"] = host_reqs if not host_reqs: - matching_output = [out for out in m.meta.get('outputs', []) if - out.get('name') == m.name()] + matching_output = [ + out for out in m.get_section("outputs") if out.get("name") == m.name() + ] if matching_output: - requirements = utils.expand_reqs(matching_output[0].get('requirements', {})) - matching_output[0]['requirements'] = requirements - host_reqs = requirements.setdefault('host', []) + requirements = utils.expand_reqs( + matching_output[0].get("requirements", {}) + ) + matching_output[0]["requirements"] = requirements + host_reqs = requirements.setdefault("host", []) # in-place modification of above thingie - host_reqs.extend(extra_run_specs_from_build.get('strong', [])) + host_reqs.extend(extra_run_specs_from_build.get("strong", [])) - host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host', - permit_unsatisfiable_variants, exclude_pattern) + host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files( + m, "host", permit_unsatisfiable_variants, exclude_pattern + ) if m.noarch or m.noarch_python: - extra_run_specs = set(extra_run_specs_from_host.get('noarch', [])) + extra_run_specs = set(extra_run_specs_from_host.get("noarch", [])) extra_run_constrained_specs = set() else: - extra_run_specs = set(extra_run_specs_from_host.get('strong', []) + - extra_run_specs_from_host.get('weak', []) + - extra_run_specs_from_build.get('strong', [])) + extra_run_specs = set( + extra_run_specs_from_host.get("strong", []) + + extra_run_specs_from_host.get("weak", []) + + extra_run_specs_from_build.get("strong", []) + ) extra_run_constrained_specs = set( - extra_run_specs_from_host.get('strong_constrains', []) + - extra_run_specs_from_host.get('weak_constrains', []) + - extra_run_specs_from_build.get('strong_constrains', []) + extra_run_specs_from_host.get("strong_constrains", []) + + extra_run_specs_from_host.get("weak_constrains", []) + + extra_run_specs_from_build.get("strong_constrains", []) ) else: host_deps = [] host_unsat = [] if m.noarch or m.noarch_python: if m.build_is_host: - extra_run_specs = set(extra_run_specs_from_build.get('noarch', [])) + extra_run_specs = set(extra_run_specs_from_build.get("noarch", [])) extra_run_constrained_specs = set() - build_deps = set(build_deps or []).update(extra_run_specs_from_build.get('noarch', [])) + build_deps = set(build_deps or []).update( + extra_run_specs_from_build.get("noarch", []) + ) else: extra_run_specs = set() extra_run_constrained_specs = set() build_deps = set(build_deps or []) else: - extra_run_specs = set(extra_run_specs_from_build.get('strong', [])) - 
extra_run_constrained_specs = set(extra_run_specs_from_build.get('strong_constrains', [])) + extra_run_specs = set(extra_run_specs_from_build.get("strong", [])) + extra_run_constrained_specs = set( + extra_run_specs_from_build.get("strong_constrains", []) + ) if m.build_is_host: - extra_run_specs.update(extra_run_specs_from_build.get('weak', [])) - extra_run_constrained_specs.update(extra_run_specs_from_build.get('weak_constrains', [])) - build_deps = set(build_deps or []).update(extra_run_specs_from_build.get('weak', [])) + extra_run_specs.update(extra_run_specs_from_build.get("weak", [])) + extra_run_constrained_specs.update( + extra_run_specs_from_build.get("weak_constrains", []) + ) + build_deps = set(build_deps or []).update( + extra_run_specs_from_build.get("weak", []) + ) else: - host_deps = set(extra_run_specs_from_build.get('strong', [])) + host_deps = set(extra_run_specs_from_build.get("strong", [])) - run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run'))) - run_constrained_deps = extra_run_constrained_specs | set(utils.ensure_list(requirements.get('run_constrained'))) + run_deps = extra_run_specs | set(utils.ensure_list(requirements.get("run"))) + run_constrained_deps = extra_run_constrained_specs | set( + utils.ensure_list(requirements.get("run_constrained")) + ) for section, deps in ( - ('build', build_deps), ('host', host_deps), ('run', run_deps), ('run_constrained', run_constrained_deps), + ("build", build_deps), + ("host", host_deps), + ("run", run_deps), + ("run_constrained", run_constrained_deps), ): if deps: requirements[section] = list(deps) - m.meta['requirements'] = requirements + m.meta["requirements"] = requirements return build_unsat, host_unsat @@ -453,9 +558,9 @@ def _simplify_to_exact_constraints(metadata): For metapackages that are pinned exactly, we want to bypass all dependencies that may be less exact. """ - requirements = metadata.meta.get('requirements', {}) + requirements = metadata.meta.get("requirements", {}) # collect deps on a per-section basis - for section in 'build', 'host', 'run': + for section in "build", "host", "run": deps = utils.ensure_list(requirements.get(section, [])) deps_dict = defaultdict(list) for dep in deps: @@ -472,7 +577,7 @@ def _simplify_to_exact_constraints(metadata): for dep in values: if len(dep) > 1: version, build = dep[:2] - if not (any(c in version for c in ('>', '<', '*')) or '*' in build): + if not (any(c in version for c in (">", "<", "*")) or "*" in build): exact_pins.append(dep) if len(values) == 1 and not any(values): deps_list.append(name) @@ -480,15 +585,19 @@ def _simplify_to_exact_constraints(metadata): if not all(pin == exact_pins[0] for pin in exact_pins): raise ValueError(f"Conflicting exact pins: {exact_pins}") else: - deps_list.append(' '.join([name] + exact_pins[0])) + deps_list.append(" ".join([name] + exact_pins[0])) else: - deps_list.extend(' '.join([name] + dep) for dep in values if dep) + deps_list.extend(" ".join([name] + dep) for dep in values if dep) if section in requirements and deps_list: requirements[section] = deps_list - metadata.meta['requirements'] = requirements + metadata.meta["requirements"] = requirements -def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False): +def finalize_metadata( + m: MetaData, + parent_metadata=None, + permit_unsatisfiable_variants=False, +): """Fully render a recipe. 
Fill in versions for build/host dependencies.""" if not parent_metadata: parent_metadata = m @@ -496,21 +605,24 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal m.final = True else: exclude_pattern = None - excludes = set(m.config.variant.get('ignore_version', [])) + excludes = set(m.config.variant.get("ignore_version", [])) - for key in m.config.variant.get('pin_run_as_build', {}).keys(): + for key in m.config.variant.get("pin_run_as_build", {}).keys(): if key in excludes: excludes.remove(key) output_excludes = set() - if hasattr(m, 'other_outputs'): + if hasattr(m, "other_outputs"): output_excludes = {name for (name, variant) in m.other_outputs.keys()} if excludes or output_excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in excludes | output_excludes)) + exclude_pattern = re.compile( + r"|".join( + rf"(?:^{exc}(?:\s|$|\Z))" for exc in excludes | output_excludes + ) + ) - parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {}) + parent_recipe = m.get_value("extra/parent_recipe", {}) # extract the topmost section where variables are defined, and put it on top of the # requirements for a particular output @@ -519,111 +631,118 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal is_top_level = True if output: - if 'package' in output or 'name' not in output: + if "package" in output or "name" not in output: # it's just a top-level recipe - output = {'name': m.name()} + output = {"name": m.name()} else: is_top_level = False - if not parent_recipe or parent_recipe['name'] == m.name(): + if not parent_recipe or parent_recipe["name"] == m.name(): combine_top_level_metadata_with_output(m, output) - requirements = utils.expand_reqs(output.get('requirements', {})) - m.meta['requirements'] = requirements + requirements = utils.expand_reqs(output.get("requirements", {})) + m.meta["requirements"] = requirements - if m.meta.get('requirements'): - utils.insert_variant_versions(m.meta['requirements'], - m.config.variant, 'build') - utils.insert_variant_versions(m.meta['requirements'], - m.config.variant, 'host') + if requirements := m.get_section("requirements"): + utils.insert_variant_versions(requirements, m.config.variant, "build") + utils.insert_variant_versions(requirements, m.config.variant, "host") m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name())) - build_unsat, host_unsat = add_upstream_pins(m, - permit_unsatisfiable_variants, - exclude_pattern) + build_unsat, host_unsat = add_upstream_pins( + m, permit_unsatisfiable_variants, exclude_pattern + ) # getting this AFTER add_upstream_pins is important, because that function adds deps # to the metadata. - requirements = m.meta.get('requirements', {}) + requirements = m.get_section("requirements") # here's where we pin run dependencies to their build time versions. This happens based # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package # names to have this behavior. 
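The 'x.x'-style pin expressions referenced here describe how many version components of the build-time package to keep when pinning the run requirement. A rough, self-contained sketch of the idea (not the real conda_build.utils.apply_pin_expressions, which handles more edge cases such as pre-release upper bounds):

def rough_pin(version: str, min_pin: str = "x.x", max_pin: str = "x.x") -> str:
    """Illustrative only: build a '>=lower,<upper' spec from a concrete version."""
    parts = version.split(".")
    # 'x.x' means "keep two components" for the respective bound.
    lower = ".".join(parts[: min_pin.count("x")])
    upper_parts = [int(p) for p in parts[: max_pin.count("x")]]
    upper_parts[-1] += 1  # bump the last kept component for the exclusive upper bound
    upper = ".".join(str(p) for p in upper_parts)
    return f">={lower},<{upper}"

# rough_pin("1.21.5") -> '>=1.21,<1.22'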
if output_excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in output_excludes)) - pinning_env = 'host' if m.is_cross else 'build' + exclude_pattern = re.compile( + r"|".join(rf"(?:^{exc}(?:\s|$|\Z))" for exc in output_excludes) + ) + pinning_env = "host" if m.is_cross else "build" build_reqs = requirements.get(pinning_env, []) # if python is in the build specs, but doesn't have a specific associated # version, make sure to add one - if build_reqs and 'python' in build_reqs: - build_reqs.append('python {}'.format(m.config.variant['python'])) - m.meta['requirements'][pinning_env] = build_reqs - - full_build_deps, _, _ = get_env_dependencies(m, pinning_env, - m.config.variant, - exclude_pattern=exclude_pattern, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) - full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:]) - for dep in full_build_deps} - - if isfile(m.requirements_path) and not requirements.get('run'): - requirements['run'] = specs_from_url(m.requirements_path) - run_deps = requirements.get('run', []) - - versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions) - for dep in run_deps] - versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True) - for spec in versioned_run_deps] + if build_reqs and "python" in build_reqs: + build_reqs.append("python {}".format(m.config.variant["python"])) + m.meta["requirements"][pinning_env] = build_reqs + + full_build_deps, _, _ = get_env_dependencies( + m, + pinning_env, + m.config.variant, + exclude_pattern=exclude_pattern, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) + full_build_dep_versions = { + dep.split()[0]: " ".join(dep.split()[1:]) for dep in full_build_deps + } + + if isfile(m.requirements_path) and not requirements.get("run"): + requirements["run"] = specs_from_url(m.requirements_path) + run_deps = requirements.get("run", []) + + versioned_run_deps = [ + get_pin_from_build(m, dep, full_build_dep_versions) for dep in run_deps + ] + versioned_run_deps = [ + utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps + ] requirements[pinning_env] = full_build_deps - requirements['run'] = versioned_run_deps + requirements["run"] = versioned_run_deps - m.meta['requirements'] = requirements + m.meta["requirements"] = requirements # append other requirements, such as python.app, appropriately m.append_requirements() - if m.pin_depends == 'strict': - m.meta['requirements']['run'] = environ.get_pinned_deps( - m, 'run') - test_deps = m.get_value('test/requires') + if m.pin_depends == "strict": + m.meta["requirements"]["run"] = environ.get_pinned_deps(m, "run") + test_deps = m.get_value("test/requires") if test_deps: - versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions) - for dep in test_deps}) - versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True) - for spec in versioned_test_deps] - m.meta['test']['requires'] = versioned_test_deps - extra = m.meta.get('extra', {}) - extra['copy_test_source_files'] = m.config.copy_test_source_files - m.meta['extra'] = extra + versioned_test_deps = list( + { + get_pin_from_build(m, dep, full_build_dep_versions) + for dep in test_deps + } + ) + versioned_test_deps = [ + utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps + ] + m.meta["test"]["requires"] = versioned_test_deps + extra = m.get_section("extra") + extra["copy_test_source_files"] = m.config.copy_test_source_files + m.meta["extra"] = extra # if source/path is relative, then the 
output package makes no sense at all. The next # best thing is to hard-code the absolute path. This probably won't exist on any # system other than the original build machine, but at least it will work there. - if m.meta.get('source'): - if 'path' in m.meta['source']: - source_path = m.meta['source']['path'] - os.path.expanduser(source_path) - if not os.path.isabs(source_path): - m.meta['source']['path'] = os.path.normpath( - os.path.join(m.path, source_path)) - elif ('git_url' in m.meta['source'] and not ( - # absolute paths are not relative paths - os.path.isabs(m.meta['source']['git_url']) or - # real urls are not relative paths - ":" in m.meta['source']['git_url'])): - m.meta['source']['git_url'] = os.path.normpath( - os.path.join(m.path, m.meta['source']['git_url'])) - - if not m.meta.get('build'): - m.meta['build'] = {} + for source_dict in m.get_section("source"): + if (source_path := source_dict.get("path")) and not isabs(source_path): + source_dict["path"] = normpath(join(m.path, source_path)) + elif ( + (git_url := source_dict.get("git_url")) + # absolute paths are not relative paths + and not isabs(git_url) + # real urls are not relative paths + and ":" not in git_url + ): + source_dict["git_url"] = normpath(join(m.path, git_url)) + + m.meta.setdefault("build", {}) _simplify_to_exact_constraints(m) if build_unsat or host_unsat: m.final = False log = utils.get_logger(__name__) - log.warn("Returning non-final recipe for {}; one or more dependencies " - "was unsatisfiable:".format(m.dist())) + log.warn( + f"Returning non-final recipe for {m.dist()}; one or more dependencies " + "was unsatisfiable:" + ) if build_unsat: log.warn(f"Build: {build_unsat}") if host_unsat: @@ -642,18 +761,24 @@ def try_download(metadata, no_download_source, raise_error=False): try: source.provide(metadata) except subprocess.CalledProcessError as error: - print("Warning: failed to download source. If building, will try " - "again after downloading recipe dependencies.") + print( + "Warning: failed to download source. If building, will try " + "again after downloading recipe dependencies." + ) print("Error was: ") print(error) if not metadata.source_provided: if no_download_source: - raise ValueError("no_download_source specified, but can't fully render recipe without" - " downloading source. Please fix the recipe, or don't use " - "no_download_source.") + raise ValueError( + "no_download_source specified, but can't fully render recipe without" + " downloading source. Please fix the recipe, or don't use " + "no_download_source." + ) elif raise_error: - raise RuntimeError("Failed to download or patch source. Please see build log for info.") + raise RuntimeError( + "Failed to download or patch source. Please see build log for info." 
+ ) def reparse(metadata): @@ -662,38 +787,37 @@ def reparse(metadata): metadata.final = False sys.path.insert(0, metadata.config.build_prefix) sys.path.insert(0, metadata.config.host_prefix) - py_ver = '.'.join(metadata.config.variant['python'].split('.')[:2]) + py_ver = ".".join(metadata.config.variant["python"].split(".")[:2]) sys.path.insert(0, utils.get_site_packages(metadata.config.host_prefix, py_ver)) metadata.parse_until_resolved() metadata = finalize_metadata(metadata) return metadata -def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, - allow_no_other_outputs=False, bypass_env_check=False): - rendered_metadata = {} +def distribute_variants( + metadata: MetaData, + variants, + permit_unsatisfiable_variants: bool = False, + allow_no_other_outputs: bool = False, + bypass_env_check: bool = False, +) -> list[MetaDataTuple]: + rendered_metadata: dict[ + tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple + ] = {} need_source_download = True # don't bother distributing python if it's a noarch package, and figure out # which python version we prefer. `python_age` can use used to tweak which # python gets used here. if metadata.noarch or metadata.noarch_python: - from .conda_interface import VersionOrder - age = int(metadata.get_value('build/noarch_python_build_age', metadata.config.noarch_python_build_age)) - versions = [] - for variant in variants: - if 'python' in variant: - vo = variant['python'] - if vo not in versions: - versions.append(vo) - version_indices = sorted(range(len(versions)), key=lambda k: VersionOrder(versions[k].split(' ')[0])) - if age < 0: - age = 0 - elif age > len(versions) - 1: - age = len(versions) - 1 - build_ver = versions[version_indices[len(versions) - 1 - age]] - variants = filter_by_key_value(variants, 'python', build_ver, - 'noarch_python_reduction') + # filter variants by the newest Python version + version = sorted( + {version for variant in variants if (version := variant.get("python"))}, + key=lambda key: VersionOrder(key.split(" ")[0]), + )[-1] + variants = filter_by_key_value( + variants, "python", version, "noarch_python_reduction" + ) # store these for reference later metadata.config.variants = variants @@ -704,7 +828,7 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, recipe_requirements = metadata.extract_requirements_text() recipe_package_and_build_text = metadata.extract_package_and_build_text() recipe_text = recipe_package_and_build_text + recipe_requirements - if hasattr(recipe_text, 'decode'): + if hasattr(recipe_text, "decode"): recipe_text = recipe_text.decode() metadata.config.variant = variants[0] @@ -712,14 +836,15 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, top_loop = metadata.get_reduced_variant_set(used_variables) for variant in top_loop: - from conda_build.build import get_all_replacements + from .build import get_all_replacements + get_all_replacements(variant) mv = metadata.copy() mv.config.variant = variant - pin_run_as_build = variant.get('pin_run_as_build', {}) - if mv.numpy_xx and 'numpy' not in pin_run_as_build: - pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'} + pin_run_as_build = variant.get("pin_run_as_build", {}) + if mv.numpy_xx and "numpy" not in pin_run_as_build: + pin_run_as_build["numpy"] = {"min_pin": "x.x", "max_pin": "x.x"} conform_dict = {} for key in used_variables: @@ -729,17 +854,20 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, 
conform_dict[key] = variant[key] for key, values in conform_dict.items(): - mv.config.variants = (filter_by_key_value(mv.config.variants, key, values, - 'distribute_variants_reduction') or - mv.config.variants) + mv.config.variants = ( + filter_by_key_value( + mv.config.variants, key, values, "distribute_variants_reduction" + ) + or mv.config.variants + ) get_all_replacements(mv.config.variants) - pin_run_as_build = variant.get('pin_run_as_build', {}) - if mv.numpy_xx and 'numpy' not in pin_run_as_build: - pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'} + pin_run_as_build = variant.get("pin_run_as_build", {}) + if mv.numpy_xx and "numpy" not in pin_run_as_build: + pin_run_as_build["numpy"] = {"min_pin": "x.x", "max_pin": "x.x"} numpy_pinned_variants = [] for _variant in mv.config.variants: - _variant['pin_run_as_build'] = pin_run_as_build + _variant["pin_run_as_build"] = pin_run_as_build numpy_pinned_variants.append(_variant) mv.config.variants = numpy_pinned_variants @@ -752,39 +880,78 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, mv.parse_again() try: - mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs, - bypass_env_check=bypass_env_check) + mv.parse_until_resolved( + allow_no_other_outputs=allow_no_other_outputs, + bypass_env_check=bypass_env_check, + ) except SystemExit: pass - need_source_download = (not mv.needs_source_for_render or not mv.source_provided) + need_source_download = not mv.needs_source_for_render or not mv.source_provided - rendered_metadata[(mv.dist(), - mv.config.variant.get('target_platform', mv.config.subdir), - tuple((var, mv.config.variant.get(var)) - for var in mv.get_used_vars()))] = \ - (mv, need_source_download, None) + rendered_metadata[ + ( + mv.dist(), + mv.config.variant.get("target_platform", mv.config.subdir), + tuple((var, mv.config.variant.get(var)) for var in mv.get_used_vars()), + ) + ] = MetaDataTuple(mv, need_source_download, False) # list of tuples. # each tuple item is a tuple of 3 items: - # metadata, need_download, need_reparse_in_env + # metadata, need_download, need_reparse return list(rendered_metadata.values()) -def expand_outputs(metadata_tuples): +def expand_outputs( + metadata_tuples: Iterable[MetaDataTuple], +) -> list[tuple[dict, MetaData]]: """Obtain all metadata objects for all outputs from recipe. 
Useful for outputting paths.""" - expanded_outputs = OrderedDict() + from copy import deepcopy + + from .build import get_all_replacements - for (_m, download, reparse) in metadata_tuples: - from conda_build.build import get_all_replacements + expanded_outputs: dict[str, tuple[dict, MetaData]] = {} + + for _m, download, reparse in metadata_tuples: get_all_replacements(_m.config) - from copy import deepcopy - for (output_dict, m) in deepcopy(_m).get_output_metadata_set(permit_unsatisfiable_variants=False): + for output_dict, m in deepcopy(_m).get_output_metadata_set( + permit_unsatisfiable_variants=False + ): get_all_replacements(m.config) expanded_outputs[m.dist()] = (output_dict, m) return list(expanded_outputs.values()) -def render_recipe(recipe_path, config, no_download_source=False, variants=None, - permit_unsatisfiable_variants=True, reset_build_id=True, bypass_env_check=False): +@contextmanager +def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]: + """Open the recipe from a file (meta.yaml), directory (recipe), or tarball (package).""" + recipe = Path(recipe) + + if not recipe.exists(): + sys.exit(f"Error: non-existent: {recipe}") + elif recipe.is_dir(): + # read the recipe from the current directory + yield recipe + elif recipe.suffixes in [[".tar"], [".tar", ".gz"], [".tgz"], [".tar", ".bz2"]]: + # extract the recipe to a temporary directory + with TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: + tar.extractall(path=tmp) + yield Path(tmp) + elif recipe.suffix == ".yaml": + # read the recipe from the parent directory + yield recipe.parent + else: + sys.exit(f"Error: non-recipe: {recipe}") + + +def render_recipe( + recipe_dir: str | os.PathLike | Path, + config: Config, + no_download_source: bool = False, + variants: dict[str, Any] | None = None, + permit_unsatisfiable_variants: bool = True, + reset_build_id: bool = True, + bypass_env_check: bool = False, +) -> list[MetaDataTuple]: """Returns a list of tuples, each consisting of (metadata-object, needs_download, needs_render_in_env) @@ -792,70 +959,59 @@ def render_recipe(recipe_path, config, no_download_source=False, variants=None, You get one tuple per variant. Outputs are not factored in here (subpackages won't affect these results returned here.) """ - arg = recipe_path - if isfile(arg): - if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): - recipe_dir = tempfile.mkdtemp() - t = tarfile.open(arg, 'r:*') - t.extractall(path=recipe_dir) - t.close() - need_cleanup = True - elif arg.endswith('.yaml'): - recipe_dir = os.path.dirname(arg) - need_cleanup = False + with open_recipe(recipe_dir) as recipe: + try: + m = MetaData(str(recipe), config=config) + except exceptions.YamlParsingError as e: + sys.exit(e.error_msg()) + + # important: set build id *before* downloading source. Otherwise source goes into a different + # build folder. + if config.set_build_id: + m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id) + + # this source may go into a folder that doesn't match the eventual build folder. + # There's no way around it AFAICT. We must download the source to be able to render + # the recipe (from anything like GIT_FULL_HASH), but we can't know the final build + # folder until rendering is complete, because package names can have variant jinja2 in them. 
+ if m.needs_source_for_render and not m.source_provided: + try_download(m, no_download_source=no_download_source) + + if m.final: + if not getattr(m.config, "variants", None): + m.config.ignore_system_variants = True + if isfile(cbc_yaml := join(m.path, "conda_build_config.yaml")): + m.config.variant_config_files = [cbc_yaml] + m.config.variants = get_package_variants(m, variants=variants) + m.config.variant = m.config.variants[0] + return [MetaDataTuple(m, False, False)] else: - print("Ignoring non-recipe: %s" % arg) - return None, None - else: - recipe_dir = abspath(arg) - need_cleanup = False - - if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) - - try: - m = MetaData(recipe_dir, config=config) - except exceptions.YamlParsingError as e: - sys.stderr.write(e.error_msg()) - sys.exit(1) - - rendered_metadata = {} - - # important: set build id *before* downloading source. Otherwise source goes into a different - # build folder. - if config.set_build_id: - m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id) - - # this source may go into a folder that doesn't match the eventual build folder. - # There's no way around it AFAICT. We must download the source to be able to render - # the recipe (from anything like GIT_FULL_HASH), but we can't know the final build - # folder until rendering is complete, because package names can have variant jinja2 in them. - if m.needs_source_for_render and not m.source_provided: - try_download(m, no_download_source=no_download_source) - if m.final: - if not hasattr(m.config, 'variants') or not m.config.variant: - m.config.ignore_system_variants = True - if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')): - m.config.variant_config_files = [os.path.join(m.path, 'conda_build_config.yaml')] - m.config.variants = get_package_variants(m, variants=variants) - m.config.variant = m.config.variants[0] - rendered_metadata = [(m, False, False), ] - else: - # merge any passed-in variants with any files found - variants = get_package_variants(m, variants=variants) - - # when building, we don't want to fully expand all outputs into metadata, only expand - # whatever variants we have (i.e. expand top-level variants, not output-only variants) - rendered_metadata = distribute_variants(m, variants, - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - allow_no_other_outputs=True, bypass_env_check=bypass_env_check) - if need_cleanup: - utils.rm_rf(recipe_dir) - return rendered_metadata + # merge any passed-in variants with any files found + variants = get_package_variants(m, variants=variants) + + # when building, we don't want to fully expand all outputs into metadata, only expand + # whatever variants we have (i.e. expand top-level variants, not output-only variants) + return distribute_variants( + m, + variants, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + allow_no_other_outputs=True, + bypass_env_check=bypass_env_check, + ) # Keep this out of the function below so it can be imported by other modules. -FIELDS = ["package", "source", "build", "requirements", "test", "app", "outputs", "about", "extra"] +FIELDS = [ + "package", + "source", + "build", + "requirements", + "test", + "app", + "outputs", + "about", + "extra", +] # Next bit of stuff is to support YAML output in the order we expect. 
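As background for the representer plumbing in the next hunk: PyYAML dumps mappings in sorted-key order by default, so conda-build registers a custom representer to preserve the key order it wants. A minimal standalone sketch of that mechanism (the document keys here are just examples):

from collections import OrderedDict

import yaml


def ordered_representer(dumper, data):
    # Emit the OrderedDict as a plain YAML mapping, preserving insertion order.
    return dumper.represent_mapping("tag:yaml.org,2002:map", data.items())


yaml.add_representer(OrderedDict, ordered_representer)

doc = OrderedDict([("package", {"name": "example"}), ("build", {"number": 0})])
print(yaml.dump(doc, default_flow_style=False, indent=2))
# package:
#   name: example
# build:
#   number: 0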
@@ -868,11 +1024,11 @@ def to_omap(self): def _represent_omap(dumper, data): - return dumper.represent_mapping('tag:yaml.org,2002:map', data.to_omap()) + return dumper.represent_mapping("tag:yaml.org,2002:map", data.to_omap()) def _unicode_representer(dumper, uni): - node = yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=uni) + node = yaml.ScalarNode(tag="tag:yaml.org,2002:str", value=uni) return node @@ -889,20 +1045,28 @@ def ignore_aliases(self, data): unicode = None # silence pyflakes about unicode not existing in py3 -def output_yaml(metadata, filename=None, suppress_outputs=False): +def output_yaml( + metadata: MetaData, + filename: str | os.PathLike | Path | None = None, + suppress_outputs: bool = False, +) -> str: local_metadata = metadata.copy() - if suppress_outputs and local_metadata.is_output and 'outputs' in local_metadata.meta: - del local_metadata.meta['outputs'] - output = yaml.dump(_MetaYaml(local_metadata.meta), Dumper=_IndentDumper, - default_flow_style=False, indent=2) + if ( + suppress_outputs + and local_metadata.is_output + and "outputs" in local_metadata.meta + ): + del local_metadata.meta["outputs"] + output = yaml.dump( + _MetaYaml(local_metadata.meta), + Dumper=_IndentDumper, + default_flow_style=False, + indent=2, + ) if filename: - if any(sep in filename for sep in ('\\', '/')): - try: - os.makedirs(os.path.dirname(filename)) - except OSError: - pass - with open(filename, "w") as f: - f.write(output) - return "Wrote yaml to %s" % filename + filename = Path(filename) + filename.parent.mkdir(parents=True, exist_ok=True) + filename.write_text(output) + return f"Wrote yaml to {filename}" else: return output diff --git a/conda_build/skeletons/_example_skeleton.py b/conda_build/skeletons/_example_skeleton.py index 25ee72e23e..fc61c2d0b2 100644 --- a/conda_build/skeletons/_example_skeleton.py +++ b/conda_build/skeletons/_example_skeleton.py @@ -31,11 +31,13 @@ def add_parser(repos): "my_repo", help=""" Create recipe skeleton for packages hosted on my-repo.org - """,) + """, + ) my_repo.add_argument( "packages", - nargs='+', - help="my-repo packages to create recipe skeletons for.",) + nargs="+", + help="my-repo packages to create recipe skeletons for.", + ) # Add any additional parser arguments here diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 94a1efa5c7..31213054d1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -4,33 +4,35 @@ Tools for converting CPAN packages to conda recipes. 
""" +from __future__ import annotations import codecs -import hashlib -from pkg_resources import parse_version -from glob import glob import gzip +import hashlib import json import os -from os import makedirs -from os.path import basename, dirname, join, exists +import pickle import subprocess import sys import tempfile -import pickle from functools import lru_cache, partial - -from conda_build.conda_interface import get_index -from conda_build.conda_interface import TmpDownload, download -from conda_build.conda_interface import MatchSpec, Resolve -from conda_build.conda_interface import CondaHTTPError, CondaError - -from conda_build.config import get_or_merge_config -from conda_build.utils import on_win, check_call_env -from conda_build.variants import get_default_variant +from glob import glob +from os import makedirs +from os.path import basename, dirname, exists, join import requests -from conda_build import environ +from conda.core.index import get_index +from conda.exceptions import CondaError, CondaHTTPError +from conda.gateways.connection.download import TmpDownload, download +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.match_spec import MatchSpec +from conda.resolve import Resolve + +from .. import environ +from ..config import Config, get_or_merge_config +from ..utils import check_call_env, on_linux, on_win +from ..variants import get_default_variant +from ..version import _parse as parse_version CPAN_META = """\ {{% set name = "{packagename}" %}} @@ -146,46 +148,48 @@ class InvalidReleaseError(RuntimeError): - - ''' + """ An exception that is raised when a release is not available on MetaCPAN. - ''' + """ + pass class PerlTmpDownload(TmpDownload): - - ''' + """ Subclass Conda's TmpDownload to replace : in download filenames. Critical on win. - ''' + """ def __enter__(self): - if '://' not in self.url: + if "://" not in self.url: # if we provide the file itself, no tmp dir is created self.tmp_dir = None return self.url else: - if 'CHECKSUMS' in self.url: - turl = self.url.split('id/') + if "CHECKSUMS" in self.url: + turl = self.url.split("id/") filename = turl[1] else: filename = basename(self.url) - filename = filename.replace('::', '-') + filename = filename.replace("::", "-") self.tmp_dir = tempfile.mkdtemp() - home = os.path.expanduser('~') - base_dir = join(home, '.conda-build', 'cpan', - basename(self.url).replace('::', '-')) + home = os.path.expanduser("~") + base_dir = join( + home, ".conda-build", "cpan", basename(self.url).replace("::", "-") + ) dst = join(base_dir, filename) - dst = dst.replace('::', '-') + dst = dst.replace("::", "-") base_dir = dirname(dst) if not exists(base_dir): makedirs(base_dir) - dst = get_pickle_file_path(cache_dir=base_dir, filename_prefix=filename, other_hashed=(self.url,)) + dst = get_pickle_file_path( + cache_dir=base_dir, filename_prefix=filename, other_hashed=(self.url,) + ) if not exists(os.path.dirname(dst)): makedirs(os.path.dirname(dst)) if not exists(dst): @@ -196,22 +200,35 @@ def __enter__(self): def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): import tarfile - from conda_build import source - cached_path, _ = source.download_to_cache(src_cache, '', - {'url': package_url, - 'sha256': sha256}) + + from .. 
import source + + cached_path, _ = source.download_to_cache( + src_cache, "", {"url": package_url, "sha256": sha256} + ) result = [] with tarfile.open(cached_path) as tf: - need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77', '.f95', '.f03')) for f in tf]) + need_f = any( + [ + f.name.lower().endswith((".f", ".f90", ".f77", ".f95", ".f03")) + for f in tf + ] + ) # Fortran builds use CC to perform the link (they do not call the linker directly). - need_c = True if need_f else \ - any([f.name.lower().endswith('.c') for f in tf]) - need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) - for f in tf]) - need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) - need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ - any([f.name.lower().endswith(('/makefile', '/makevars')) - for f in tf]) + need_c = ( + True + if need_f + else any([f.name.lower().endswith((".c", ".xs")) for f in tf]) + ) + need_cxx = any( + [f.name.lower().endswith((".cxx", ".cpp", ".cc", ".c++")) for f in tf] + ) + need_autotools = any([f.name.lower().endswith("/configure") for f in tf]) + need_make = ( + True + if any((need_autotools, need_f, need_cxx, need_c)) + else any([f.name.lower().endswith(("/makefile", "/makevars")) for f in tf]) + ) if need_c or need_cxx or need_f: result.append("{{ compiler('c') }}") if need_cxx: @@ -226,7 +243,9 @@ def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): if need_make: result.append("make # [not win]") result.append("m2-make # [win]") - print(f"INFO :: For {os.path.basename(package_url)}, we need the following build tools:\n{result}") + print( + f"INFO :: For {os.path.basename(package_url)}, we need the following build tools:\n{result}" + ) return result @@ -242,11 +261,10 @@ def get_cpan_api_url(url, colons): with gzip.open(json_path) as dist_json_file: output = dist_json_file.read() if hasattr(output, "decode"): - output = output.decode('utf-8-sig') + output = output.decode("utf-8-sig") rel_dict = json.loads(output) except OSError: - rel_dict = json.loads(codecs.open( - json_path, encoding='utf-8').read()) + rel_dict = json.loads(codecs.open(json_path, encoding="utf-8").read()) except CondaHTTPError: rel_dict = None return rel_dict @@ -255,11 +273,11 @@ def get_cpan_api_url(url, colons): # Probably uses a system cpan? TODO :: Fix this. def package_exists(package_name): try: - cmd = ['cpan', '-D', package_name] + cmd = ["cpan", "-D", package_name] if on_win: - cmd.insert(0, '/c') - cmd.insert(0, '/d') - cmd.insert(0, 'cmd.exe') + cmd.insert(0, "/c") + cmd.insert(0, "/d") + cmd.insert(0, "cmd.exe") check_call_env(cmd) in_repo = True except subprocess.CalledProcessError: @@ -269,31 +287,33 @@ def package_exists(package_name): def md5d_file_and_other(filename, other_hashed): sha1 = hashlib.md5() - with open(filename, 'rb') as f: + with open(filename, "rb") as f: while True: data = f.read(65536) if not data: break sha1.update(data) for other in other_hashed: - sha1.update(other.encode('utf-8') if hasattr(other, 'encode') else other) + sha1.update(other.encode("utf-8") if hasattr(other, "encode") else other) return sha1.hexdigest() def get_pickle_file_path(cache_dir, filename_prefix, other_hashed=()): - h = 'h' + md5d_file_and_other(__file__, other_hashed)[2:10] - return os.path.join(cache_dir, filename_prefix.replace('::', '-') + '.' + h + '.p') + h = "h" + md5d_file_and_other(__file__, other_hashed)[2:10] + return os.path.join(cache_dir, filename_prefix.replace("::", "-") + "." 
+ h + ".p") def load_or_pickle(filename_prefix, base_folder, data_partial, key): # It might be nice to hash the entire code tree of data_partial # along with all the args to it via hashlib instead but that's # difficult. - pickled = get_pickle_file_path(cache_dir=base_folder, filename_prefix=filename_prefix + key) + pickled = get_pickle_file_path( + cache_dir=base_folder, filename_prefix=filename_prefix + key + ) # if exists(pickled): # os.unlink(pickled) if exists(pickled): - with open(pickled, 'rb') as f: + with open(pickled, "rb") as f: key_stored = pickle.load(f) if key and key_stored and key == key_stored: return pickle.load(f) @@ -302,7 +322,7 @@ def load_or_pickle(filename_prefix, base_folder, data_partial, key): os.makedirs(os.path.dirname(pickled)) except: pass - with open(pickled, 'wb') as f: + with open(pickled, "wb") as f: pickle.dump(key, f) pickle.dump(result, f) return result @@ -310,59 +330,87 @@ def load_or_pickle(filename_prefix, base_folder, data_partial, key): def install_perl_get_core_modules(version): try: - from conda_build.config import Config - from conda_build.conda_interface import TemporaryDirectory - config = Config() - if sys.platform.startswith('win'): - subdirs = ('win-64', 'Library', 'bin', 'perl.exe') - elif sys.platform.startswith('linux'): - subdirs = ('linux-64', 'bin', 'perl') + if on_win: + subdirs = ("win-64", "Library", "bin", "perl.exe") + elif on_linux: + subdirs = ("linux-64", "bin", "perl") else: - subdirs = ('osx-64', 'bin', 'perl') + subdirs = ("osx-64", "bin", "perl") # Return one of the dist things instead? with TemporaryDirectory() as tmpdir: - environ.create_env(tmpdir, [f'perl={version}'], env='host', config=config, subdir=subdirs[0]) - args = [f'{join(tmpdir, *subdirs[1:])}', '-e', - 'use Module::CoreList; print join "\n", Module::CoreList->find_modules(qr/.*/);'] - from subprocess import check_output - all_core_modules = check_output(args, shell=False).decode('utf-8').replace('\r\n', '\n').split('\n') + environ.create_env( + tmpdir, + [f"perl={version}"], + env="host", + config=config, + subdir=subdirs[0], + ) + args = [ + f"{join(tmpdir, *subdirs[1:])}", + "-e", + "use Module::CoreList; " + "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); " + 'print join "\n", @modules;', + ] + try: + all_core_modules = ( + subprocess.check_output(args, shell=False) + .decode("utf-8") + .replace("\r\n", "\n") + .split("\n") + ) + except Exception as e: + print( + f"Failed to query perl={version} for core modules list, ran:\n" + f"{' '.join(args)}" + ) + print(e.message) return all_core_modules except Exception as e: - print("Failed to query perl={} for core modules list, attempted command was:\n{}".format(version, - ' '.join(args))) + print(f"Failed to query perl={version} for core modules list.") print(e.message) return [] def get_core_modules_for_this_perl_version(version, cache_dir): - return load_or_pickle('perl-core-modules', - base_folder=cache_dir, - data_partial=partial(install_perl_get_core_modules, version), key=version) + return load_or_pickle( + "perl-core-modules", + base_folder=cache_dir, + data_partial=partial(install_perl_get_core_modules, version), + key=version, + ) # meta_cpan_url="http://api.metacpan.org", -def skeletonize(packages, output_dir=".", version=None, - meta_cpan_url="https://fastapi.metacpan.org/v1", - recursive=False, force=False, config=None, write_core=False): - ''' +def skeletonize( + packages: list[str], + output_dir: str = ".", + version: str | None = None, + 
meta_cpan_url: str = "https://fastapi.metacpan.org/v1", + recursive: bool = False, + force: bool = False, + config: Config | None = None, + write_core: bool = False, +) -> None: + """ Loops over packages, outputting conda recipes converted from CPAN metata. - ''' + """ config = get_or_merge_config(config) - cache_dir = os.path.join(config.src_cache_root, '.conda-build', 'pickled.cb') + cache_dir = os.path.join(config.src_cache_root, ".conda-build", "pickled.cb") # TODO :: Make a temp env. with perl (which we need anyway) and use whatever version # got installed instead of this. Also allow the version to be specified. - perl_version = config.variant.get('perl', get_default_variant(config)['perl']) + perl_version = config.variant.get("perl", get_default_variant(config)["perl"]) core_modules = get_core_modules_for_this_perl_version(perl_version, cache_dir) # wildcards are not valid for perl perl_version = perl_version.replace(".*", "") package_dicts = {} - indent = '\n - ' - indent_core = '\n #- ' + indent = "\n - " + indent_core = "\n #- " processed_packages = set() orig_version = version new_packages = [] @@ -381,8 +429,8 @@ def skeletonize(packages, output_dir=".", version=None, package, module = packages.pop() # If we're passed version in the same format as `PACKAGE=VERSION` # update version - if '=' in package: - package, _, version = package.partition('=') + if "=" in package: + package, _, version = package.partition("=") else: version = orig_version @@ -397,106 +445,129 @@ def skeletonize(packages, output_dir=".", version=None, # distribution name, urls. The lot. Instead we mess about with other API end-points # getting a load of nonsense. orig_package = package - package = dist_for_module(meta_cpan_url, cache_dir, core_modules, module if module else package) - if package == 'perl': - print(("WARNING: {0} is a Perl core module that is not developed " + - "outside of Perl, so we are skipping creating a recipe " + - "for it.").format(orig_package)) + package = dist_for_module( + meta_cpan_url, cache_dir, core_modules, module if module else package + ) + if package == "perl": + print( + f"WARNING: {orig_package} is a Perl core module that is not developed " + f"outside of Perl, so we are skipping creating a recipe " + f"for it." + ) continue - elif package not in {orig_package, orig_package.replace('::', '-')}: + elif package not in {orig_package, orig_package.replace("::", "-")}: print( - ("WARNING: {0} was part of the {1} distribution, so we are " + - "making a recipe for {1} instead.").format(orig_package, - package) + f"WARNING: {orig_package} was part of the {package} distribution, so we are " + f"making a recipe for {package} instead." 
) - latest_release_data = get_release_info(meta_cpan_url, cache_dir, core_modules, - module if module else orig_package, version) + latest_release_data = get_release_info( + meta_cpan_url, + cache_dir, + core_modules, + module if module else orig_package, + version, + ) packagename = perl_to_conda(package) # Skip duplicates - if ((version is not None and ((packagename + '-' + version) in - processed_packages)) or - ((packagename + '-' + latest_release_data['version']) in - processed_packages)): + if ( + version is not None + and ((packagename + "-" + version) in processed_packages) + ) or ( + (packagename + "-" + latest_release_data["version"]) in processed_packages + ): continue - d = package_dicts.setdefault(package, {'packagename': packagename, - 'build_depends': '', - 'host_depends': '', - 'run_depends': '', - 'build_comment': '# ', - 'test_commands': '', - 'usesha256': '', - 'useurl': '', - 'source_comment': '', - 'summary': "''", - 'import_tests': ''}) + d = package_dicts.setdefault( + package, + { + "packagename": packagename, + "build_depends": "", + "host_depends": "", + "run_depends": "", + "build_comment": "# ", + "test_commands": "", + "usesha256": "", + "useurl": "", + "source_comment": "", + "summary": "''", + "import_tests": "", + }, + ) # Fetch all metadata from CPAN if version is None: release_data = latest_release_data else: - release_data = get_release_info(meta_cpan_url, cache_dir, core_modules, package, - parse_version(version)) + release_data = get_release_info( + meta_cpan_url, cache_dir, core_modules, package, parse_version(version) + ) # Check if recipe directory already exists - dir_path = join(output_dir, packagename, release_data['version']) + dir_path = join(output_dir, packagename, release_data["version"]) # Add Perl version to core module requirements, since these are empty # packages, unless we're newer than what's in core if metacpan_api_is_core_version(meta_cpan_url, package): - if not write_core: - print('We found core module %s. Skipping recipe creation.' % - packagename) + print(f"We found core module {packagename}. 
Skipping recipe creation.") continue - d['useurl'] = '#' - d['usesha256'] = '#' - d['source_comment'] = '#' + d["useurl"] = "#" + d["usesha256"] = "#" + d["source_comment"] = "#" empty_recipe = True # Add dependencies to d if not in core, or newer than what's in core else: - deps, packages_to_append = \ - deps_for_package(package, release_data=release_data, - output_dir=output_dir, cache_dir=cache_dir, - meta_cpan_url=meta_cpan_url, recursive=recursive, core_modules=core_modules) + deps, packages_to_append = deps_for_package( + package, + release_data=release_data, + output_dir=output_dir, + cache_dir=cache_dir, + meta_cpan_url=meta_cpan_url, + recursive=recursive, + core_modules=core_modules, + ) # If this is something we're downloading, get MD5 - d['cpanurl'] = '' - d['sha256'] = '' - if release_data.get('download_url'): - d['cpanurl'] = release_data['download_url'] - d['sha256'], size = get_checksum_and_size(release_data['download_url']) - print("Using url {} ({}) for {}.".format(d['cpanurl'], size, package)) - src_build_depends = get_build_dependencies_from_src_archive(release_data['download_url'], - d['sha256'], config.src_cache) + d["cpanurl"] = "" + d["sha256"] = "" + if release_data.get("download_url"): + d["cpanurl"] = release_data["download_url"] + d["sha256"], size = get_checksum_and_size(release_data["download_url"]) + print("Using url {} ({}) for {}.".format(d["cpanurl"], size, package)) + src_build_depends = get_build_dependencies_from_src_archive( + release_data["download_url"], d["sha256"], config.src_cache + ) else: src_build_depends = [] - d['useurl'] = '#' - d['usesha256'] = '#' - d['source_comment'] = '#' + d["useurl"] = "#" + d["usesha256"] = "#" + d["source_comment"] = "#" - d['build_depends'] += indent.join([''] + src_build_depends) + d["build_depends"] += indent.join([""] + src_build_depends) -# d['build_depends'] += indent_core.join([''] + list(deps['build']['core'] | -# deps['run']['core'])) + # d['build_depends'] += indent_core.join([''] + list(deps['build']['core'] | + # deps['run']['core'])) - d['host_depends'] += indent.join([''] + list(deps['build']['noncore'] | - deps['run']['noncore'])) + d["host_depends"] += indent.join( + [""] + list(deps["build"]["noncore"] | deps["run"]["noncore"]) + ) # run_exports will set these, but: # TODO :: Add ignore_run_exports for things in deps['build'] that are not also # in deps['run'] - d['run_depends'] += indent_core.join([''] + list(deps['run']['noncore'])) + d["run_depends"] += indent_core.join([""] + list(deps["run"]["noncore"])) # Make sure we append any packages before continuing for pkg in packages_to_append: if pkg not in packages: packages.append(pkg) else: - print("INFO :: Already building package {} (module {})".format(*pkg)) + print( + "INFO :: Already building package {} (module {})".format(*pkg) + ) empty_recipe = False # If we are recursively getting packages for a particular version @@ -504,57 +575,59 @@ def skeletonize(packages, output_dir=".", version=None, version = None if exists(dir_path) and not force: print( - 'Directory %s already exists and you have not specified --force ' % dir_path) + f"Directory {dir_path} already exists and you have not specified --force " + ) continue elif exists(dir_path) and force: - print('Directory %s already exists, but forcing recipe creation' % dir_path) + print(f"Directory {dir_path} already exists, but forcing recipe creation") try: - d['homeurl'] = release_data['resources']['homepage'] + d["homeurl"] = release_data["resources"]["homepage"] except KeyError: - 
d['homeurl'] = 'http://metacpan.org/pod/' + package - if 'abstract' in release_data: + d["homeurl"] = "http://metacpan.org/pod/" + package + if "abstract" in release_data: # TODO this does not escape quotes in a YAML friendly manner - summary = repr(release_data['abstract']).lstrip('u') - d['summary'] = summary + summary = repr(release_data["abstract"]).lstrip("u") + d["summary"] = summary # d['summary'] = repr(release_data['abstract']).lstrip('u') try: - d['license'] = (release_data['license'][0] if - isinstance(release_data['license'], list) else - release_data['license']) + d["license"] = ( + release_data["license"][0] + if isinstance(release_data["license"], list) + else release_data["license"] + ) except KeyError: - d['license'] = 'perl_5' - d['version'] = release_data['version'] + d["license"] = "perl_5" + d["version"] = release_data["version"] - processed_packages.add(packagename + '-' + d['version']) + processed_packages.add(packagename + "-" + d["version"]) # Create import tests - module_prefix = package.replace('::', '-').split('-')[0] - if 'provides' in release_data: - for provided_mod in sorted(set(release_data['provides'])): + module_prefix = package.replace("::", "-").split("-")[0] + if "provides" in release_data: + for provided_mod in sorted(set(release_data["provides"])): # Filter out weird modules that don't belong - if (provided_mod.startswith(module_prefix) and - '::_' not in provided_mod): - d['import_tests'] += indent + provided_mod - if d['import_tests']: - d['import_comment'] = '' + if provided_mod.startswith(module_prefix) and "::_" not in provided_mod: + d["import_tests"] += indent + provided_mod + if d["import_tests"]: + d["import_comment"] = "" else: - d['import_comment'] = '# ' + d["import_comment"] = "# " if not exists(dir_path): makedirs(dir_path) # Write recipe files to a directory # TODO def write_recipe - print("Writing recipe for {}-{}".format(packagename, d['version'])) - with open(join(dir_path, 'meta.yaml'), 'wb') as f: - f.write(CPAN_META.format(**d).encode('utf-8')) - with open(join(dir_path, 'build.sh'), 'wb') as f: + print("Writing recipe for {}-{}".format(packagename, d["version"])) + with open(join(dir_path, "meta.yaml"), "wb") as f: + f.write(CPAN_META.format(**d).encode("utf-8")) + with open(join(dir_path, "build.sh"), "wb") as f: if empty_recipe: f.write(b'#!/bin/bash\necho "Nothing to do."\n') else: - f.write(CPAN_BUILD_SH.format(**d).encode('utf-8')) - with open(join(dir_path, 'bld.bat'), 'w') as f: + f.write(CPAN_BUILD_SH.format(**d).encode("utf-8")) + with open(join(dir_path, "bld.bat"), "w") as f: if empty_recipe: f.write('echo "Nothing to do."\n') else: @@ -565,8 +638,9 @@ def skeletonize(packages, output_dir=".", version=None, def is_core_version(core_version, version): if core_version is None: return False - elif core_version is not None and ((version in [None, '']) or - (core_version >= parse_version(version))): + elif core_version is not None and ( + (version in [None, ""]) or (core_version >= parse_version(version)) + ): return True else: return False @@ -578,41 +652,49 @@ def add_parser(repos): help=""" Create recipe skeleton for packages hosted on the Comprehensive Perl Archive Network (CPAN) (cpan.org). 
- """,) + """, + ) cpan.add_argument( "packages", - nargs='+', - help="CPAN packages to create recipe skeletons for.",) + nargs="+", + help="CPAN packages to create recipe skeletons for.", + ) cpan.add_argument( "--output-dir", help="Directory to write recipes to (default: %(default)s).", - default=".",) + default=".", + ) cpan.add_argument( "--version", - help="Version to use. Applies to all packages.",) + help="Version to use. Applies to all packages.", + ) cpan.add_argument( "--meta-cpan-url", - default='https://fastapi.metacpan.org/v1', - help="URL to use for MetaCPAN API. It must include a version, such as v1",) + default="https://fastapi.metacpan.org/v1", + help="URL to use for MetaCPAN API. It must include a version, such as v1", + ) cpan.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist (default: %(default)s).') + action="store_true", + help="Create recipes for dependencies if they do not already exist (default: %(default)s).", + ) cpan.add_argument( "--force", - action='store_true', - help='Force overwrite of existing recipes (default: %(default)s).') + action="store_true", + help="Force overwrite of existing recipes (default: %(default)s).", + ) cpan.add_argument( "--write_core", - action='store_true', - help='Write recipes for perl core modules (default: %(default)s). ') + action="store_true", + help="Write recipes for perl core modules (default: %(default)s). ", + ) @lru_cache(maxsize=None) def latest_pkg_version(pkg): - ''' + """ :returns: the latest version of the specified conda package available - ''' + """ r = Resolve(get_index()) try: pkg_list = sorted(r.get_pkgs(MatchSpec(pkg))) @@ -625,9 +707,10 @@ def latest_pkg_version(pkg): return pkg_version -def deps_for_package(package, release_data, output_dir, cache_dir, - meta_cpan_url, recursive, core_modules): - ''' +def deps_for_package( + package, release_data, output_dir, cache_dir, meta_cpan_url, recursive, core_modules +): + """ Build the sets of dependencies and packages we need recipes for. This should only be called for non-core modules/distributions, as dependencies are ignored for core modules. @@ -647,83 +730,96 @@ def deps_for_package(package, release_data, output_dir, cache_dir, :returns: Build dependencies, runtime dependencies, and set of packages to add to list of recipes to create. :rtype: 3-tuple of sets - ''' + """ # Create lists of dependencies - deps = {'build': {'core': set(), 'noncore': set()}, - 'test': {'core': set(), 'noncore': set()}, - 'run': {'core': set(), 'noncore': set()}} - phase_to_dep_type = {'build': 'build', - 'configure': 'build', - 'test': 'test', - 'runtime': 'run', - # TODO :: Check this, I am unsure about it .. - # These (sometimes?) reference sub-components of modules - # e.g. inc::MMPackageStash instead of inc which does not - # get found on metacpan fastapi. We may need to chop the - # suffix off an try again (and repeat until we find it). - 'x_Dist_Zilla': None, - 'develop': None} + deps = { + "build": {"core": set(), "noncore": set()}, + "test": {"core": set(), "noncore": set()}, + "run": {"core": set(), "noncore": set()}, + } + phase_to_dep_type = { + "build": "build", + "configure": "build", + "test": "test", + "runtime": "run", + # TODO :: Check this, I am unsure about it .. + # These (sometimes?) reference sub-components of modules + # e.g. inc::MMPackageStash instead of inc which does not + # get found on metacpan fastapi. We may need to chop the + # suffix off an try again (and repeat until we find it). 
+ "x_Dist_Zilla": None, + "develop": None, + } packages_to_append = set() - print('Processing dependencies for %s...' % package, end='') + print(f"Processing dependencies for {package}...", end="") sys.stdout.flush() - if not release_data.get('dependency'): + if not release_data.get("dependency"): return deps, packages_to_append # release_data['dependency'] = ['FindBin-libs' if r == 'FindBin' else r for r in release_data['dependency']] new_deps = [] - for dep in release_data['dependency']: - if 'phase' in dep and dep['phase'] == 'develop': - print("Skipping develop dependency {}".format(dep['module'])) + for dep in release_data["dependency"]: + if "phase" in dep and dep["phase"] == "develop": + print("Skipping develop dependency {}".format(dep["module"])) continue - elif 'module' in dep and dep['module'] == 'FindBin': - dep['module'] = 'FindBin::Bin' - elif 'module' in dep and dep['module'] == 'Exporter': - dep['module'] = 'Exporter' + elif "module" in dep and dep["module"] == "FindBin": + dep["module"] = "FindBin::Bin" + elif "module" in dep and dep["module"] == "Exporter": + dep["module"] = "Exporter" new_deps.append(dep) - release_data['dependency'] = new_deps + release_data["dependency"] = new_deps - for dep_dict in release_data['dependency']: + for dep_dict in release_data["dependency"]: # Only care about requirements try: - if dep_dict['relationship'] == 'requires': - if not phase_to_dep_type[dep_dict['phase']]: + if dep_dict["relationship"] == "requires": + if not phase_to_dep_type[dep_dict["phase"]]: continue - if 'module' in dep_dict and dep_dict['module'] == 'common::sense': - print('debug common::sense version mismatch') - print('.', end='') + if "module" in dep_dict and dep_dict["module"] == "common::sense": + print("debug common::sense version mismatch") + print(".", end="") sys.stdout.flush() # Format dependency string (with Perl trailing dist comment) - orig_dist = dist_for_module(meta_cpan_url, cache_dir, core_modules, dep_dict['module']) + orig_dist = dist_for_module( + meta_cpan_url, cache_dir, core_modules, dep_dict["module"] + ) dep_entry = perl_to_conda(orig_dist) # Skip perl as a dependency, since it's already in list - if orig_dist.lower() == 'perl': + if orig_dist.lower() == "perl": continue # See if version is specified # There is a dep version and a pkg_version ... why? - if dep_dict['version'] in {'', 'undef'}: - dep_dict['version'] = '0' - dep_version = parse_version(dep_dict['version']) + if dep_dict["version"] in {"", "undef"}: + dep_dict["version"] = "0" + dep_version = parse_version(dep_dict["version"]) # Make sure specified version is valid # TODO def valid_release_info try: - get_release_info(meta_cpan_url, cache_dir, core_modules, dep_dict['module'], dep_version) + get_release_info( + meta_cpan_url, + cache_dir, + core_modules, + dep_dict["module"], + dep_version, + ) except InvalidReleaseError: - print(('WARNING: The version of %s listed as a ' + - 'dependency for %s, %s, is not available on MetaCPAN, ' + - 'so we are just assuming the latest version is ' + - 'okay.') % (orig_dist, package, str(dep_version))) - dep_version = parse_version('0') + print( + f"WARNING: The version of {orig_dist} listed as a " + f"dependency for {package}, {dep_version}, is not available on MetaCPAN, " + f"so we are just assuming the latest version is " + f"okay." + ) + dep_version = parse_version("0") # Add version number to dependency, if it's newer than latest # we have package for. 
- if loose_version(dep_version) > loose_version('0'): - + if loose_version(dep_version) > loose_version("0"): pkg_version = latest_pkg_version(dep_entry) # If we don't have a package, use core version as version if pkg_version is None: @@ -731,7 +827,9 @@ def deps_for_package(package, release_data, output_dir, cache_dir, # perl_version, # config=config) # print('dep entry is {}'.format(dep_entry)) - pkg_version = metacpan_api_get_core_version(core_modules, dep_dict['module']) + pkg_version = metacpan_api_get_core_version( + core_modules, dep_dict["module"] + ) # If no package is available at all, it's in the core, or # the latest is already good enough, don't specify version. # This is because conda doesn't support > in version @@ -739,39 +837,40 @@ def deps_for_package(package, release_data, output_dir, cache_dir, # J = Conda does support >= ? try: if pkg_version is not None and ( - loose_version(dep_version) > loose_version(pkg_version)): - dep_entry += ' ' + dep_dict['version'] + loose_version(dep_version) > loose_version(pkg_version) + ): + dep_entry += " " + dep_dict["version"] except Exception: - print( - 'We have got an expected error with dependency versions') - print('Module {}'.format(dep_dict['module'])) - print(f'Pkg_version {pkg_version}') - print(f'Dep Version {dep_version}') + print("We have got an expected error with dependency versions") + print("Module {}".format(dep_dict["module"])) + print(f"Pkg_version {pkg_version}") + print(f"Dep Version {dep_version}") # If recursive, check if we have a recipe for this dependency if recursive: # If dependency entry is versioned, make sure this is too - if ' ' in dep_entry: - if not exists(join(output_dir, dep_entry.replace('::', - '-'))): - packages_to_append.add(('='.join((orig_dist, - dep_dict['version']))), - dep_dict['module']) - elif not glob(join(output_dir, (dep_entry + '-[v1-9][0-9.]*'))): - packages_to_append.add((orig_dist, dep_dict['module'])) + if " " in dep_entry: + if not exists(join(output_dir, dep_entry.replace("::", "-"))): + packages_to_append.add( + ("=".join((orig_dist, dep_dict["version"]))), + dep_dict["module"], + ) + elif not glob(join(output_dir, (dep_entry + "-[v1-9][0-9.]*"))): + packages_to_append.add((orig_dist, dep_dict["module"])) # Add to appropriate dependency list - core = metacpan_api_is_core_version( - meta_cpan_url, dep_dict['module']) + core = metacpan_api_is_core_version(meta_cpan_url, dep_dict["module"]) - cb_phase = phase_to_dep_type[dep_dict['phase']] + cb_phase = phase_to_dep_type[dep_dict["phase"]] if cb_phase: if core: - deps[cb_phase]['core'].add(dep_entry) + deps[cb_phase]["core"].add(dep_entry) else: - deps[cb_phase]['noncore'].add(dep_entry) + deps[cb_phase]["noncore"].add(dep_entry) else: - print("Skipping {} dependency {}".format(dep_dict['phase'], dep_entry)) + print( + "Skipping {} dependency {}".format(dep_dict["phase"], dep_entry) + ) # seemingly new in conda 4.3: HTTPErrors arise when we ask for # something that is a # perl module, but not a package. @@ -779,50 +878,53 @@ def deps_for_package(package, release_data, output_dir, cache_dir, except (CondaError, CondaHTTPError): continue - print(f'module {package} adds {packages_to_append}') + print(f"module {package} adds {packages_to_append}") return deps, packages_to_append def dist_for_module(cpan_url, cache_dir, core_modules, module): - ''' + """ Given a name that could be a module or a distribution, return the distribution. 
- ''' - if 'Git::Check' in module: - print('debug this') + """ + if "Git::Check" in module: + print("debug this") # First check if it is a core module, those mask distributions here, or at least they # do in the case of `import Exporter` distribution = None try: mod_dict = core_module_dict(core_modules, module) - distribution = mod_dict['distribution'] + distribution = mod_dict["distribution"] except: # Next check if its already a distribution rel_dict = release_module_dict(cpan_url, cache_dir, module) if rel_dict is not None: - if rel_dict['distribution'] != module.replace('::', '-'): - print("WARNING :: module {} found in distribution {}".format(module, rel_dict['distribution'])) - distribution = rel_dict['distribution'] + if rel_dict["distribution"] != module.replace("::", "-"): + print( + "WARNING :: module {} found in distribution {}".format( + module, rel_dict["distribution"] + ) + ) + distribution = rel_dict["distribution"] if not distribution: - print('debug') + print("debug") assert distribution, "dist_for_module must succeed" return distribution def release_module_dict_direct(cpan_url, cache_dir, module): - - if 'Dist-Zilla-Plugin-Git' in module: + if "Dist-Zilla-Plugin-Git" in module: print(f"debug {module}") - elif 'Dist::Zilla::Plugin::Git' in module: + elif "Dist::Zilla::Plugin::Git" in module: print(f"debug {module}") - elif 'Time::Zone' in module: + elif "Time::Zone" in module: print(f"debug {module}") try: - url_module = f'{cpan_url}/module/{module}' - print(f'INFO :: url_module {url_module}') + url_module = f"{cpan_url}/module/{module}" + print(f"INFO :: url_module {url_module}") rel_dict = get_cpan_api_url(url_module, colons=True) except RuntimeError: rel_dict = None @@ -830,14 +932,16 @@ def release_module_dict_direct(cpan_url, cache_dir, module): rel_dict = None if not rel_dict: print(f"WARNING :: Did not find rel_dict for module {module}") - distribution = module.replace('::', '-') - if not rel_dict or 'dependency' not in rel_dict: - if rel_dict and 'distribution' in rel_dict: - distribution = rel_dict['distribution'] + distribution = module.replace("::", "-") + if not rel_dict or "dependency" not in rel_dict: + if rel_dict and "distribution" in rel_dict: + distribution = rel_dict["distribution"] else: - print(f"WARNING :: 'distribution' was not in {module}'s module info, making it up") + print( + f"WARNING :: 'distribution' was not in {module}'s module info, making it up" + ) try: - url_release = f'{cpan_url}/release/{distribution}' + url_release = f"{cpan_url}/release/{distribution}" rel_dict2 = get_cpan_api_url(url_release, colons=False) rel_dict = rel_dict2 except RuntimeError: @@ -846,35 +950,37 @@ def release_module_dict_direct(cpan_url, cache_dir, module): rel_dict = None else: print(f"INFO :: OK, found 'dependency' in module {module}") - if not rel_dict or 'dependency' not in rel_dict: - print("WARNING :: No dependencies found for module {} in distribution {}\n" - "WARNING :: Please check {} and {}".format(module, distribution, url_module, url_release)) + if not rel_dict or "dependency" not in rel_dict: + print( + f"WARNING :: No dependencies found for module {module} in distribution {distribution}\n" + f"WARNING :: Please check {url_module} and {url_release}" + ) return rel_dict def release_module_dict(cpan_url, cache_dir, module): - if 'Regexp-Common' in module: + if "Regexp-Common" in module: print("debug") rel_dict = release_module_dict_direct(cpan_url, cache_dir, module) if not rel_dict: # In this case, the module may be a submodule of another dist, let's 
try something else. # An example of this is Dist::Zilla::Plugin::Git::Check. - pickled = get_pickle_file_path(cache_dir, module + '.dl_url') - url = f'{cpan_url}/download_url/{module}' + pickled = get_pickle_file_path(cache_dir, module + ".dl_url") + url = f"{cpan_url}/download_url/{module}" try: os.makedirs(os.path.dirname(pickled)) except: pass download(url, pickled) - with open(pickled, 'rb') as dl_url_json: + with open(pickled, "rb") as dl_url_json: output = dl_url_json.read() if hasattr(output, "decode"): - output = output.decode('utf-8-sig') + output = output.decode("utf-8-sig") dl_url_dict = json.loads(output) - if dl_url_dict['release'].endswith(dl_url_dict['version']): + if dl_url_dict["release"].endswith(dl_url_dict["version"]): # Easy case. print(f"Up to date: {module}") - dist = dl_url_dict['release'].replace('-' + dl_url_dict['version'], '') + dist = dl_url_dict["release"].replace("-" + dl_url_dict["version"], "") else: # Difficult case. print(f"Not up to date: {module}") @@ -891,8 +997,8 @@ def release_module_dict(cpan_url, cache_dir, module): # # .. there is no field that lists a version of '2.33' in the data. We need # to inspect the tarball. - dst = os.path.join(cache_dir, basename(dl_url_dict['download_url'])) - download(dl_url_dict['download_url'], dst) + dst = os.path.join(cache_dir, basename(dl_url_dict["download_url"])) + download(dl_url_dict["download_url"], dst) with gzip.open(dst) as dist_json_file: output = dist_json_file.read() # (base) Rays-Mac-Pro:Volumes rdonnelly$ cpan -D Time::Zone @@ -902,55 +1008,56 @@ def release_module_dict(cpan_url, cache_dir, module): def core_module_dict_old(cpan_url, module): - if 'FindBin' in module: - print('debug') - if 'Exporter' in module: - print('debug') + if "FindBin" in module: + print("debug") + if "Exporter" in module: + print("debug") try: - mod_dict = get_cpan_api_url( - f'{cpan_url}/module/{module}', colons=True) + mod_dict = get_cpan_api_url(f"{cpan_url}/module/{module}", colons=True) # If there was an error, report it except CondaHTTPError as e: - sys.exit(('Error: Could not find module or distribution named' - ' %s on MetaCPAN. Error was: %s') % (module, e.message)) + sys.exit( + f"Error: Could not find module or distribution named" + f" {module} on MetaCPAN. Error was: {e.message}" + ) else: - mod_dict = {'distribution': 'perl'} + mod_dict = {"distribution": "perl"} return mod_dict def core_module_dict(core_modules, module): if module in core_modules: - return {'distribution': 'perl'} + return {"distribution": "perl"} return None @lru_cache(maxsize=None) def metacpan_api_is_core_version(cpan_url, module): - if 'FindBin' in module: - print('debug') - url = f'{cpan_url}/release/{module}' + if "FindBin" in module: + print("debug") + url = f"{cpan_url}/release/{module}" url = url.replace("::", "-") req = requests.get(url) if req.status_code == 200: return False else: - url = f'{cpan_url}/module/{module}' + url = f"{cpan_url}/module/{module}" req = requests.get(url) if req.status_code == 200: return True else: - sys.exit(('Error: Could not find module or distribution named' - ' %s on MetaCPAN.') - % (module)) + sys.exit( + "Error: Could not find module or distribution named" + f" {module} on MetaCPAN." 
+ ) def metacpan_api_get_core_version(core_modules, module): - module_dict = core_module_dict(core_modules, module) try: - version = module_dict['module'][-1]['version'] + version = module_dict["module"][-1]["version"] except Exception: version = None @@ -958,10 +1065,10 @@ def metacpan_api_get_core_version(core_modules, module): def get_release_info(cpan_url, cache_dir, core_modules, package, version): - ''' + """ Return a dictionary of the JSON information stored at cpan.metacpan.org corresponding to the given package/dist/module. - ''' + """ # Transform module name to dist name if necessary orig_package = package package = dist_for_module(cpan_url, cache_dir, core_modules, package) @@ -969,60 +1076,67 @@ def get_release_info(cpan_url, cache_dir, core_modules, package, version): # Get latest info to find author, which is necessary for retrieving a # specific version try: - rel_dict = get_cpan_api_url( - f'{cpan_url}/release/{package}', colons=False) - rel_dict['version'] = str(rel_dict['version']).lstrip('v') + rel_dict = get_cpan_api_url(f"{cpan_url}/release/{package}", colons=False) + rel_dict["version"] = str(rel_dict["version"]).lstrip("v") except CondaHTTPError: core_version = metacpan_api_is_core_version(cpan_url, package) - if core_version is not None and (version is None or - (version == core_version)): - print(("WARNING: {0} is not available on MetaCPAN, but it's a " + - "core module, so we do not actually need the source file, " + - "and are omitting the URL and MD5 from the recipe " + - "entirely.").format(orig_package)) - rel_dict = {'version': str(core_version), 'download_url': '', - 'license': ['perl_5'], 'dependency': {}} + if core_version is not None and (version is None or (version == core_version)): + print( + f"WARNING: {orig_package} is not available on MetaCPAN, but it's a " + f"core module, so we do not actually need the source file, " + f"and are omitting the URL and MD5 from the recipe " + f"entirely." + ) + rel_dict = { + "version": str(core_version), + "download_url": "", + "license": ["perl_5"], + "dependency": {}, + } else: - sys.exit(("Error: Could not find any versions of package %s on " + - "MetaCPAN.") % (orig_package)) + sys.exit( + ("Error: Could not find any versions of package %s on " + "MetaCPAN.") + % (orig_package) + ) version_mismatch = False if version is not None: version_str = str(version) - rel_version = str(rel_dict['version']) + rel_version = str(rel_dict["version"]) loose_str = str(parse_version(version_str)) try: version_mismatch = (version is not None) and ( - loose_version('0') != loose_version(version_str) and - parse_version(rel_version) != loose_version(version_str)) + loose_version("0") != loose_version(version_str) + and parse_version(rel_version) != loose_version(version_str) + ) # print(version_mismatch) except Exception as e: - print('We have some strange version mismatches. Please investigate.') + print("We have some strange version mismatches. 
Please investigate.") print(e) - print(f'Package {package}') - print(f'Version {version}') - print('Pkg Version {}'.format(rel_dict['version'])) - print(f'Loose Version {loose_str}') + print(f"Package {package}") + print(f"Version {version}") + print("Pkg Version {}".format(rel_dict["version"])) + print(f"Loose Version {loose_str}") # TODO - check for major/minor version mismatches # Allow for minor if version_mismatch: - print(f'WARNING :: Version mismatch in {package}') - print(f'WARNING :: Version: {version_str}, RelVersion: {rel_version}') + print(f"WARNING :: Version mismatch in {package}") + print(f"WARNING :: Version: {version_str}, RelVersion: {rel_version}") return rel_dict def get_checksum_and_size(download_url): - ''' + """ Looks in the CHECKSUMS file in the same directory as the file specified at download_url and returns the sha256 hash and file size. - ''' + """ base_url = dirname(download_url) filename = basename(download_url) - with PerlTmpDownload(base_url + '/CHECKSUMS') as checksum_path: + with PerlTmpDownload(base_url + "/CHECKSUMS") as checksum_path: with open(checksum_path) as checksum_file: found_file = False sha256 = None @@ -1038,11 +1152,11 @@ def get_checksum_and_size(download_url): size = line.split("=>")[1].strip("', ") break # This should never happen, but just in case - elif line.startswith('}'): + elif line.startswith("}"): break return sha256, size def perl_to_conda(name): - ''' Sanitizes a Perl package name for use as a conda package name. ''' - return 'perl-' + name.replace('::', '-').lower() + """Sanitizes a Perl package name for use as a conda package name.""" + return "perl-" + name.replace("::", "-").lower() diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 9c9e0e2c72..93958333fb 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -4,22 +4,33 @@ Tools for converting Cran packages to conda recipes. """ +from __future__ import annotations import argparse import copy -from itertools import chain -from os import makedirs, listdir, sep, environ -from os.path import (basename, commonprefix, exists, isabs, isdir, - isfile, join, normpath, realpath, relpath) +import hashlib import re import subprocess import sys -import hashlib - -import requests import tarfile -import zipfile import unicodedata +import zipfile +from itertools import chain +from os import environ, listdir, makedirs, sep +from os.path import ( + basename, + commonprefix, + exists, + isabs, + isdir, + isfile, + join, + normpath, + realpath, + relpath, +) + +import requests import yaml # try to import C dumper @@ -28,12 +39,23 @@ except ImportError: from yaml import SafeDumper -from conda_build import source, metadata -from conda_build.config import get_or_merge_config -from conda_build.conda_interface import TemporaryDirectory, cc_conda_build -from conda_build.license_family import allowed_license_families, guess_license_family -from conda_build.utils import rm_rf, ensure_list -from conda_build.variants import get_package_variants, DEFAULT_VARIANTS +from typing import TYPE_CHECKING + +from conda.base.context import context +from conda.common.io import dashlist +from conda.gateways.disk.create import TemporaryDirectory + +from .. 
import source +from ..config import get_or_merge_config +from ..license_family import allowed_license_families, guess_license_family +from ..metadata import MetaData +from ..utils import ensure_list, rm_rf +from ..variants import DEFAULT_VARIANTS, get_package_variants + +if TYPE_CHECKING: + from typing import Literal + + from ..config import Config SOURCE_META = """\ {archive_keys} @@ -186,7 +208,7 @@ popd fi fi -""" +""" # noqa: E501 CRAN_BUILD_SH_BINARY = """\ #!/bin/bash @@ -215,84 +237,85 @@ exit 0 """ -INDENT = '\n - ' +INDENT = "\n - " CRAN_KEYS = [ - 'Site', - 'Archs', - 'Depends', - 'Enhances', - 'Imports', - 'License', - 'License_is_FOSS', - 'License_restricts_use', - 'LinkingTo', - 'MD5sum', - 'NeedsCompilation', - 'OS_type', - 'Package', - 'Path', - 'Priority', - 'Suggests', - 'Version', - - 'Title', - 'Author', - 'Maintainer', + "Site", + "Archs", + "Depends", + "Enhances", + "Imports", + "License", + "License_is_FOSS", + "License_restricts_use", + "LinkingTo", + "MD5sum", + "NeedsCompilation", + "OS_type", + "Package", + "Path", + "Priority", + "Suggests", + "Version", + "Title", + "Author", + "Maintainer", ] # The following base/recommended package names are derived from R's source # tree (R-3.0.2/share/make/vars.mk). Hopefully they don't change too much # between versions. R_BASE_PACKAGE_NAMES = ( - 'base', - 'compiler', - 'datasets', - 'graphics', - 'grDevices', - 'grid', - 'methods', - 'parallel', - 'splines', - 'stats', - 'stats4', - 'tcltk', - 'tools', - 'utils', + "base", + "compiler", + "datasets", + "graphics", + "grDevices", + "grid", + "methods", + "parallel", + "splines", + "stats", + "stats4", + "tcltk", + "tools", + "utils", ) R_RECOMMENDED_PACKAGE_NAMES = ( - 'MASS', - 'lattice', - 'Matrix', - 'nlme', - 'survival', - 'boot', - 'cluster', - 'codetools', - 'foreign', - 'KernSmooth', - 'rpart', - 'class', - 'nnet', - 'spatial', - 'mgcv', + "MASS", + "lattice", + "Matrix", + "nlme", + "survival", + "boot", + "cluster", + "codetools", + "foreign", + "KernSmooth", + "rpart", + "class", + "nnet", + "spatial", + "mgcv", ) # Stolen then tweaked from debian.deb822.PkgRelation.__dep_RE. VERSION_DEPENDENCY_REGEX = re.compile( - r'^\s*(?P[a-zA-Z0-9.+\-]{1,})' - r'(\s*\(\s*(?P[>=<]+)\s*' - r'(?P[0-9a-zA-Z:\-+~.]+)\s*\))' - r'?(\s*\[(?P[\s!\w\-]+)\])?\s*$' + r"^\s*(?P[a-zA-Z0-9.+\-]{1,})" + r"(\s*\(\s*(?P[>=<]+)\s*" + r"(?P[0-9a-zA-Z:\-+~.]+)\s*\))" + r"?(\s*\[(?P[\s!\w\-]+)\])?\s*$" ) -target_platform_bash_test_by_sel = {'linux': '=~ linux.*', - 'linux32': '== linux-32', - 'linux64': '== linux-64', - 'win32': '== win-32', - 'win64': '== win-64', - 'osx': '== osx-64'} +target_platform_bash_test_by_sel = { + "linux": "=~ linux.*", + "linux32": "== linux-32", + "linux64": "== linux-64", + "win32": "== win-32", + "win64": "== win-64", + "osx": "== osx-64", +} def package_exists(package_name): @@ -316,7 +339,7 @@ def add_parser(repos): ) cran.add_argument( "packages", - nargs='+', + nargs="+", help="""CRAN packages to create recipe skeletons for.""", ) cran.add_argument( @@ -353,101 +376,102 @@ def add_parser(repos): ) cran.add_argument( "--r-interp", - default='r-base', + default="r-base", help="Declare R interpreter package", ) cran.add_argument( "--use-binaries-ver", - help=("Repackage binaries from version provided by argument instead of building " - "from source."), + help=( + "Repackage binaries from version provided by argument instead of building " + "from source." 
+ ), ) cran.add_argument( "--use-when-no-binary", - choices=('src', - 'old', - 'src-old', - 'old-src', - 'error'), - default='src', + choices=("src", "old", "src-old", "old-src", "error"), + default="src", help="""Sometimes binaries are not available at the correct version for a given platform (macOS). You can use this flag to specify what fallback to take, either compiling from source or using an older - binary or trying one then the other.""" + binary or trying one then the other.""", ) cran.add_argument( "--use-noarch-generic", - action='store_true', - dest='use_noarch_generic', - help=("Mark packages that do not need compilation as `noarch: generic`"), + action="store_true", + dest="use_noarch_generic", + help="Mark packages that do not need compilation as `noarch: generic`", ) cran.add_argument( "--use-rtools-win", - action='store_true', + action="store_true", help="Use Rtools when building from source on Windows", ) cran.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.', + action="store_true", + help="Create recipes for dependencies if they do not already exist.", ) cran.add_argument( "--no-recursive", - action='store_false', - dest='recursive', + action="store_false", + dest="recursive", help="Don't create recipes for dependencies if they do not already exist.", ) cran.add_argument( - '--no-archive', - action='store_false', - dest='archive', + "--no-archive", + action="store_false", + dest="archive", help="Don't include an Archive download url.", ) cran.add_argument( - '--allow-archived', - action='store_true', - dest='allow_archived', + "--allow-archived", + action="store_true", + dest="allow_archived", help="If the package has been archived, download the latest version.", ) cran.add_argument( "--version-compare", - action='store_true', + action="store_true", help="""Compare the package version of the recipe with the one available - on CRAN. Exits 1 if a newer version is available and 0 otherwise.""" + on CRAN. Exits 1 if a newer version is available and 0 otherwise.""", ) cran.add_argument( "--update-policy", - action='store', - choices=('error', - 'skip-up-to-date', - 'skip-existing', - 'overwrite', - 'merge-keep-build-num', - 'merge-incr-build-num'), - default='error', + action="store", + choices=( + "error", + "skip-up-to-date", + "skip-existing", + "overwrite", + "merge-keep-build-num", + "merge-incr-build-num", + ), + default="error", help="""Dictates what to do when existing packages are encountered in the output directory (set by --output-dir). In the present implementation, the merge options avoid overwriting bld.bat and build.sh and only manage copying across patches, and the `build/{number,script_env}` fields. When the version changes, both merge options reset `build/number` to 0. When the version does - not change they either keep the old `build/number` or else increase it by one.""" + not change they either keep the old `build/number` or else increase it by one.""", ) cran.add_argument( - '-m', '--variant-config-files', - default=cc_conda_build.get('skeleton_config_yaml', None), + "-m", + "--variant-config-files", + default=context.conda_build.get("skeleton_config_yaml", None), help="""Variant config file to add. These yaml files can contain - keys such as `cran_mirror`. Only one can be provided here.""" + keys such as `cran_mirror`. 
Only one can be provided here.""", ) cran.add_argument( "--add-cross-r-base", - action='store_true', + action="store_true", default=False, - help="""Add cross-r-base to build requirements for cross compiling""" + help="""Add cross-r-base to build requirements for cross compiling""", ) cran.add_argument( "--no-comments", - action='store_true', + action="store_true", default=False, - help="""Do not include instructional comments in recipe files""" + help="""Do not include instructional comments in recipe files""", ) @@ -457,19 +481,19 @@ def dict_from_cran_lines(lines): if not line: continue try: - if ': ' in line: - (k, v) = line.split(': ', 1) + if ": " in line: + (k, v) = line.split(": ", 1) else: # Sometimes fields are included but left blank, e.g.: # - Enhances in data.tree # - Suggests in corpcor - (k, v) = line.split(':', 1) + (k, v) = line.split(":", 1) except ValueError: - sys.exit("Error: Could not parse metadata (%s)" % line) + sys.exit(f"Error: Could not parse metadata ({line})") d[k] = v # if k not in CRAN_KEYS: # print("Warning: Unknown key %s" % k) - d['orig_lines'] = lines + d["orig_lines"] = lines return d @@ -494,17 +518,17 @@ def remove_package_line_continuations(chunk): 'License: GPL (>= 2)', 'NeedsCompilation: no'] """ # NOQA - continuation = (' ', '\t') + continuation = (" ", "\t") continued_ix = None continued_line = None had_continuation = False accumulating_continuations = False - chunk.append('') + chunk.append("") - for (i, line) in enumerate(chunk): + for i, line in enumerate(chunk): if line.startswith(continuation): - line = ' ' + line.lstrip() + line = " " + line.lstrip() if accumulating_continuations: assert had_continuation continued_line += line @@ -527,7 +551,7 @@ def remove_package_line_continuations(chunk): # Remove the None(s). chunk = [c for c in chunk if c] - chunk.append('') + chunk.append("") return chunk @@ -542,25 +566,30 @@ def yaml_quote_string(string): Note that this function is NOT general. """ - return yaml.dump(string, Dumper=SafeDumper).replace('\n...\n', '').replace('\n', '\n ').rstrip('\n ') + return ( + yaml.dump(string, indent=True, Dumper=SafeDumper) + .replace("\n...\n", "") + .replace("\n", "\n ") + .rstrip("\n ") + ) # Due to how we render the metadata there can be significant areas of repeated newlines. # This collapses them and also strips any trailing spaces. 
def clear_whitespace(string): lines = [] - last_line = '' + last_line = "" for line in string.splitlines(): line = line.rstrip() - if not (line == '' and last_line == ''): + if not (line == "" and last_line == ""): lines.append(line) last_line = line - return '\n'.join(lines) + return "\n".join(lines) def read_description_contents(fp): bytes = fp.read() - text = bytes.decode('utf-8', errors='replace') + text = bytes.decode("utf-8", errors="replace") text = clear_whitespace(text) lines = remove_package_line_continuations(text.splitlines()) return dict_from_cran_lines(lines) @@ -568,46 +597,53 @@ def read_description_contents(fp): def get_archive_metadata(path, verbose=True): if verbose: - print('Reading package metadata from %s' % path) - if basename(path) == 'DESCRIPTION': - with open(path, 'rb') as fp: + print(f"Reading package metadata from {path}") + if basename(path) == "DESCRIPTION": + with open(path, "rb") as fp: return read_description_contents(fp) elif tarfile.is_tarfile(path): - with tarfile.open(path, 'r') as tf: + with tarfile.open(path, "r") as tf: for member in tf: - if re.match(r'^[^/]+/DESCRIPTION$', member.name): + if re.match(r"^[^/]+/DESCRIPTION$", member.name): fp = tf.extractfile(member) return read_description_contents(fp) - elif path.endswith('.zip'): - with zipfile.ZipFile(path, 'r') as zf: + elif path.endswith(".zip"): + with zipfile.ZipFile(path, "r") as zf: for member in zf.infolist(): - if re.match(r'^[^/]+/DESCRIPTION$', member.filename): - fp = zf.open(member, 'r') + if re.match(r"^[^/]+/DESCRIPTION$", member.filename): + fp = zf.open(member, "r") return read_description_contents(fp) else: - sys.exit('Cannot extract a DESCRIPTION from file %s' % path) - sys.exit('%s does not seem to be a CRAN package (no DESCRIPTION) file' % path) + sys.exit(f"Cannot extract a DESCRIPTION from file {path}") + sys.exit(f"{path} does not seem to be a CRAN package (no DESCRIPTION) file") def get_latest_git_tag(config): # SO says to use taggerdate instead of committerdate, but that is invalid for lightweight tags. - p = subprocess.Popen(['git', 'for-each-ref', - 'refs/tags', - '--sort=-committerdate', - '--format=%(refname:short)', - '--count=1'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, - cwd=config.work_dir) + p = subprocess.Popen( + [ + "git", + "for-each-ref", + "refs/tags", + "--sort=-committerdate", + "--format=%(refname:short)", + "--count=1", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=config.work_dir, + ) stdout, stderr = p.communicate() - stdout = stdout.decode('utf-8') - stderr = stderr.decode('utf-8') + stdout = stdout.decode("utf-8") + stderr = stderr.decode("utf-8") if stderr or p.returncode: - sys.exit("Error: git tag failed (%s)" % stderr) + sys.exit(f"Error: git tag failed ({stderr})") tags = stdout.strip().splitlines() if not tags: sys.exit("Error: no tags found") - print("Using tag %s" % tags[-1]) + print(f"Using tag {tags[-1]}") return tags[-1] @@ -617,7 +653,7 @@ def _ssl_no_verify(): This provides a workaround for users in some corporate environments where MITM style proxies make it difficult to fetch data over HTTPS. 
""" - return environ.get('SSL_NO_VERIFY', '').strip().lower() in ('1', 'true') + return environ.get("SSL_NO_VERIFY", "").strip().lower() in ("1", "true") def get_session(output_dir, verbose=True): @@ -628,12 +664,14 @@ def get_session(output_dir, verbose=True): import cachecontrol.caches except ImportError: if verbose: - print("Tip: install CacheControl and lockfile (conda packages) to cache the " - "CRAN metadata") + print( + "Tip: install CacheControl and lockfile (conda packages) to cache the " + "CRAN metadata" + ) else: - session = cachecontrol.CacheControl(session, - cache=cachecontrol.caches.FileCache(join(output_dir, - '.web_cache'))) + session = cachecontrol.CacheControl( + session, cache=cachecontrol.caches.FileCache(join(output_dir, ".web_cache")) + ) return session @@ -645,31 +683,33 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): r.raise_for_status() except requests.exceptions.HTTPError as e: if e.response.status_code == 404: - print("No archive directory for package %s" % package) + print(f"No archive directory for package {package}") return [] raise versions = [] - for p, dt in re.findall(r'\1\s*]*>([^<]*)', r.text): - if p.endswith('.tar.gz') and '_' in p: - name, version = p.rsplit('.', 2)[0].split('_', 1) + for p, dt in re.findall( + r'\1\s*]*>([^<]*)', r.text + ): + if p.endswith(".tar.gz") and "_" in p: + name, version = p.rsplit(".", 2)[0].split("_", 1) versions.append((dt.strip(), version)) return [v for dt, v in sorted(versions, reverse=True)] def get_cran_index(cran_url, session, verbose=True): if verbose: - print("Fetching main index from %s" % cran_url) + print(f"Fetching main index from {cran_url}") r = session.get(cran_url + "/src/contrib/") r.raise_for_status() records = {} for p in re.findall(r'\1', r.text): - if p.endswith('.tar.gz') and '_' in p: - name, version = p.rsplit('.', 2)[0].split('_', 1) + if p.endswith(".tar.gz") and "_" in p: + name, version = p.rsplit(".", 2)[0].split("_", 1) records[name.lower()] = (name, version) r = session.get(cran_url + "/src/contrib/Archive/") r.raise_for_status() for p in re.findall(r'\1/', r.text): - if re.match(r'^[A-Za-z]', p): + if re.match(r"^[A-Za-z]", p): records.setdefault(p.lower(), (p, None)) return records @@ -681,7 +721,7 @@ def make_array(m, key, allow_empty=False): except: old_vals = [] if old_vals or allow_empty: - result.append(key.split('/')[-1] + ":") + result.append(key.split("/")[-1] + ":") for old_val in old_vals: result.append(f"{INDENT}{old_val}") return result @@ -690,23 +730,25 @@ def make_array(m, key, allow_empty=False): def existing_recipe_dir(output_dir, output_suffix, package, version): result = None if version: - package = package + '-' + version.replace('-', '_') + package = package + "-" + version.replace("-", "_") if exists(join(output_dir, package)): result = normpath(join(output_dir, package)) elif exists(join(output_dir, package + output_suffix)): result = normpath(join(output_dir, package + output_suffix)) - elif exists(join(output_dir, 'r-' + package + output_suffix)): - result = normpath(join(output_dir, 'r-' + package + output_suffix)) + elif exists(join(output_dir, "r-" + package + output_suffix)): + result = normpath(join(output_dir, "r-" + package + output_suffix)) return result def strip_end(string, end): if string.endswith(end): - return string[:-len(end)] + return string[: -len(end)] return string -def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version=None): +def package_to_inputs_dict( + output_dir, output_suffix, 
git_tag, package: str, version=None +): """ Converts `package` (*) into a tuple of: @@ -733,37 +775,47 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version= """ if isfile(package): return None - print("Parsing input package %s:" % package) - package = strip_end(package, '/') + print(f"Parsing input package {package}:") + package = strip_end(package, "/") package = strip_end(package, sep) - if 'github.com' in package: - package = strip_end(package, '.git') + if "github.com" in package: + package = strip_end(package, ".git") pkg_name = basename(package).lower() - pkg_name = strip_end(pkg_name, '-feedstock') + pkg_name = strip_end(pkg_name, "-feedstock") if output_suffix: pkg_name = strip_end(pkg_name, output_suffix) - if pkg_name.startswith('r-'): + if pkg_name.startswith("r-"): pkg_name = pkg_name[2:] - if package.startswith('file://'): - location = package.replace('file://', '') + if package.startswith("file://"): + location = package.replace("file://", "") pkg_filename = basename(location) - pkg_name = re.match(r'(.*)_(.*)', pkg_filename).group(1).lower() - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) + pkg_name = re.match(r"(.*)_(.*)", pkg_filename).group(1).lower() + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) elif isabs(package): commp = commonprefix((package, output_dir)) if commp != output_dir: - raise RuntimeError("package {} specified with abs path outside of output-dir {}".format( - package, output_dir)) + raise RuntimeError( + f"package {package} specified with abs path outside of output-dir {output_dir}" + ) location = package - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) - elif 'github.com' in package: + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) + elif "github.com" in package: location = package - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) else: - location = existing_location = existing_recipe_dir(output_dir, output_suffix, package, version) + location = existing_location = existing_recipe_dir( + output_dir, output_suffix, package, version + ) + m: MetaData | None if existing_location: try: - m = metadata.MetaData(existing_location) + m = MetaData(existing_location) except: # Happens when the folder exists but contains no recipe. m = None @@ -773,56 +825,87 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version= # It can still be the case that a package without 'github.com' in the location does really # come from there, for that we need to inspect the existing metadata's source/git_url. old_git_rev = git_tag - if location and m and 'github.com' not in location: - git_url = m.get_value('source/git_url', '') - if 'github.com' in git_url: + if location and m and "github.com" not in location: + git_url = m.get_value("source/git_url", "") + if "github.com" in git_url: location = git_url - old_git_rev = m.get_value('source/git_rev', None) + old_git_rev = m.get_value("source/git_rev", None) - vstr = '-' + version.replace('-', '_') if version else '' - new_location = join(output_dir, 'r-' + pkg_name + vstr + output_suffix) + vstr = "-" + version.replace("-", "_") if version else "" + new_location = join(output_dir, "r-" + pkg_name + vstr + output_suffix) print(f".. 
name: {pkg_name} location: {location} new_location: {new_location}") - return {'pkg-name': pkg_name, - 'location': location, - 'old-git-rev': old_git_rev, - 'old-metadata': m, - 'new-location': new_location, - 'version': version} + return { + "pkg-name": pkg_name, + "location": location, + "old-git-rev": old_git_rev, + "old-metadata": m, + "new-location": new_location, + "version": version, + } def get_available_binaries(cran_url, details): - url = cran_url + '/' + details['dir'] + url = cran_url + "/" + details["dir"] response = requests.get(url) response.raise_for_status() - ext = details['ext'] + ext = details["ext"] for filename in re.findall(r'<td><a href="([^"]+)">\1</a></td>', response.text): if filename.endswith(ext): - pkg, _, ver = filename.rpartition('_') + pkg, _, ver = filename.rpartition("_") ver, _, _ = ver.rpartition(ext) - details['binaries'].setdefault(pkg, []).append((ver, url + filename)) + details["binaries"].setdefault(pkg, []).append((ver, url + filename)) def remove_comments(template): - re_comment = re.compile(r'^\s*#\s') - lines = template.split('\n') + re_comment = re.compile(r"^\s*#\s") + lines = template.split("\n") lines_no_comments = [line for line in lines if not re_comment.match(line)] - return '\n'.join(lines_no_comments) - - -def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None, version=None, - git_tag=None, cran_url=None, recursive=False, archive=True, - version_compare=False, update_policy='', r_interp='r-base', use_binaries_ver=None, - use_noarch_generic=False, use_when_no_binary='src', use_rtools_win=False, config=None, - variant_config_files=None, allow_archived=False, add_cross_r_base=False, - no_comments=False): - - if use_when_no_binary != 'error' and \ - use_when_no_binary != 'src' and \ - use_when_no_binary != 'old' and \ - use_when_no_binary != 'old-src': + return "\n".join(lines_no_comments) + + +def skeletonize( + in_packages: list[str], + output_dir: str = ".", + output_suffix: str = "", + add_maintainer: str | None = None, + version: str | None = None, + git_tag: str | None = None, + cran_url: str | None = None, + recursive: bool = False, + archive: bool = True, + version_compare: bool = False, + update_policy: Literal[ + "error", + "skip-up-to-date", + "skip-existing", + "overwrite", + "merge-keep-build-num", + "merge-incr-build-num", + ] + | None = None, + r_interp: str = "r-base", + use_binaries_ver: str | None = None, + use_noarch_generic: bool = False, + use_when_no_binary: Literal["error", "src", "old", "old-src"] = "src", + use_rtools_win: bool = False, + config: Config | None = None, + variant_config_files: list[str] | None = None, + allow_archived: bool = False, + add_cross_r_base: bool = False, + no_comments: bool = False, +) -> None: + if ( + use_when_no_binary != "error" + and use_when_no_binary != "src" + and use_when_no_binary != "old" + and use_when_no_binary != "old-src" + ): print(f"ERROR: --use_when_no_binary={use_when_no_binary} not yet implemented") sys.exit(1) + + m: MetaData + output_dir = realpath(output_dir) config = get_or_merge_config(config, variant_config_files=variant_config_files) @@ -833,64 +916,74 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No if not cran_url: with TemporaryDirectory() as t: _variant = get_package_variants(t, config)[0] - cran_url = ensure_list(_variant.get('cran_mirror', DEFAULT_VARIANTS['cran_mirror']))[0] + cran_url = ensure_list( + _variant.get("cran_mirror", DEFAULT_VARIANTS["cran_mirror"]) + )[0] if len(in_packages) > 1 and version_compare: raise
ValueError("--version-compare only works with one package at a time") - if update_policy == 'error' and not in_packages: + if update_policy == "error" and not in_packages: raise ValueError("At least one package must be supplied") package_dicts = {} package_list = [] - cran_url = cran_url.rstrip('/') + cran_url = cran_url.rstrip("/") # Get cran index lazily so we don't have to go to CRAN # for a github repo or a local tarball cran_index = None - cran_layout_template = \ - {'source': {'selector': '{others}', - 'dir': 'src/contrib/', - 'ext': '.tar.gz', - # If we had platform filters we would change this to: - # build_for_linux or is_github_url or is_tarfile - 'use_this': True}, - 'win-64': {'selector': 'win64', - 'dir': f'bin/windows/contrib/{use_binaries_ver}/', - 'ext': '.zip', - 'use_this': True if use_binaries_ver else False}, - 'osx-64': {'selector': 'osx', - 'dir': 'bin/macosx/el-capitan/contrib/{}/'.format( - use_binaries_ver), - 'ext': '.tgz', - 'use_this': True if use_binaries_ver else False}} + cran_layout_template = { + "source": { + "selector": "{others}", + "dir": "src/contrib/", + "ext": ".tar.gz", + # If we had platform filters we would change this to: + # build_for_linux or is_github_url or is_tarfile + "use_this": True, + }, + "win-64": { + "selector": "win64", + "dir": f"bin/windows/contrib/{use_binaries_ver}/", + "ext": ".zip", + "use_this": True if use_binaries_ver else False, + }, + "osx-64": { + "selector": "osx", + "dir": f"bin/macosx/el-capitan/contrib/{use_binaries_ver}/", + "ext": ".tgz", + "use_this": True if use_binaries_ver else False, + }, + } # Figure out what binaries are available once: for archive_type, archive_details in cran_layout_template.items(): - archive_details['binaries'] = dict() - if archive_type != 'source' and archive_details['use_this']: + archive_details["binaries"] = dict() + if archive_type != "source" and archive_details["use_this"]: get_available_binaries(cran_url, archive_details) for package in in_packages: - inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version) + inputs_dict = package_to_inputs_dict( + output_dir, output_suffix, git_tag, package, version + ) if inputs_dict: - package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}}) + package_dicts.update({inputs_dict["pkg-name"]: {"inputs": inputs_dict}}) for package_name, package_dict in package_dicts.items(): package_list.append(package_name) while package_list: - inputs = package_dicts[package_list.pop()]['inputs'] - location = inputs['location'] - pkg_name = inputs['pkg-name'] - version = inputs['version'] - is_github_url = location and 'github.com' in location + inputs = package_dicts[package_list.pop()]["inputs"] + location = inputs["location"] + pkg_name = inputs["pkg-name"] + version = inputs["version"] + is_github_url = location and "github.com" in location is_tarfile = location and isfile(location) and tarfile.is_tarfile(location) is_archive = False - url = inputs['location'] + url = inputs["location"] - dir_path = inputs['new-location'] + dir_path = inputs["new-location"] print(f"Making/refreshing recipe for {pkg_name}") # Bodges GitHub packages into cran_metadata @@ -899,33 +992,44 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No elif is_github_url or is_tarfile: rm_rf(config.work_dir) - m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config) - source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir) + m = MetaData.fromdict({"source": 
{"git_url": location}}, config=config) + source.git_source( + m.get_section("source"), m.config.git_cache, m.config.work_dir + ) new_git_tag = git_tag if git_tag else get_latest_git_tag(config) - p = subprocess.Popen(['git', 'checkout', new_git_tag], stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=config.work_dir) + p = subprocess.Popen( + ["git", "checkout", new_git_tag], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=config.work_dir, + ) stdout, stderr = p.communicate() - stdout = stdout.decode('utf-8') - stderr = stderr.decode('utf-8') + stdout = stdout.decode("utf-8") + stderr = stderr.decode("utf-8") if p.returncode: - sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" % - (new_git_tag, stderr.strip())) + sys.exit( + f"Error: 'git checkout {new_git_tag}' failed ({stderr.strip()}).\n" + "Invalid tag?" + ) if stdout: print(stdout, file=sys.stdout) if stderr: print(stderr, file=sys.stderr) DESCRIPTION = join(config.work_dir, "DESCRIPTION") if not isfile(DESCRIPTION): - sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION") - sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION") + sub_description_pkg = join(config.work_dir, "pkg", "DESCRIPTION") + sub_description_name = join( + config.work_dir, location.split("/")[-1], "DESCRIPTION" + ) if isfile(sub_description_pkg): DESCRIPTION = sub_description_pkg elif isfile(sub_description_name): DESCRIPTION = sub_description_name else: - sys.exit("%s does not appear to be a valid R package " - "(no DESCRIPTION file in %s, %s)" - % (location, sub_description_pkg, sub_description_name)) + sys.exit( + f"{location} does not appear to be a valid R package " + f"(no DESCRIPTION file in {sub_description_pkg}, {sub_description_name})" + ) cran_package = get_archive_metadata(DESCRIPTION) else: @@ -933,15 +1037,19 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) if pkg_name.lower() not in cran_index: - sys.exit("Package %s not found" % pkg_name) + sys.exit(f"Package {pkg_name} not found") package, cran_version = cran_index[pkg_name.lower()] if cran_version and (not version or version == cran_version): version = cran_version elif version and not archive: - print(f'ERROR: Version {version} of package {package} is archived, but --no-archive was selected') + print( + f"ERROR: Version {version} of package {package} is archived, but --no-archive was selected" + ) sys.exit(1) elif not version and not cran_version and not allow_archived: - print("ERROR: Package %s is archived; to build, use --allow-archived or a --version value" % pkg_name) + print( + f"ERROR: Package {pkg_name} is archived; to build, use --allow-archived or a --version value" + ) sys.exit(1) else: is_archive = True @@ -951,66 +1059,75 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No if not version: version = all_versions[0] elif version not in all_versions: - msg = f'ERROR: Version {version} of package {package} not found.\n Available versions: ' - print(msg + ', '.join(all_versions)) + msg = f"ERROR: Version {version} of package {package} not found.\n Available versions: " + print(msg + ", ".join(all_versions)) sys.exit(1) cran_package = None if cran_package is not None: - package = cran_package['Package'] - version = cran_package['Version'] + package = cran_package["Package"] + version = cran_package["Version"] plower = package.lower() d = package_dicts[pkg_name] - d.update({ - 
'cran_packagename': package, - 'cran_version': version, - 'packagename': 'r-' + plower, + d.update( + { + "cran_packagename": package, + "cran_version": version, + "packagename": "r-" + plower, # Conda versions cannot have -. Conda (verlib) will treat _ as a . - 'conda_version': version.replace('-', '_'), - 'patches': '', - 'build_number': 0, - 'build_depends': '', - 'host_depends': '', - 'run_depends': '', + "conda_version": version.replace("-", "_"), + "patches": "", + "build_number": 0, + "build_depends": "", + "host_depends": "", + "run_depends": "", # CRAN doesn't seem to have this metadata :( - 'home_comment': '#', - 'homeurl': '', - 'summary_comment': '#', - 'summary': '', - 'binary1': '', - 'binary2': '' - }) + "home_comment": "#", + "homeurl": "", + "summary_comment": "#", + "summary": "", + "binary1": "", + "binary2": "", + } + ) if version_compare: - sys.exit(not version_compare(dir_path, d['conda_version'])) + sys.exit(not version_compare(dir_path, d["conda_version"])) patches = [] script_env = [] extra_recipe_maintainers = [] build_number = 0 - if update_policy.startswith('merge') and inputs['old-metadata']: - m = inputs['old-metadata'] - patches = make_array(m, 'source/patches') - script_env = make_array(m, 'build/script_env') - extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer) - if m.version() == d['conda_version']: - build_number = int(m.get_value('build/number', 0)) - build_number += 1 if update_policy == 'merge-incr-build-num' else 0 + if ( + update_policy + and update_policy.startswith("merge") + and inputs["old-metadata"] + ): + m = inputs["old-metadata"] + patches = make_array(m, "source/patches") + script_env = make_array(m, "build/script_env") + extra_recipe_maintainers = make_array( + m, "extra/recipe-maintainers", add_maintainer + ) + if m.version() == d["conda_version"]: + build_number = m.build_number() + build_number += 1 if update_policy == "merge-incr-build-num" else 0 if add_maintainer: - new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT, - add_maintainer=add_maintainer) + new_maintainer = f"{INDENT}{add_maintainer}" if new_maintainer not in extra_recipe_maintainers: if not len(extra_recipe_maintainers): # We hit this case when there is no existing recipe. 
- extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True) + extra_recipe_maintainers = make_array( + {}, "extra/recipe-maintainers", True + ) extra_recipe_maintainers.append(new_maintainer) if len(extra_recipe_maintainers): extra_recipe_maintainers[1:].sort() extra_recipe_maintainers.insert(0, "extra:\n ") - d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers) - d['patches'] = ''.join(patches) - d['script_env'] = ''.join(script_env) - d['build_number'] = build_number + d["extra_recipe_maintainers"] = "".join(extra_recipe_maintainers) + d["patches"] = "".join(patches) + d["script_env"] = "".join(script_env) + d["build_number"] = build_number cached_path = None cran_layout = copy.deepcopy(cran_layout_template) @@ -1018,209 +1135,262 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No description_path = None for archive_type, archive_details in cran_layout.items(): - contrib_url = '' - archive_details['cran_version'] = d['cran_version'] - archive_details['conda_version'] = d['conda_version'] - if is_archive and archive_type == 'source': - archive_details['dir'] += 'Archive/' + package + '/' - available_artefact = True if archive_type == 'source' else \ - package in archive_details['binaries'] and \ - any(d['cran_version'] == v for v, _ in archive_details['binaries'][package]) + contrib_url = "" + archive_details["cran_version"] = d["cran_version"] + archive_details["conda_version"] = d["conda_version"] + if is_archive and archive_type == "source": + archive_details["dir"] += "Archive/" + package + "/" + available_artefact = ( + True + if archive_type == "source" + else package in archive_details["binaries"] + and any( + d["cran_version"] == v + for v, _ in archive_details["binaries"][package] + ) + ) if not available_artefact: - if use_when_no_binary == 'error': - print("ERROR: --use-when-no-binary is error (and there is no binary)") + if use_when_no_binary == "error": + print( + "ERROR: --use-when-no-binary is error (and there is no binary)" + ) sys.exit(1) - elif use_when_no_binary.startswith('old'): - if package not in archive_details['binaries']: - if use_when_no_binary.endswith('src'): + elif use_when_no_binary.startswith("old"): + if package not in archive_details["binaries"]: + if use_when_no_binary.endswith("src"): available_artefact = False - archive_details['use_this'] = False + archive_details["use_this"] = False continue else: - print("ERROR: No binary nor old binary found " - "(maybe pass --use-when-no-binary=old-src to fallback to source?)") + print( + "ERROR: No binary nor old binary found " + "(maybe pass --use-when-no-binary=old-src to fallback to source?)" + ) sys.exit(1) # Version needs to be stored in archive_details. - archive_details['cranurl'] = archive_details['binaries'][package][-1][1] - archive_details['conda_version'] = archive_details['binaries'][package][-1][0] - archive_details['cran_version'] = archive_details['conda_version'].replace('_', '-') + archive_details["cranurl"] = archive_details["binaries"][package][ + -1 + ][1] + archive_details["conda_version"] = archive_details["binaries"][ + package + ][-1][0] + archive_details["cran_version"] = archive_details[ + "conda_version" + ].replace("_", "-") available_artefact = True # We may need to inspect the file later to determine which compilers are needed. 
cached_path = None sha256 = hashlib.sha256() - if archive_details['use_this'] and available_artefact: + if archive_details["use_this"] and available_artefact: if is_tarfile: filename = basename(location) contrib_url = relpath(location, dir_path) contrib_url_rendered = package_url = contrib_url cached_path = location - elif not is_github_url or archive_type != 'source': - filename_rendered = '{}_{}{}'.format( - package, archive_details['cran_version'], archive_details['ext']) - filename = f'{package}_{{{{ version }}}}' + archive_details['ext'] - contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir']) - contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir']) + elif not is_github_url or archive_type != "source": + filename_rendered = "{}_{}{}".format( + package, archive_details["cran_version"], archive_details["ext"] + ) + filename = f"{package}_{{{{ version }}}}" + archive_details["ext"] + contrib_url = "{{{{ cran_mirror }}}}/{}".format( + archive_details["dir"] + ) + contrib_url_rendered = cran_url + "/{}".format( + archive_details["dir"] + ) package_url = contrib_url_rendered + filename_rendered print(f"Downloading {archive_type} from {package_url}") try: cached_path, _ = source.download_to_cache( - config.src_cache, '', - {'url': package_url, 'fn': archive_type + '-' + filename_rendered}) + config.src_cache, + "", + { + "url": package_url, + "fn": archive_type + "-" + filename_rendered, + }, + ) except: - print("logic error, file {} should exist, we found it in a dir listing earlier." - .format(package_url)) + print( + f"logic error, file {package_url} should exist, we found it in a dir listing earlier." + ) sys.exit(1) - if description_path is None or archive_type == 'source': + if description_path is None or archive_type == "source": description_path = cached_path available_details = {} - available_details['selector'] = archive_details['selector'] - available_details['cran_version'] = archive_details['cran_version'] - available_details['conda_version'] = archive_details['conda_version'] + available_details["selector"] = archive_details["selector"] + available_details["cran_version"] = archive_details["cran_version"] + available_details["conda_version"] = archive_details["conda_version"] if cached_path: - sha256.update(open(cached_path, 'rb').read()) - archive_details['cranurl'] = package_url - available_details['filename'] = filename - available_details['contrib_url'] = contrib_url - available_details['contrib_url_rendered'] = contrib_url_rendered - available_details['hash_entry'] = f'sha256: {sha256.hexdigest()}' - available_details['cached_path'] = cached_path + sha256.update(open(cached_path, "rb").read()) + archive_details["cranurl"] = package_url + available_details["filename"] = filename + available_details["contrib_url"] = contrib_url + available_details["contrib_url_rendered"] = contrib_url_rendered + available_details["hash_entry"] = f"sha256: {sha256.hexdigest()}" + available_details["cached_path"] = cached_path # This is rubbish; d[] should be renamed global[] and should be # merged into source and binaryN. 
- if archive_type == 'source': + if archive_type == "source": if is_github_url: - available_details['url_key'] = '' - available_details['git_url_key'] = 'git_url:' - available_details['git_tag_key'] = 'git_tag:' - hash_msg = '# You can add a hash for the file here, (md5, sha1 or sha256)' - available_details['hash_entry'] = hash_msg - available_details['filename'] = '' - available_details['cranurl'] = '' - available_details['git_url'] = url - available_details['git_tag'] = new_git_tag - available_details['archive_keys'] = '' + available_details["url_key"] = "" + available_details["git_url_key"] = "git_url:" + available_details["git_tag_key"] = "git_tag:" + hash_msg = "# You can add a hash for the file here, (md5, sha1 or sha256)" + available_details["hash_entry"] = hash_msg + available_details["filename"] = "" + available_details["cranurl"] = "" + available_details["git_url"] = url + available_details["git_tag"] = new_git_tag + available_details["archive_keys"] = "" else: - available_details['url_key'] = 'url:' - available_details['git_url_key'] = '' - available_details['git_tag_key'] = '' - available_details['cranurl'] = ' ' + contrib_url + filename - available_details['git_url'] = '' - available_details['git_tag'] = '' + available_details["url_key"] = "url:" + available_details["git_url_key"] = "" + available_details["git_tag_key"] = "" + available_details["cranurl"] = " " + contrib_url + filename + available_details["git_url"] = "" + available_details["git_tag"] = "" else: - available_details['cranurl'] = archive_details['cranurl'] + available_details["cranurl"] = archive_details["cranurl"] - available_details['patches'] = d['patches'] + available_details["patches"] = d["patches"] available[archive_type] = available_details # Figure out the selectors according to what is available. - _all = ['linux', 'win32', 'win64', 'osx'] + _all = ["linux", "win32", "win64", "osx"] from_source = _all[:] binary_id = 1 for archive_type, archive_details in available.items(): - if archive_type == 'source': + if archive_type == "source": for k, v in archive_details.items(): d[k] = v else: - sel = archive_details['selector'] + sel = archive_details["selector"] # Does the file exist? If not we need to build from source. 
from_source.remove(sel) binary_id += 1 if from_source == _all: sel_src = "" - sel_src_and_win = ' # [win]' - sel_src_not_win = ' # [not win]' + sel_src_and_win = " # [win]" + sel_src_not_win = " # [not win]" else: - sel_src = ' # [' + ' or '.join(from_source) + ']' - sel_src_and_win = ' # [' + ' or '.join(fs for fs in from_source if - fs.startswith('win')) + ']' - sel_src_not_win = ' # [' + ' or '.join(fs for fs in from_source if not - fs.startswith('win')) + ']' + sel_src = " # [" + " or ".join(from_source) + "]" + sel_src_and_win = ( + " # [" + + " or ".join(fs for fs in from_source if fs.startswith("win")) + + "]" + ) + sel_src_not_win = ( + " # [" + + " or ".join(fs for fs in from_source if not fs.startswith("win")) + + "]" + ) sel_cross = " # [build_platform != target_platform]" - d['sel_src'] = sel_src - d['sel_src_and_win'] = sel_src_and_win - d['sel_src_not_win'] = sel_src_not_win - d['from_source'] = from_source - - if 'source' in available: - available_details = available['source'] - available_details['sel'] = sel_src - filename = available_details['filename'] - if 'contrib_url' in available_details: - contrib_url = available_details['contrib_url'] + d["sel_src"] = sel_src + d["sel_src_and_win"] = sel_src_and_win + d["sel_src_not_win"] = sel_src_not_win + d["from_source"] = from_source + + if "source" in available: + available_details = available["source"] + available_details["sel"] = sel_src + filename = available_details["filename"] + if "contrib_url" in available_details: + contrib_url = available_details["contrib_url"] if archive: if is_tarfile: - available_details['cranurl'] = (INDENT + contrib_url) + available_details["cranurl"] = INDENT + contrib_url elif not is_archive: - available_details['cranurl'] = (INDENT + contrib_url + - filename + sel_src + INDENT + contrib_url + - f'Archive/{package}/' + filename + sel_src) + available_details["cranurl"] = ( + INDENT + + contrib_url + + filename + + sel_src + + INDENT + + contrib_url + + f"Archive/{package}/" + + filename + + sel_src + ) else: - available_details['cranurl'] = ' ' + contrib_url + filename + sel_src + available_details["cranurl"] = ( + " " + contrib_url + filename + sel_src + ) if not is_github_url: - available_details['archive_keys'] = '{url_key}{sel}' \ - ' {cranurl}\n' \ - ' {hash_entry}{sel}'.format( - **available_details) + available_details["archive_keys"] = ( + "{url_key}{sel} {cranurl}\n {hash_entry}{sel}" + ).format(**available_details) # Extract the DESCRIPTION data from the source if cran_package is None: cran_package = get_archive_metadata(description_path) - d['cran_metadata'] = '\n'.join(['# %s' % line for line in - cran_package['orig_lines'] if line]) + d["cran_metadata"] = "\n".join( + [f"# {line}" for line in cran_package["orig_lines"] if line] + ) # Render the source and binaryN keys binary_id = 1 - d['version_binary1'] = d['version_binary2'] = "" + d["version_binary1"] = d["version_binary2"] = "" for archive_type, archive_details in available.items(): - if archive_type == 'source': - d['source'] = SOURCE_META.format(**archive_details) - d['version_source'] = VERSION_META.format(**archive_details) + if archive_type == "source": + d["source"] = SOURCE_META.format(**archive_details) + d["version_source"] = VERSION_META.format(**archive_details) else: - archive_details['sel'] = ' # [' + archive_details['selector'] + ']' - d['binary' + str(binary_id)] = BINARY_META.format(**archive_details) - d['version_binary' + str(binary_id)] = VERSION_META.format(**archive_details) + archive_details["sel"] = " # 
[" + archive_details["selector"] + "]" + d["binary" + str(binary_id)] = BINARY_META.format(**archive_details) + d["version_binary" + str(binary_id)] = VERSION_META.format( + **archive_details + ) binary_id += 1 - license_info = get_license_info(cran_package.get("License", "None"), allowed_license_families) - d['license'], d['license_file'], d['license_family'] = license_info + license_info = get_license_info( + cran_package.get("License", "None"), allowed_license_families + ) + d["license"], d["license_file"], d["license_family"] = license_info - if 'License_is_FOSS' in cran_package: - d['license'] += ' (FOSS)' - if cran_package.get('License_restricts_use') == 'yes': - d['license'] += ' (Restricts use)' + if "License_is_FOSS" in cran_package: + d["license"] += " (FOSS)" + if cran_package.get("License_restricts_use") == "yes": + d["license"] += " (Restricts use)" if "URL" in cran_package: - d['home_comment'] = '' - d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL']) + d["home_comment"] = "" + d["homeurl"] = " " + yaml_quote_string(cran_package["URL"]) else: # use CRAN page as homepage if nothing has been specified - d['home_comment'] = '' + d["home_comment"] = "" if is_github_url: - d['homeurl'] = f' {location}' + d["homeurl"] = f" {location}" else: - d['homeurl'] = f' https://CRAN.R-project.org/package={package}' + d["homeurl"] = f" https://CRAN.R-project.org/package={package}" - if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes': - d['noarch_generic'] = '' + if ( + not use_noarch_generic + or cran_package.get("NeedsCompilation", "no") == "yes" + ): + d["noarch_generic"] = "" else: - d['noarch_generic'] = 'noarch: generic' + d["noarch_generic"] = "noarch: generic" - if 'Description' in cran_package: - d['summary_comment'] = '' - d['summary'] = ' ' + yaml_quote_string(cran_package['Description']) + if "Description" in cran_package: + d["summary_comment"] = "" + d["summary"] = " " + yaml_quote_string(cran_package["Description"]) if "Suggests" in cran_package and not no_comments: - d['suggests'] = "# Suggests: %s" % cran_package['Suggests'] + d["suggests"] = "# Suggests: {}".format(cran_package["Suggests"]) else: - d['suggests'] = '' + d["suggests"] = "" # Every package depends on at least R. # I'm not sure what the difference between depends and imports is. 
- depends = [s.strip() for s in cran_package.get('Depends', - '').split(',') if s.strip()] - imports = [s.strip() for s in cran_package.get('Imports', - '').split(',') if s.strip()] - links = [s.strip() for s in cran_package.get("LinkingTo", - '').split(',') if s.strip()] + depends = [ + s.strip() for s in cran_package.get("Depends", "").split(",") if s.strip() + ] + imports = [ + s.strip() for s in cran_package.get("Imports", "").split(",") if s.strip() + ] + links = [ + s.strip() for s in cran_package.get("LinkingTo", "").split(",") if s.strip() + ] dep_dict = {} @@ -1228,133 +1398,162 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No for s in list(chain(imports, depends, links)): match = VERSION_DEPENDENCY_REGEX.match(s) if not match: - sys.exit("Could not parse version from dependency of %s: %s" % - (package, s)) - name = match.group('name') + sys.exit(f"Could not parse version from dependency of {package}: {s}") + name = match.group("name") if name in seen: continue seen.add(name) - archs = match.group('archs') - relop = match.group('relop') or '' - ver = match.group('version') or '' - ver = ver.replace('-', '_') + archs = match.group("archs") + relop = match.group("relop") or "" + ver = match.group("version") or "" + ver = ver.replace("-", "_") # If there is a relop there should be a version assert not relop or ver if archs: - sys.exit("Don't know how to handle archs from dependency of " - "package %s: %s" % (package, s)) + sys.exit( + "Don't know how to handle archs from dependency of " + f"package {package}: {s}" + ) - dep_dict[name] = f'{relop}{ver}' + dep_dict[name] = f"{relop}{ver}" - if 'R' not in dep_dict: - dep_dict['R'] = '' + if "R" not in dep_dict: + dep_dict["R"] = "" - os_type = cran_package.get("OS_type", '') - if os_type != 'unix' and os_type != 'windows' and os_type != '': + os_type = cran_package.get("OS_type", "") + if os_type != "unix" and os_type != "windows" and os_type != "": print(f"Unknown OS_type: {os_type} in CRAN package") - os_type = '' - if os_type == 'unix': - d['skip_os'] = 'skip: True # [not unix]' + os_type = "" + if os_type == "unix": + d["skip_os"] = "skip: True # [not unix]" d["noarch_generic"] = "" - if os_type == 'windows': - d['skip_os'] = 'skip: True # [not win]' + if os_type == "windows": + d["skip_os"] = "skip: True # [not win]" d["noarch_generic"] = "" - if os_type == '' and no_comments: - d['skip_os'] = '' - elif os_type == '': - d['skip_os'] = '# no skip' + if os_type == "" and no_comments: + d["skip_os"] = "" + elif os_type == "": + d["skip_os"] = "# no skip" need_git = is_github_url - if cran_package.get("NeedsCompilation", 'no') == 'yes': - with tarfile.open(available['source']['cached_path']) as tf: - need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77', '.f95', '.f03')) for f in tf]) + if cran_package.get("NeedsCompilation", "no") == "yes": + with tarfile.open(available["source"]["cached_path"]) as tf: + need_f = any( + [ + f.name.lower().endswith((".f", ".f90", ".f77", ".f95", ".f03")) + for f in tf + ] + ) # Fortran builds use CC to perform the link (they do not call the linker directly). 
- need_c = True if need_f else \ - any([f.name.lower().endswith('.c') for f in tf]) - need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) - for f in tf]) - need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) - need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ - any([f.name.lower().endswith(('/makefile', '/makevars')) - for f in tf]) + need_c = ( + True if need_f else any([f.name.lower().endswith(".c") for f in tf]) + ) + need_cxx = any( + [ + f.name.lower().endswith((".cxx", ".cpp", ".cc", ".c++")) + for f in tf + ] + ) + need_autotools = any( + [f.name.lower().endswith("/configure") for f in tf] + ) + need_make = ( + True + if any((need_autotools, need_f, need_cxx, need_c)) + else any( + [ + f.name.lower().endswith(("/makefile", "/makevars")) + for f in tf + ] + ) + ) else: need_c = need_cxx = need_f = need_autotools = need_make = False - if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict: + if "Rcpp" in dep_dict or "RcppArmadillo" in dep_dict: need_cxx = True if need_cxx: need_c = True - for dep_type in ['build', 'host', 'run']: - + for dep_type in ["build", "host", "run"]: deps = [] # Put non-R dependencies first. - if dep_type == 'build': + if dep_type == "build": if need_c: - deps.append("{indent}{{{{ compiler('c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ compiler('c') }}}} {sel_src_not_win}" + ) + deps.append( + f"{INDENT}{{{{ compiler('m2w64_c') }}}} {sel_src_and_win}" + ) if need_cxx: - deps.append("{indent}{{{{ compiler('cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ compiler('cxx') }}}} {sel_src_not_win}" + ) + deps.append( + f"{INDENT}{{{{ compiler('m2w64_cxx') }}}} {sel_src_and_win}" + ) if need_f: - deps.append("{indent}{{{{ compiler('fortran') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_fortran') }}}}{sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ compiler('fortran') }}}} {sel_src_not_win}" + ) + deps.append( + f"{INDENT}{{{{ compiler('m2w64_fortran') }}}}{sel_src_and_win}" + ) if use_rtools_win: need_c = need_cxx = need_f = need_autotools = need_make = False - deps.append("{indent}rtools {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append(f"{INDENT}rtools {sel_src_and_win}") # extsoft is legacy. R packages will download rwinlib subprojects # as necessary according to Jeroen Ooms. (may need to disable that # for non-MRO builds or maybe switch to Jeroen's toolchain?) 
# deps.append("{indent}{{{{native}}}}extsoft {sel}".format( # indent=INDENT, sel=sel_src_and_win)) if need_autotools or need_make or need_git: - deps.append("{indent}{{{{ posix }}}}filesystem {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ posix }}}}filesystem {sel_src_and_win}" + ) if need_git: deps.append(f"{INDENT}{{{{ posix }}}}git") if need_autotools: - deps.append("{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}grep {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}autoconf {sel}".format( - indent=INDENT, sel=sel_src)) - deps.append("{indent}{{{{ posix }}}}automake {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ posix }}}}automake-wrapper{sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ posix }}}}sed {sel_src_and_win}" + ) + deps.append( + f"{INDENT}{{{{ posix }}}}grep {sel_src_and_win}" + ) + deps.append(f"{INDENT}{{{{ posix }}}}autoconf {sel_src}") + deps.append( + f"{INDENT}{{{{ posix }}}}automake {sel_src_not_win}" + ) + deps.append( + f"{INDENT}{{{{ posix }}}}automake-wrapper{sel_src_and_win}" + ) deps.append(f"{INDENT}{{{{ posix }}}}pkg-config") if need_make: - deps.append("{indent}{{{{ posix }}}}make {sel}".format( - indent=INDENT, sel=sel_src)) + deps.append(f"{INDENT}{{{{ posix }}}}make {sel_src}") if not need_autotools: - deps.append("{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}coreutils {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}zip {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{ posix }}}}sed {sel_src_and_win}" + ) + deps.append( + f"{INDENT}{{{{ posix }}}}coreutils {sel_src_and_win}" + ) + deps.append(f"{INDENT}{{{{ posix }}}}zip {sel_src_and_win}") if add_cross_r_base: deps.append(f"{INDENT}cross-r-base {{{{ r_base }}}} {sel_cross}") - elif dep_type == 'run': + elif dep_type == "run": if need_c or need_cxx or need_f: - deps.append("{indent}{{{{native}}}}gcc-libs {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + f"{INDENT}{{{{native}}}}gcc-libs {sel_src_and_win}" + ) - if dep_type == 'host' or dep_type == 'run': + if dep_type == "host" or dep_type == "run": for name in sorted(dep_dict): if name in R_BASE_PACKAGE_NAMES: continue - if name == 'R': + if name == "R": # Put R first # Regarless of build or run, and whether this is a # recommended package or not, it can only depend on @@ -1364,29 +1563,32 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No # that are in the recommended group. # We don't include any R version restrictions because # conda-build always pins r-base and mro-base version. 
- deps.insert(0, f'{INDENT}{r_interp}') + deps.insert(0, f"{INDENT}{r_interp}") else: - conda_name = 'r-' + name.lower() + conda_name = "r-" + name.lower() if dep_dict[name]: - deps.append('{indent}{name} {version}'.format(name=conda_name, - version=dep_dict[name], indent=INDENT)) + deps.append(f"{INDENT}{conda_name} {dep_dict[name]}") else: - deps.append('{indent}{name}'.format(name=conda_name, - indent=INDENT)) + deps.append(f"{INDENT}{conda_name}") if recursive: lower_name = name.lower() if lower_name not in package_dicts: - inputs_dict = package_to_inputs_dict(output_dir, output_suffix, - git_tag, lower_name, None) - assert lower_name == inputs_dict['pkg-name'], \ - "name {} != inputs_dict['pkg-name'] {}".format( - name, inputs_dict['pkg-name']) + inputs_dict = package_to_inputs_dict( + output_dir, output_suffix, git_tag, lower_name, None + ) + assert ( + lower_name == inputs_dict["pkg-name"] + ), "name {} != inputs_dict['pkg-name'] {}".format( + name, inputs_dict["pkg-name"] + ) assert lower_name not in package_list - package_dicts.update({lower_name: {'inputs': inputs_dict}}) + package_dicts.update( + {lower_name: {"inputs": inputs_dict}} + ) package_list.append(lower_name) - d['%s_depends' % dep_type] = ''.join(deps) + d[f"{dep_type}_depends"] = "".join(deps) if no_comments: global CRAN_BUILD_SH_SOURCE, CRAN_META @@ -1395,55 +1597,68 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No for package in package_dicts: d = package_dicts[package] - dir_path = d['inputs']['new-location'] + dir_path = d["inputs"]["new-location"] if exists(dir_path) and not version_compare: - if update_policy == 'error': - raise RuntimeError("directory already exists " - "(and --update-policy is 'error'): %s" % dir_path) - elif update_policy == 'overwrite': + if update_policy == "error": + raise RuntimeError( + "directory already exists " + f"(and --update-policy is 'error'): {dir_path}" + ) + elif update_policy == "overwrite": rm_rf(dir_path) - elif update_policy == 'skip-up-to-date': + elif update_policy == "skip-up-to-date": if cran_index is None: session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) - if up_to_date(cran_index, d['inputs']['old-metadata']): + if up_to_date(cran_index, d["inputs"]["old-metadata"]): continue - elif update_policy == 'skip-existing' and d['inputs']['old-metadata']: + elif update_policy == "skip-existing" and d["inputs"]["old-metadata"]: continue - from_sources = d['from_source'] + from_sources = d["from_source"] # Normalize the metadata values - d = {k: unicodedata.normalize("NFKD", str(v)).encode('ascii', 'ignore') - .decode() for k, v in d.items()} + d = { + k: unicodedata.normalize("NFKD", str(v)).encode("ascii", "ignore").decode() + for k, v in d.items() + } try: makedirs(join(dir_path)) except: pass - print("Writing recipe for %s" % package.lower()) - with open(join(dir_path, 'meta.yaml'), 'w') as f: + print(f"Writing recipe for {package.lower()}") + with open(join(dir_path, "meta.yaml"), "w") as f: f.write(clear_whitespace(CRAN_META.format(**d))) - if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite': - with open(join(dir_path, 'build.sh'), 'wb') as f: + if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite": + with open(join(dir_path, "build.sh"), "wb") as f: if from_sources == _all: - f.write(CRAN_BUILD_SH_SOURCE.format(**d).encode('utf-8')) + f.write(CRAN_BUILD_SH_SOURCE.format(**d).encode("utf-8")) elif from_sources == []: - 
f.write(CRAN_BUILD_SH_BINARY.format(**d).encode('utf-8')) + f.write(CRAN_BUILD_SH_BINARY.format(**d).encode("utf-8")) else: tpbt = [target_platform_bash_test_by_sel[t] for t in from_sources] - d['source_pf_bash'] = ' || '.join(['[[ ${target_platform} ' + s + ' ]]' - for s in tpbt]) - f.write(CRAN_BUILD_SH_MIXED.format(**d).encode('utf-8')) - - if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite': - with open(join(dir_path, 'bld.bat'), 'wb') as f: - if len([fs for fs in from_sources if fs.startswith('win')]) == 2: - f.write(CRAN_BLD_BAT_SOURCE.format(**d).replace('\n', '\r\n').encode('utf-8')) + d["source_pf_bash"] = " || ".join( + ["[[ ${target_platform} " + s + " ]]" for s in tpbt] + ) + f.write(CRAN_BUILD_SH_MIXED.format(**d).encode("utf-8")) + + if not exists(join(dir_path, "bld.bat")) or update_policy == "overwrite": + with open(join(dir_path, "bld.bat"), "wb") as f: + if len([fs for fs in from_sources if fs.startswith("win")]) == 2: + f.write( + CRAN_BLD_BAT_SOURCE.format(**d) + .replace("\n", "\r\n") + .encode("utf-8") + ) else: - f.write(CRAN_BLD_BAT_MIXED.format(**d).replace('\n', '\r\n').encode('utf-8')) + f.write( + CRAN_BLD_BAT_MIXED.format(**d) + .replace("\n", "\r\n") + .encode("utf-8") + ) -def version_compare(recipe_dir, newest_conda_version): - m = metadata.MetaData(recipe_dir) +def version_compare(recipe_dir: str, newest_conda_version): + m = MetaData(recipe_dir) local_version = m.version() package = basename(recipe_dir) @@ -1458,7 +1673,7 @@ def get_outdated(output_dir, cran_index, packages=()): to_update = [] recipes = listdir(output_dir) for recipe in recipes: - if not recipe.startswith('r-') or not isdir(recipe): + if not recipe.startswith("r-") or not isdir(recipe): continue recipe_name = recipe[2:] @@ -1467,24 +1682,24 @@ def get_outdated(output_dir, cran_index, packages=()): continue if recipe_name not in cran_index: - print("Skipping %s, not found on CRAN" % recipe) + print(f"Skipping {recipe}, not found on CRAN") continue - version_compare(join(output_dir, recipe), - cran_index[recipe_name][1].replace('-', '_')) + version_compare( + join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_") + ) - print("Updating %s" % recipe) + print(f"Updating {recipe}") to_update.append(recipe_name) return to_update def get_existing(output_dir, cran_index, packages=()): - existing = [] recipes = listdir(output_dir) for recipe in recipes: - if not recipe.startswith('r-') or not isdir(recipe): + if not recipe.startswith("r-") or not isdir(recipe): continue recipe_name = recipe[2:] @@ -1506,7 +1721,7 @@ def up_to_date(cran_index, package): return False # For now. We can do better; need to collect *all* information upfront. - if 'github.com' in location: + if "github.com" in location: return False else: if cran_pkg_name not in cran_index: @@ -1540,28 +1755,34 @@ def get_license_info(license_text, allowed_license_families): # The list order matters. The first element should be the name of the # license file shipped with r-base. 
- d_license = {'agpl3': ['AGPL-3', 'AGPL (>= 3)', 'AGPL', - 'GNU Affero General Public License'], - 'artistic2': ['Artistic-2.0', 'Artistic License 2.0'], - 'gpl2': ['GPL-2', 'GPL (>= 2)', 'GNU General Public License (>= 2)'], - 'gpl3': ['GPL-3', 'GPL (>= 3)', 'GNU General Public License (>= 3)', - 'GPL', 'GNU General Public License'], - 'lgpl2': ['LGPL-2', 'LGPL (>= 2)'], - 'lgpl21': ['LGPL-2.1', 'LGPL (>= 2.1)'], - 'lgpl3': ['LGPL-3', 'LGPL (>= 3)', 'LGPL', - 'GNU Lesser General Public License'], - 'bsd2': ['BSD_2_clause', 'BSD_2_Clause', 'BSD 2-clause License'], - 'bsd3': ['BSD_3_clause', 'BSD_3_Clause', 'BSD 3-clause License'], - 'mit': ['MIT'], - } - - license_file_template = '\'{{{{ environ["PREFIX"] }}}}/lib/R/share/licenses/{license_id}\'' + d_license = { + "agpl3": ["AGPL-3", "AGPL (>= 3)", "AGPL", "GNU Affero General Public License"], + "artistic2": ["Artistic-2.0", "Artistic License 2.0"], + "gpl2": ["GPL-2", "GPL (>= 2)", "GNU General Public License (>= 2)"], + "gpl3": [ + "GPL-3", + "GPL (>= 3)", + "GNU General Public License (>= 3)", + "GPL", + "GNU General Public License", + ], + "lgpl2": ["LGPL-2", "LGPL (>= 2)"], + "lgpl21": ["LGPL-2.1", "LGPL (>= 2.1)"], + "lgpl3": ["LGPL-3", "LGPL (>= 3)", "LGPL", "GNU Lesser General Public License"], + "bsd2": ["BSD_2_clause", "BSD_2_Clause", "BSD 2-clause License"], + "bsd3": ["BSD_3_clause", "BSD_3_Clause", "BSD 3-clause License"], + "mit": ["MIT"], + } + + license_file_template = ( + "'{{{{ environ[\"PREFIX\"] }}}}/lib/R/share/licenses/{license_id}'" + ) license_texts = [] license_files = [] # split license_text by "|" and "+" into parts for further matching - license_text_parts = [l_opt.strip() for l_opt in re.split(r'\||\+', license_text)] + license_text_parts = [l_opt.strip() for l_opt in re.split(r"\||\+", license_text)] for l_opt in license_text_parts: # the file case if l_opt.startswith("file "): @@ -1574,14 +1795,18 @@ def get_license_info(license_text, allowed_license_families): l_opt_text = d_license[license_id][0] license_texts.append(l_opt_text) - license_files.append(license_file_template.format(license_id=l_opt_text)) + license_files.append( + license_file_template.format(license_id=l_opt_text) + ) break # Join or fallback to original license_text if matched license_texts is empty license_text = " | ".join(license_texts) or license_text # Build the license_file entry and ensure it is empty if no license file - license_file = "license_file:\n - " + "\n - ".join(license_files) if license_files else "" + license_file = "" + if license_files: + license_file = f"license_file:{dashlist(license_files, indent=4)}\n" # Only one family is allowed, so guessing it once license_family = guess_license_family(license_text, allowed_license_families) diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index e83e76fd65..41ec499bad 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -8,14 +8,16 @@ # - mingw32 support (really any windows support, completely untested) # - replace manual "luajit -e require 'blah'" with built-in entry-point testing +from __future__ import annotations + +import json import os import subprocess import tempfile from glob import glob -import json from sys import platform as _platform -INDENT = '\n - ' +INDENT = "\n - " rockspec_parser = """ local ok,cjson = pcall(require, "cjson") @@ -145,7 +147,7 @@ def add_parser(repos): ) luarocks.add_argument( "packages", - nargs='+', + nargs="+", help="luarocks packages to create recipe skeletons for.", ) 
luarocks.add_argument( @@ -159,8 +161,9 @@ def add_parser(repos): ) luarocks.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.') + action="store_true", + help="Create recipes for dependencies if they do not already exist.", + ) def package_exists(package_name): @@ -171,7 +174,7 @@ def package_exists(package_name): def getval(spec, k): if k not in spec: - raise Exception("Required key %s not in spec" % k) + raise Exception(f"Required key {k} not in spec") else: return spec[k] @@ -181,7 +184,7 @@ def warn_against_branches(branch): print("=========================================") print("") print("WARNING:") - print("Building a rock referenced to branch %s." % branch) + print(f"Building a rock referenced to branch {branch}.") print("This is not a tag. This is dangerous, because rebuilding") print("at a later date may produce a different package.") print("Please replace with a tag, git commit, or tarball.") @@ -223,8 +226,12 @@ def ensure_base_deps(deps): return deps -def skeletonize(packages, output_dir=".", version=None, recursive=False): - +def skeletonize( + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, +) -> None: # Check that we have Lua installed (any version) # Check that we have luarocks installed @@ -245,33 +252,37 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): while packages: package = packages.pop() - packagename = "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() - d = package_dicts.setdefault(package, + packagename = ( + f"lua-{package.lower()}" if package[:4] != "lua-" else package.lower() + ) + d = package_dicts.setdefault( + package, { - 'packagename': packagename, - 'version': "0.0", - 'filename': "", - 'url': "", - 'md5': "", - 'usemd5': "# ", - 'usefile': "# ", - 'usegit': "# ", - 'usegittag': "# ", - 'usegitrev': "# ", - 'gittag': "", - 'gitrev': "", - 'noarch_python_comment': "# ", - 'build_depends': "", - 'run_depends': "", - 'test_comment': "", - 'entry_comment': "", - 'test_commands': "", - 'home_comment': "# ", - 'homeurl': "", - 'license': "Unknown", - 'summary_comment': "# ", - 'summary': "", - }) + "packagename": packagename, + "version": "0.0", + "filename": "", + "url": "", + "md5": "", + "usemd5": "# ", + "usefile": "# ", + "usegit": "# ", + "usegittag": "# ", + "usegitrev": "# ", + "gittag": "", + "gitrev": "", + "noarch_python_comment": "# ", + "build_depends": "", + "run_depends": "", + "test_comment": "", + "entry_comment": "", + "test_commands": "", + "home_comment": "# ", + "homeurl": "", + "license": "Unknown", + "summary_comment": "# ", + "summary": "", + }, + ) # Download rockspec o = subprocess.call(["luarocks", "download", package, "--rockspec"]) @@ -282,67 +293,68 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): fs = glob(package + "*.rockspec") if len(fs) != 1: raise Exception("Failed to download rockspec") - d['rockspec_file'] = fs[0] + d["rockspec_file"] = fs[0] # Parse the rockspec into a dictionary - p = subprocess.Popen(["lua", "-e", rockspec_parser % d['rockspec_file']], - stdout=subprocess.PIPE) + p = subprocess.Popen( + ["lua", "-e", rockspec_parser % d["rockspec_file"]], stdout=subprocess.PIPE + ) out, err = p.communicate() if "ERROR" in out: raise Exception(out.replace("ERROR: ", "")) spec = json.loads(out) # Gather the basic details - d['rockname'] = getval(spec, "package") - d['version'] = getval(spec, "version") - d['version'] = 
"".join([c for c in d['version'] if c.isalnum()]) + d["rockname"] = getval(spec, "package") + d["version"] = getval(spec, "version") + d["version"] = "".join([c for c in d["version"] if c.isalnum()]) source = getval(spec, "source") # Figure out how to download the package, and from where - d['url'] = getval(source, "url") - ext = os.path.splitext(d['url'])[-1] + d["url"] = getval(source, "url") + ext = os.path.splitext(d["url"])[-1] if ext in [".zip", ".tar", ".tar.bz2", ".tar.xz", ".tar.gz"]: - d['usefile'] = "" - d['filename'] = os.path.split(d['url'])[-1] + d["usefile"] = "" + d["filename"] = os.path.split(d["url"])[-1] if "md5" in source: md5 = getval(source, "md5") if len(md5): - d['md5'] = md5 - d['usemd5'] = "" - elif ext in [".git"] or d['url'][:4] == "git:": - d['usegit'] = "" + d["md5"] = md5 + d["usemd5"] = "" + elif ext in [".git"] or d["url"][:4] == "git:": + d["usegit"] = "" # Check if we're using a tag or a commit if "tag" in source: - d['usegittag'] = "" - d['gittag'] = getval(source, "tag") + d["usegittag"] = "" + d["gittag"] = getval(source, "tag") elif "branch" in source: - d['usegittag'] = "" - d['gittag'] = getval(source, "branch") - warn_against_branches(d['gittag']) + d["usegittag"] = "" + d["gittag"] = getval(source, "branch") + warn_against_branches(d["gittag"]) else: - d['usegittag'] = "" - d['gittag'] = "master" - warn_against_branches(d['gittag']) + d["usegittag"] = "" + d["gittag"] = "master" + warn_against_branches(d["gittag"]) # Gather the description if "description" in spec: desc = getval(spec, "description") if "homepage" in desc: - d['homeurl'] = desc['homepage'] - d['home_comment'] = "" + d["homeurl"] = desc["homepage"] + d["home_comment"] = "" if "summary" in desc: - d['summary'] = desc['summary'] - d['summary_comment'] = "" + d["summary"] = desc["summary"] + d["summary_comment"] = "" if "license" in desc: - d['license'] = desc['license'] + d["license"] = desc["license"] # Gather the dependencies if "dependencies" in spec: deps = getval(spec, "dependencies") if len(deps): deps = ensure_base_deps([format_dep(dep) for dep in deps]) - d['build_depends'] = INDENT.join([''] + deps) - d['run_depends'] = d['build_depends'] + d["build_depends"] = INDENT.join([""] + deps) + d["run_depends"] = d["build_depends"] # Build some entry-point tests. 
if "build" in spec: @@ -352,33 +364,37 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): our_plat = "unix" modules = None - if "modules" in spec['build']: - modules = spec['build']["modules"] - elif "platforms" in spec['build']: - if our_plat in spec['build']['platforms']: - if "modules" in spec['build']['platforms'][our_plat]: - modules = spec['build']['platforms'][our_plat]["modules"] + if "modules" in spec["build"]: + modules = spec["build"]["modules"] + elif "platforms" in spec["build"]: + if our_plat in spec["build"]["platforms"]: + if "modules" in spec["build"]["platforms"][our_plat]: + modules = spec["build"]["platforms"][our_plat]["modules"] if modules: - d['test_commands'] = INDENT.join([''] + - ["""lua -e "require '%s'\"""" % r - for r in modules.keys()]) + d["test_commands"] = INDENT.join( + [""] + [f"""lua -e "require '{r}'\"""" for r in modules.keys()] + ) # If we didn't find any modules to import, import the base name - if d['test_commands'] == "": - d['test_commands'] = INDENT.join([''] + ["""lua -e "require '%s'" """ % d['rockname']]) + if d["test_commands"] == "": + d["test_commands"] = INDENT.join( + [""] + ["""lua -e "require '{}'" """.format(d["rockname"])] + ) # Build the luarocks skeleton os.chdir(cwd) for package in package_dicts: d = package_dicts[package] - name = d['packagename'] + name = d["packagename"] os.makedirs(os.path.join(output_dir, name)) - print(f"Writing recipe for {package.lower()} to {os.path.join(output_dir, name)}") - with open(os.path.join(output_dir, name, 'meta.yaml'), 'w') as f: + print( + f"Writing recipe for {package.lower()} to {os.path.join(output_dir, name)}" + ) + with open(os.path.join(output_dir, name, "meta.yaml"), "w") as f: f.write(LUAROCKS_META.format(**d)) - with open(os.path.join(output_dir, name, 'build.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "build.sh"), "w") as f: f.write(LUAROCKS_BUILD_SH.format(**d)) - with open(os.path.join(output_dir, name, 'post-link.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "post-link.sh"), "w") as f: f.write(LUAROCKS_POSTLINK_SH) - with open(os.path.join(output_dir, name, 'pre-unlink.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "pre-unlink.sh"), "w") as f: f.write(LUAROCKS_PREUNLINK_SH.format(**d)) diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 61f5baafb0..d3b716bc8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -4,41 +4,53 @@ Tools for converting PyPI packages to conda recipes. 
""" +from __future__ import annotations -from collections import defaultdict, OrderedDict +import configparser import keyword import logging import os -from os import makedirs, listdir, getcwd, chdir -from os.path import join, isdir, exists, isfile, abspath - -from pkg_resources import parse_version import re -from shutil import copy2 import subprocess import sys +from collections import OrderedDict, defaultdict +from io import StringIO +from os import chdir, getcwd, listdir, makedirs +from os.path import abspath, exists, isdir, isfile, join +from shutil import copy2 from tempfile import mkdtemp +from typing import TYPE_CHECKING +from urllib.parse import urljoin, urlsplit import pkginfo import requests -from requests.packages.urllib3.util.url import parse_url -from urllib.parse import urljoin, urlsplit import yaml +from conda.base.context import context +from conda.cli.common import spec_from_line +from conda.gateways.connection.download import download +from conda.gateways.disk.read import compute_sum +from conda.models.version import normalized_version +from conda.utils import human_bytes +from requests.packages.urllib3.util.url import parse_url -from conda_build.conda_interface import spec_from_line -from conda_build.conda_interface import input, configparser, StringIO -from conda_build.conda_interface import download -from conda_build.conda_interface import normalized_version -from conda_build.conda_interface import human_bytes, hashsum_file -from conda_build.conda_interface import default_python - -from conda_build.utils import decompressible_exts, tar_xf, rm_rf, check_call_env, ensure_list -from conda_build.source import apply_patch -from conda_build.environ import create_env -from conda_build.config import Config -from conda_build.metadata import MetaData -from conda_build.license_family import allowed_license_families, guess_license_family -from conda_build.render import FIELDS as EXPECTED_SECTION_ORDER +from ..config import Config +from ..environ import create_env +from ..license_family import allowed_license_families, guess_license_family +from ..metadata import MetaData +from ..render import FIELDS as EXPECTED_SECTION_ORDER +from ..source import apply_patch +from ..utils import ( + check_call_env, + decompressible_exts, + ensure_list, + on_win, + rm_rf, + tar_xf, +) +from ..version import _parse as parse_version + +if TYPE_CHECKING: + from typing import Iterable pypi_example = """ Examples: @@ -58,11 +70,18 @@ # Definition of REQUIREMENTS_ORDER below are from # https://github.com/conda-forge/conda-smithy/blob/master/conda_smithy/lint_recipe.py#L16 -REQUIREMENTS_ORDER = ['host', 'run'] +REQUIREMENTS_ORDER = ["host", "run"] # Definition of ABOUT_ORDER reflects current practice -ABOUT_ORDER = ['home', 'license', 'license_family', 'license_file', 'summary', - 'doc_url', 'dev_url'] +ABOUT_ORDER = [ + "home", + "license", + "license_family", + "license_file", + "summary", + "doc_url", + "dev_url", +] PYPI_META_HEADER = """{{% set name = "{packagename}" %}} {{% set version = "{version}" %}} @@ -74,21 +93,29 @@ # The top-level ordering is irrelevant because the write order of 'package', # etc. is determined by EXPECTED_SECTION_ORDER. 
PYPI_META_STATIC = { - 'package': OrderedDict([ - ('name', '{{ name|lower }}'), - ('version', '{{ version }}'), - ]), - 'source': OrderedDict([ - ('url', '/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz'), # NOQA - ]), - 'build': OrderedDict([ - ('number', 0), - ]), - 'extra': OrderedDict([ - ('recipe-maintainers', '') - ]), + "package": OrderedDict( + [ + ("name", "{{ name|lower }}"), + ("version", "{{ version }}"), + ] + ), + "source": OrderedDict( + [ + ( + "url", + "/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz", + ), # NOQA + ] + ), + "build": OrderedDict( + [ + ("number", 0), + ] + ), + "extra": OrderedDict([("recipe-maintainers", "")]), } + # Note the {} formatting bits here DISTUTILS_PATCH = '''\ diff core.py core.py @@ -102,8 +129,8 @@ +import io +import os.path +import sys -+import yaml -+from yaml import Loader, SafeLoader ++import ruamel.yaml as yaml ++from ruamel.yaml import Loader, SafeLoader + +# Override the default string handling function to always return unicode +# objects (taken from StackOverflow) @@ -136,7 +163,7 @@ def run_setup (script_name, script_args=None, stop_after="run"): """Run a setup script in a somewhat controlled environment, and ''' -INDENT = '\n - ' +INDENT = "\n - " def _ssl_no_verify(): @@ -145,22 +172,22 @@ def _ssl_no_verify(): This provides a workaround for users in some corporate environments where MITM style proxies make it difficult to fetch data over HTTPS. """ - return os.environ.get('SSL_NO_VERIFY', '').strip().lower() in ('1', 'true') + return os.environ.get("SSL_NO_VERIFY", "").strip().lower() in ("1", "true") def package_exists(package_name, pypi_url=None): if not pypi_url: - pypi_url = 'https://pypi.io/pypi' + pypi_url = "https://pypi.io/pypi" # request code will be 404 if the package does not exist. Requires exact match. 
- r = requests.get(pypi_url + '/' + package_name, verify=not _ssl_no_verify()) + r = requests.get(pypi_url + "/" + package_name, verify=not _ssl_no_verify()) return r.status_code != 404 -def __print_with_indent(line, prefix='', suffix='', level=0, newline=True): - output = '' +def __print_with_indent(line, prefix="", suffix="", level=0, newline=True): + output = "" if level: - output = ' ' * level - return output + prefix + line + suffix + ('\n' if newline else '') + output = " " * level + return output + prefix + line + suffix + ("\n" if newline else "") def _print_dict(recipe_metadata, order=None, level=0, indent=2): @@ -173,30 +200,40 @@ def _print_dict(recipe_metadata, order=None, level=0, indent=2): :param int indent: Indentation - Number of empty spaces for each level :return string: Recipe rendered with the metadata """ - rendered_recipe = '' + rendered_recipe = "" if not order: order = sorted(list(recipe_metadata.keys())) for section_name in order: if section_name in recipe_metadata and recipe_metadata[section_name]: - rendered_recipe += __print_with_indent(section_name, suffix=':') - for attribute_name, attribute_value in recipe_metadata[section_name].items(): + rendered_recipe += __print_with_indent(section_name, suffix=":") + for attribute_name, attribute_value in recipe_metadata[ + section_name + ].items(): if attribute_value is None: continue - if isinstance(attribute_value, str) or not hasattr(attribute_value, "__iter__"): - rendered_recipe += __print_with_indent(attribute_name, suffix=':', level=level + indent, - newline=False) + if isinstance(attribute_value, str) or not hasattr( + attribute_value, "__iter__" + ): + rendered_recipe += __print_with_indent( + attribute_name, suffix=":", level=level + indent, newline=False + ) rendered_recipe += _formating_value(attribute_name, attribute_value) - elif hasattr(attribute_value, 'keys'): - rendered_recipe += _print_dict(attribute_value, sorted(list(attribute_value.keys()))) + elif hasattr(attribute_value, "keys"): + rendered_recipe += _print_dict( + attribute_value, sorted(list(attribute_value.keys())) + ) # assume that it's a list if it exists at all elif attribute_value: - rendered_recipe += __print_with_indent(attribute_name, suffix=':', level=level + indent) + rendered_recipe += __print_with_indent( + attribute_name, suffix=":", level=level + indent + ) for item in attribute_value: - rendered_recipe += __print_with_indent(item, prefix='- ', - level=level + indent) + rendered_recipe += __print_with_indent( + item, prefix="- ", level=level + indent + ) # add a newline in between sections if level == 0: - rendered_recipe += '\n' + rendered_recipe += "\n" return rendered_recipe @@ -209,71 +246,87 @@ def _formating_value(attribute_name, attribute_value): :param string attribute_value: Attribute value :return string: Value quoted if need """ - pattern_search = re.compile(r'[@_!#$%^&*()<>?/\|}{~:]') - if isinstance(attribute_value, str) \ - and pattern_search.search(attribute_value) \ - or attribute_name in ["summary", "description", "version", "script"]: + pattern_search = re.compile(r"[@_!#$%^&*()<>?/\|}{~:]") + if ( + isinstance(attribute_value, str) + and pattern_search.search(attribute_value) + or attribute_name in ["summary", "description", "version", "script"] + ): return ' "' + str(attribute_value) + '"\n' - return ' ' + str(attribute_value) + '\n' - - -def skeletonize(packages, output_dir=".", version=None, recursive=False, - all_urls=False, pypi_url='https://pypi.io/pypi/', noprompt=True, - version_compare=False, 
python_version=None, manual_url=False, - all_extras=False, noarch_python=False, config=None, setup_options=None, - extra_specs=[], - pin_numpy=False): + return " " + str(attribute_value) + "\n" + + +def skeletonize( + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + all_urls: bool = False, + pypi_url: str = "https://pypi.io/pypi/", + noprompt: bool = True, + version_compare: bool = False, + python_version: str | None = None, + manual_url: bool = False, + all_extras: bool = False, + noarch_python: bool = False, + config: Config | None = None, + setup_options: str | Iterable[str] | None = None, + extra_specs: str | Iterable[str] | None = None, + pin_numpy: bool = False, +) -> None: package_dicts = {} - if not setup_options: - setup_options = [] - - if isinstance(setup_options, str): - setup_options = [setup_options] + setup_options = ensure_list(setup_options) + extra_specs = ensure_list(extra_specs) if not config: config = Config() - python_version = python_version or config.variant.get('python', default_python) + if not python_version: + python_version = config.variant.get("python", context.default_python) created_recipes = [] while packages: package = packages.pop() created_recipes.append(package) - is_url = ':' in package + is_url = ":" in package if is_url: - package_pypi_url = '' + package_pypi_url = "" else: - package_pypi_url = urljoin(pypi_url, '/'.join((package, 'json'))) + package_pypi_url = urljoin(pypi_url, "/".join((package, "json"))) if not is_url: dir_path = join(output_dir, package.lower()) if exists(dir_path) and not version_compare: - raise RuntimeError("directory already exists: %s" % dir_path) - d = package_dicts.setdefault(package, + raise RuntimeError(f"directory already exists: {dir_path}") + d = package_dicts.setdefault( + package, { - 'packagename': package, - 'run_depends': '', - 'build_depends': '', - 'entry_points': '', - 'test_commands': '', - 'tests_require': '', - }) + "packagename": package, + "run_depends": "", + "build_depends": "", + "entry_points": "", + "test_commands": "", + "tests_require": "", + }, + ) if is_url: - del d['packagename'] + del d["packagename"] if is_url: - d['version'] = 'UNKNOWN' + d["version"] = "UNKNOWN" # Make sure there is always something to pass in for this pypi_data = {} else: pypi_resp = requests.get(package_pypi_url, verify=not _ssl_no_verify()) if pypi_resp.status_code != 200: - sys.exit("Request to fetch %s failed with status: %d" - % (package_pypi_url, pypi_resp.status_code)) + sys.exit( + "Request to fetch %s failed with status: %d" + % (package_pypi_url, pypi_resp.status_code) + ) pypi_data = pypi_resp.json() @@ -283,65 +336,74 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, version_compare(versions) if version: if version not in versions: - sys.exit("Error: Version %s of %s is not available on PyPI." - % (version, package)) - d['version'] = version + sys.exit( + f"Error: Version {version} of {package} is not available on PyPI." + ) + d["version"] = version else: # select the most visible version from PyPI. 
if not versions: - sys.exit("Error: Could not find any versions of package %s" % package) + sys.exit(f"Error: Could not find any versions of package {package}") if len(versions) > 1: - print("Warning, the following versions were found for %s" % - package) + print(f"Warning, the following versions were found for {package}") for ver in versions: print(ver) - print("Using %s" % versions[-1]) + print(f"Using {versions[-1]}") print("Use --version to specify a different version.") - d['version'] = versions[-1] + d["version"] = versions[-1] - data, d['pypiurl'], d['filename'], d['digest'] = get_download_data(pypi_data, - package, - d['version'], - is_url, all_urls, - noprompt, manual_url) + data, d["pypiurl"], d["filename"], d["digest"] = get_download_data( + pypi_data, package, d["version"], is_url, all_urls, noprompt, manual_url + ) - d['import_tests'] = '' + d["import_tests"] = "" # Get summary directly from the metadata returned # from PyPI. summary will be pulled from package information in # get_package_metadata or a default value set if it turns out that # data['summary'] is empty. Ignore description as it is too long. - d['summary'] = data.get('summary', '') - get_package_metadata(package, d, data, output_dir, python_version, - all_extras, recursive, created_recipes, noarch_python, - noprompt, packages, extra_specs, config=config, - setup_options=setup_options) + d["summary"] = data.get("summary", "") + get_package_metadata( + package, + d, + data, + output_dir, + python_version, + all_extras, + recursive, + created_recipes, + noarch_python, + noprompt, + packages, + extra_specs, + config=config, + setup_options=setup_options, + ) # Set these *after* get_package_metadata so that the preferred hash # can be calculated from the downloaded file, if necessary. - d['hash_type'] = d['digest'][0] - d['hash_value'] = d['digest'][1] + d["hash_type"] = d["digest"][0] + d["hash_value"] = d["digest"][1] # Change requirements to use format that guarantees the numpy # version will be pinned when the recipe is built and that # the version is included in the build string. if pin_numpy: - for depends in ['build_depends', 'run_depends']: + for depends in ["build_depends", "run_depends"]: deps = d[depends] - numpy_dep = [idx for idx, dep in enumerate(deps) - if 'numpy' in dep] + numpy_dep = [idx for idx, dep in enumerate(deps) if "numpy" in dep] if numpy_dep: # Turns out this needs to be inserted before the rest # of the numpy spec. - deps.insert(numpy_dep[0], 'numpy x.x') + deps.insert(numpy_dep[0], "numpy x.x") d[depends] = deps for package in package_dicts: d = package_dicts[package] - name = d['packagename'].lower() + name = d["packagename"].lower() makedirs(join(output_dir, name)) - print("Writing recipe for %s" % package.lower()) - with open(join(output_dir, name, 'meta.yaml'), 'w') as f: + print(f"Writing recipe for {package.lower()}") + with open(join(output_dir, name, "meta.yaml"), "w") as f: rendered_recipe = PYPI_META_HEADER.format(**d) ordered_recipe = OrderedDict() @@ -352,47 +414,55 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, except KeyError: ordered_recipe[key] = OrderedDict() - if '://' not in pypi_url: + if "://" not in pypi_url: raise ValueError("pypi_url must have protocol (e.g. 
http://) included") base_url = urlsplit(pypi_url) base_url = "://".join((base_url.scheme, base_url.netloc)) - ordered_recipe['source']['url'] = urljoin(base_url, ordered_recipe['source']['url']) - ordered_recipe['source']['sha256'] = d['hash_value'] + ordered_recipe["source"]["url"] = urljoin( + base_url, ordered_recipe["source"]["url"] + ) + ordered_recipe["source"]["sha256"] = d["hash_value"] - if d['entry_points']: - ordered_recipe['build']['entry_points'] = d['entry_points'] + if d["entry_points"]: + ordered_recipe["build"]["entry_points"] = d["entry_points"] if noarch_python: - ordered_recipe['build']['noarch'] = 'python' + ordered_recipe["build"]["noarch"] = "python" - recipe_script_cmd = ["{{ PYTHON }} -m pip install . -vv"] - ordered_recipe['build']['script'] = ' '.join(recipe_script_cmd + setup_options) + recipe_script_cmd = [ + "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation" + ] + ordered_recipe["build"]["script"] = " ".join( + recipe_script_cmd + setup_options + ) # Always require python as a dependency. Pip is because we use pip for # the install line. - ordered_recipe['requirements'] = OrderedDict() - ordered_recipe['requirements']['host'] = sorted(set(['python', 'pip'] + - list(d['build_depends']))) - ordered_recipe['requirements']['run'] = sorted(set(['python'] + - list(d['run_depends']))) + ordered_recipe["requirements"] = OrderedDict() + ordered_recipe["requirements"]["host"] = sorted( + set(["python", "pip"] + list(d["build_depends"])) + ) + ordered_recipe["requirements"]["run"] = sorted( + set(["python"] + list(d["run_depends"])) + ) - if d['import_tests']: - ordered_recipe['test']['imports'] = d['import_tests'] + if d["import_tests"]: + ordered_recipe["test"]["imports"] = d["import_tests"] - if d['test_commands']: - ordered_recipe['test']['commands'] = d['test_commands'] + if d["test_commands"]: + ordered_recipe["test"]["commands"] = d["test_commands"] - if d['tests_require']: - ordered_recipe['test']['requires'] = d['tests_require'] + if d["tests_require"]: + ordered_recipe["test"]["requires"] = d["tests_require"] - ordered_recipe['about'] = OrderedDict() + ordered_recipe["about"] = OrderedDict() for key in ABOUT_ORDER: try: - ordered_recipe['about'][key] = d[key] + ordered_recipe["about"][key] = d[key] except KeyError: - ordered_recipe['about'][key] = '' - ordered_recipe['extra']['recipe-maintainers'] = ['your-github-id-here'] + ordered_recipe["about"][key] = "" + ordered_recipe["extra"]["recipe-maintainers"] = ["your-github-id-here"] # Prune any top-level sections that are empty rendered_recipe += _print_dict(ordered_recipe, EXPECTED_SECTION_ORDER) @@ -410,14 +480,13 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, # Fix the indents recipe_lines = [] for line in rendered_recipe.splitlines(): - match = re.search(r'^\s+(-) ', line, - flags=re.MULTILINE) + match = re.search(r"^\s+(-) ", line, flags=re.MULTILINE) if match: - pre, sep, post = line.partition('-') - sep = ' ' + sep + pre, sep, post = line.partition("-") + sep = " " + sep line = pre + sep + post recipe_lines.append(line) - rendered_recipe = '\n'.join(recipe_lines) + rendered_recipe = "\n".join(recipe_lines) f.write(rendered_recipe) @@ -434,7 +503,7 @@ def add_parser(repos): ) pypi.add_argument( "packages", - nargs='+', + nargs="+", help="""PyPi packages to create recipe skeletons for. You can also specify package[extra,...] 
features.""", ) @@ -456,7 +525,7 @@ def add_parser(repos): ) pypi.add_argument( "--pypi-url", - default='https://pypi.io/pypi/', + default="https://pypi.io/pypi/", help="URL to use for PyPI (default: %(default)s).", ) pypi.add_argument( @@ -465,7 +534,7 @@ def add_parser(repos): default=True, dest="noprompt", help="""Prompt the user on ambiguous choices. Default is to make the - best possible choice and continue.""" + best possible choice and continue.""", ) pypi.add_argument( "--all-extras", @@ -475,66 +544,70 @@ def add_parser(repos): ) pypi.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.' + action="store_true", + help="Create recipes for dependencies if they do not already exist.", ) pypi.add_argument( "--version-compare", - action='store_true', + action="store_true", help="""Compare the package version of the recipe with all available - versions on PyPI.""" + versions on PyPI.""", ) pypi.add_argument( "--python-version", - action='store', - default=default_python, + action="store", + default=context.default_python, help="""Version of Python to use to run setup.py. Default is %(default)s.""", - choices=['2.7', '3.5', '3.6', '3.7', '3.8'], + choices=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], ) pypi.add_argument( "--manual-url", - action='store_true', + action="store_true", default=False, - help=("Manually choose source url when more than one urls are present." - "Default is the one with least source size.") + help=( + "Manually choose source url when more than one urls are present." + "Default is the one with least source size." + ), ) pypi.add_argument( "--noarch-python", - action='store_true', + action="store_true", default=False, - help="Creates recipe as noarch python" + help="Creates recipe as noarch python", ) pypi.add_argument( "--setup-options", - action='append', + action="append", default=[], - help='Options to be added to setup.py install in the recipe. ' - 'The same options are passed to setup.py install in both ' - 'the construction of the recipe and in the recipe itself.' - 'For options that include a double-hypen or to pass multiple ' - 'options, use the syntax ' - '--setup-options="--option1 --option-with-arg arg"' + help="Options to be added to setup.py install in the recipe. " + "The same options are passed to setup.py install in both " + "the construction of the recipe and in the recipe itself." + "For options that include a double-hypen or to pass multiple " + "options, use the syntax " + '--setup-options="--option1 --option-with-arg arg"', ) pypi.add_argument( "--pin-numpy", - action='store_true', + action="store_true", help="Ensure that the generated recipe pins the version of numpy" - "to CONDA_NPY." + "to CONDA_NPY.", ) pypi.add_argument( "--extra-specs", - action='append', + action="append", default=[], help="Extra specs for the build environment to extract the skeleton.", ) -def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, manual_url): +def get_download_data( + pypi_data, package, version, is_url, all_urls, noprompt, manual_url +): """ Get at least one valid *source* download URL or fail. @@ -550,72 +623,76 @@ def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, m digest : dict Key is type of checksum, value is the checksum. 
""" - data = pypi_data['info'] if not is_url else {} + data = pypi_data["info"] if not is_url else {} # PyPI will typically have several downloads (source, wheels) for one # package/version. - urls = [url for url in pypi_data['releases'][version]] if not is_url else [package] + urls = [url for url in pypi_data["releases"][version]] if not is_url else [package] if not is_url and not all_urls: # Try to find source urls - urls = [url for url in urls if url['packagetype'] == 'sdist'] + urls = [url for url in urls if url["packagetype"] == "sdist"] if not urls: # Try harder for a download location - if data.get('download_url'): - urls = [defaultdict(str, {'url': data['download_url']})] - if not urls[0]['url']: + if data.get("download_url"): + urls = [defaultdict(str, {"url": data["download_url"]})] + if not urls[0]["url"]: # The package doesn't have a url, or maybe it only has a wheel. - sys.exit("Error: Could not build recipe for %s. " - "Could not find any valid urls." % package) - U = parse_url(urls[0]['url']) + sys.exit( + f"Error: Could not build recipe for {package}. " + "Could not find any valid urls." + ) + U = parse_url(urls[0]["url"]) if not U.path: - sys.exit("Error: Could not parse url for %s: %s" % - (package, U)) - urls[0]['filename'] = U.path.rsplit('/')[-1] - fragment = U.fragment or '' + sys.exit(f"Error: Could not parse url for {package}: {U}") + urls[0]["filename"] = U.path.rsplit("/")[-1] + fragment = U.fragment or "" digest = fragment.split("=") else: - sys.exit("Error: No source urls found for %s" % package) + sys.exit(f"Error: No source urls found for {package}") if len(urls) > 1 and not noprompt: - print("More than one source version is available for %s:" % - package) + print(f"More than one source version is available for {package}:") if manual_url: for i, url in enumerate(urls): - print("%d: %s (%s) %s" % (i, url['url'], - human_bytes(url['size']), url['comment_text'])) + print( + "%d: %s (%s) %s" + % (i, url["url"], human_bytes(url["size"]), url["comment_text"]) + ) n = int(input("which version should i use? ")) else: print("Using the one with the least source size") print("use --manual-url to override this behavior") - _, n = min((url['size'], i) - for (i, url) in enumerate(urls)) + _, n = min((url["size"], i) for (i, url) in enumerate(urls)) else: n = 0 if not is_url: # Found a location from PyPI. url = urls[n] - pypiurl = url['url'] - print("Using url {} ({}) for {}.".format(pypiurl, - human_bytes(url['size'] or 0), package)) + pypiurl = url["url"] + print( + "Using url {} ({}) for {}.".format( + pypiurl, human_bytes(url["size"] or 0), package + ) + ) - if url['digests']['sha256']: - digest = ('sha256', url['digests']['sha256']) + if url["digests"]["sha256"]: + digest = ("sha256", url["digests"]["sha256"]) else: # That didn't work, even though as of 7/17/2017 some packages # have a 'digests' entry. # As a last-ditch effort, try for the md5_digest entry. digest = () - filename = url['filename'] or 'package' + filename = url["filename"] or "package" else: # User provided a URL, try to use it. 
- print("Using url %s" % package) + print(f"Using url {package}") pypiurl = package U = parse_url(package) digest = U.fragment.split("=") # TODO: 'package' won't work with unpack() - filename = U.path.rsplit('/', 1)[-1] or 'package' + filename = U.path.rsplit("/", 1)[-1] or "package" return (data, pypiurl, filename, digest) @@ -632,46 +709,46 @@ def version_compare(package, versions): recipe_dir = abspath(package.lower()) if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) + sys.exit(f"Error: no such directory: {recipe_dir}") m = MetaData(recipe_dir) local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") if local_version not in versions: - sys.exit("Error: %s %s is not available on PyPI." - % (package, local_version)) + sys.exit(f"Error: {package} {local_version} is not available on PyPI.") else: # Comparing normalized versions, displaying non normalized ones - new_versions = versions[:norm_versions.index(local_version)] + new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: - print("Following new versions of %s are avaliable" % (package)) + print(f"Following new versions of {package} are avaliable") for ver in new_versions: print(ver) else: - print("No new version for %s is available" % (package)) + print(f"No new version for {package} is available") sys.exit() def convert_version(version): """Convert version into a pin-compatible format according to PEP440.""" - version_parts = version.split('.') - suffixes = ('post', 'pre') + version_parts = version.split(".") + suffixes = ("post", "pre") if any(suffix in version_parts[-1] for suffix in suffixes): version_parts.pop() # the max pin length is n-1, but in terms of index this is n-2 max_ver_len = len(version_parts) - 2 version_parts[max_ver_len] = int(version_parts[max_ver_len]) + 1 - max_pin = '.'.join(str(v) for v in version_parts[:max_ver_len + 1]) - pin_compatible = ' >={},<{}' .format(version, max_pin) + max_pin = ".".join(str(v) for v in version_parts[: max_ver_len + 1]) + pin_compatible = f" >={version},<{max_pin}" return pin_compatible -MARKER_RE = re.compile(r"(?P^[^=<>!~\s;]+)" - r"\s*" - r"(?P[=!><~]=?\s*[^\s;]+)?" - r"(?:\s*;\s+)?(?P[^=<>!~\s;]+)?" - r"\s*" - r"(?P[=<>!\s]+[^=<>!~\s]+)?" - ) +MARKER_RE = re.compile( + r"(?P^[^=<>!~\s;]+)" + r"\s*" + r"(?P[=!><~]=?\s*[^\s;]+)?" + r"(?:\s*;\s+)?(?P[^=<>!~\s;]+)?" + r"\s*" + r"(?P[=<>!\s]+[^=<>!~\s]+)?" +) def _get_env_marker_operator_and_value(constraint): @@ -702,7 +779,7 @@ def _translate_platform_system_constraint(constraint): def _translate_sys_platform_constraint(constraint): operator, value = _get_env_marker_operator_and_value(constraint) # Only take the "letter" part to translate, e.g., "linux2"->"linux", "win32"->"win". 
- system = re.match('^[a-z]*', value, re.I)[0] + system = re.match("^[a-z]*", value, re.I)[0] return "{}{}".format("not " if operator == "!=" else "", system) @@ -717,7 +794,7 @@ def env_mark_lookup(env_mark_name, env_mark_constraint): "sys_platform": _translate_sys_platform_constraint, } marker = env_mark_table[env_mark_name](env_mark_constraint) - return ' # [ ' + marker + ' ]' + return " # [ " + marker + " ]" def parse_dep_with_env_marker(dep_str): @@ -727,25 +804,40 @@ def parse_dep_with_env_marker(dep_str): name = " ".join((name, match.group("constraint").replace(" ", ""))) env_mark = "" if match.group("env_mark_name"): - env_mark = env_mark_lookup(match.group("env_mark_name"), - match.group("env_mark_constraint")) + env_mark = env_mark_lookup( + match.group("env_mark_name"), match.group("env_mark_constraint") + ) return name, env_mark -def get_package_metadata(package, metadata, data, output_dir, python_version, all_extras, - recursive, created_recipes, noarch_python, no_prompt, packages, - extra_specs, config, setup_options): - - print("Downloading %s" % package) - print("PyPI URL: ", metadata['pypiurl']) - pkginfo = get_pkginfo(package, - filename=metadata['filename'], - pypiurl=metadata['pypiurl'], - digest=metadata['digest'], - python_version=python_version, - extra_specs=extra_specs, - setup_options=setup_options, - config=config) +def get_package_metadata( + package, + metadata, + data, + output_dir, + python_version, + all_extras, + recursive, + created_recipes, + noarch_python, + no_prompt, + packages, + extra_specs, + config, + setup_options, +): + print(f"Downloading {package}") + print("PyPI URL: ", metadata["pypiurl"]) + pkginfo = get_pkginfo( + package, + filename=metadata["filename"], + pypiurl=metadata["pypiurl"], + digest=metadata["digest"], + python_version=python_version, + extra_specs=extra_specs, + setup_options=setup_options, + config=config, + ) metadata.update(get_entry_points(pkginfo)) @@ -754,34 +846,35 @@ def get_package_metadata(package, metadata, data, output_dir, python_version, al if requires or is_setuptools_enabled(pkginfo): list_deps = get_dependencies(requires, is_setuptools_enabled(pkginfo)) - metadata['build_depends'] = ['pip'] + list_deps + metadata["build_depends"] = ["pip"] + list_deps # Never add setuptools to runtime dependencies. 
- metadata['run_depends'] = list_deps + metadata["run_depends"] = list_deps if recursive: packages += get_recursive_deps(created_recipes, list_deps, output_dir) - if 'packagename' not in metadata: - metadata['packagename'] = pkginfo['name'].lower() + if "packagename" not in metadata: + metadata["packagename"] = pkginfo["name"].lower() - if metadata['version'] == 'UNKNOWN': - metadata['version'] = pkginfo['version'] + if metadata["version"] == "UNKNOWN": + metadata["version"] = pkginfo["version"] metadata["import_tests"] = get_import_tests(pkginfo, metadata.get("import_tests")) - metadata['tests_require'] = get_tests_require(pkginfo) + metadata["tests_require"] = get_tests_require(pkginfo) metadata["home"] = get_home(pkginfo, data) if not metadata.get("summary"): metadata["summary"] = get_summary(pkginfo) - metadata["summary"] = get_summary(pkginfo) license_name = get_license_name(package, pkginfo, no_prompt, data) metadata["license"] = clean_license_name(license_name) - metadata['license_family'] = guess_license_family(license_name, allowed_license_families) + metadata["license_family"] = guess_license_family( + license_name, allowed_license_families + ) - if 'new_hash_value' in pkginfo: - metadata['digest'] = pkginfo['new_hash_value'] + if "new_hash_value" in pkginfo: + metadata["digest"] = pkginfo["new_hash_value"] def get_recursive_deps(created_recipes, list_deps, output_dir): @@ -813,7 +906,7 @@ def get_dependencies(requires, setuptools_enabled=True): # START :: Copied from conda # These can be removed if we want to drop support for conda <= 4.9.0 def _strip_comment(line): - return line.split('#')[0].rstrip() + return line.split("#")[0].rstrip() def _spec_from_line(line): spec_pat = re.compile( @@ -831,21 +924,23 @@ def _spec_from_line(line): m = spec_pat.match(_strip_comment(line)) if m is None: return None - name, cc, pc = (m.group('name').lower(), m.group('cc'), m.group('pc')) + name, cc, pc = (m.group("name").lower(), m.group("cc"), m.group("pc")) if cc: - return name + cc.replace('=', ' ') + return name + cc.replace("=", " ") elif pc: - if pc.startswith('~= '): - assert pc.count('~=') == 1, \ - f"Overly complex 'Compatible release' spec not handled {line}" - assert pc.count('.'), f"No '.' in 'Compatible release' version {line}" - ver = pc.replace('~= ', '') - ver2 = '.'.join(ver.split('.')[:-1]) + '.*' - return name + ' >=' + ver + ',==' + ver2 + if pc.startswith("~= "): + assert ( + pc.count("~=") == 1 + ), f"Overly complex 'Compatible release' spec not handled {line}" + assert pc.count("."), f"No '.' in 'Compatible release' version {line}" + ver = pc.replace("~= ", "") + ver2 = ".".join(ver.split(".")[:-1]) + ".*" + return name + " >=" + ver + ",==" + ver2 else: - return name + ' ' + pc.replace(' ', '') + return name + " " + pc.replace(" ", "") else: return name + # END :: Copied from conda list_deps = ["setuptools"] if setuptools_enabled else [] @@ -857,7 +952,7 @@ def _spec_from_line(line): # or a multiline requirements string... for dep in dep_text: # ... and may also contain comments... 
- dep = dep.split('#')[0].strip() + dep = dep.split("#")[0].strip() if not dep: continue @@ -873,20 +968,22 @@ def _spec_from_line(line): # if spec != spec2: # print("Disagreement on PEP440 'Compatible release' {} vs {}".format(spec, spec2)) spec = spec_from_line(dep) - if '~=' in dep_orig: + if "~=" in dep_orig: spec = None if spec is None: - if '~=' in dep_orig: + if "~=" in dep_orig: log = logging.getLogger(__name__) - log.warning("Your conda is too old to handle ~= PEP440 'Compatible versions', " - "using copied implementation.") + log.warning( + "Your conda is too old to handle ~= PEP440 'Compatible versions', " + "using copied implementation." + ) spec = _spec_from_line(dep_orig) if spec is None: - sys.exit("Error: Could not parse: %s" % dep) + sys.exit(f"Error: Could not parse: {dep}") if marker: - spec = ' '.join((spec, marker)) + spec = " ".join((spec, marker)) list_deps.append(spec) return list_deps @@ -904,29 +1001,25 @@ def get_import_tests(pkginfo, import_tests_metada=""): olddeps = [] if import_tests_metada != "PLACEHOLDER": - olddeps = [ - x for x in import_tests_metada.split() if x != "-" - ] + olddeps = [x for x in import_tests_metada.split() if x != "-"] return sorted(set(olddeps) | set(pkginfo["packages"])) def get_tests_require(pkginfo): - return sorted( - spec_from_line(pkg) for pkg in ensure_list(pkginfo['tests_require']) - ) + return sorted(spec_from_line(pkg) for pkg in ensure_list(pkginfo["tests_require"])) def get_home(pkginfo, data=None): default_home = "The package home page" - if pkginfo.get('home'): - return pkginfo['home'] + if pkginfo.get("home"): + return pkginfo["home"] if data: return data.get("home", default_home) return default_home def get_summary(pkginfo): - return pkginfo.get("summary", "Summary of the package").replace('"', r'\"') + return pkginfo.get("summary", "Summary of the package").replace('"', r"\"") def get_license_name(package, pkginfo, no_prompt=False, data=None): @@ -940,7 +1033,7 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_classifier = "License :: OSI Approved :: " data_classifier = data.get("classifiers", []) if data else [] - pkg_classifier = pkginfo.get('classifiers', data_classifier) + pkg_classifier = pkginfo.get("classifiers", data_classifier) pkg_classifier = pkg_classifier if pkg_classifier else data_classifier licenses = [ @@ -950,23 +1043,23 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): ] if licenses: - return ' or '.join(licenses) + return " or ".join(licenses) - if pkginfo.get('license'): - license_name = pkginfo['license'] - elif data and 'license' in data: - license_name = data['license'] + if pkginfo.get("license"): + license_name = pkginfo["license"] + elif data and "license" in data: + license_name = data["license"] else: license_name = None if license_name: if no_prompt: return license_name - elif '\n' not in license_name: - print('Using "%s" for the license' % license_name) + elif "\n" not in license_name: + print(f'Using "{license_name}" for the license') else: # Some projects put the whole license text in this field - print("This is the license for %s" % package) + print(f"This is the license for {package}") print() print(license_name) print() @@ -975,8 +1068,8 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_name = "UNKNOWN" else: license_name = input( - "No license could be found for %s on PyPI or in the source. " - "What license should I use? " % package + f"No license could be found for {package} on PyPI or in the source. 
" + "What license should I use? " ) return license_name @@ -986,7 +1079,7 @@ def clean_license_name(license_name): :param str license_name: Receives the license name :return str: Return a string without the word ``license`` """ - return re.subn(r'(.*)\s+license', r'\1', license_name, flags=re.IGNORECASE)[0] + return re.subn(r"(.*)\s+license", r"\1", license_name, flags=re.IGNORECASE)[0] def get_entry_points(pkginfo): @@ -994,7 +1087,7 @@ def get_entry_points(pkginfo): :param pkginfo: :return dict: """ - entry_points = pkginfo.get('entry_points') + entry_points = pkginfo.get("entry_points") if not entry_points: return {} @@ -1011,26 +1104,22 @@ def get_entry_points(pkginfo): else: entry_points = {} for section in _config.sections(): - if section in ['console_scripts', 'gui_scripts']: + if section in ["console_scripts", "gui_scripts"]: entry_points[section] = [ - f'{option}={_config.get(section, option)}' + f"{option}={_config.get(section, option)}" for option in _config.options(section) ] if isinstance(entry_points, dict): - console_script = convert_to_flat_list( - entry_points.get('console_scripts', []) - ) - gui_scripts = convert_to_flat_list( - entry_points.get('gui_scripts', []) - ) + console_script = convert_to_flat_list(entry_points.get("console_scripts", [])) + gui_scripts = convert_to_flat_list(entry_points.get("gui_scripts", [])) # TODO: Use pythonw for gui scripts entry_list = console_script + gui_scripts if entry_list: return { "entry_points": entry_list, - "test_commands": make_entry_tests(entry_list) + "test_commands": make_entry_tests(entry_list), } else: print("WARNING: Could not add entry points. They were:") @@ -1048,7 +1137,11 @@ def convert_to_flat_list(var_scripts): """ if isinstance(var_scripts, str): var_scripts = [var_scripts] - elif var_scripts and isinstance(var_scripts, list) and isinstance(var_scripts[0], list): + elif ( + var_scripts + and isinstance(var_scripts, list) + and isinstance(var_scripts[0], list) + ): var_scripts = [item for sublist in [s for s in var_scripts] for item in sublist] return var_scripts @@ -1064,28 +1157,31 @@ def is_setuptools_enabled(pkginfo): # We have *other* kinds of entry-points so we need # setuptools at run-time - if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}: + if set(entry_points.keys()) - {"console_scripts", "gui_scripts"}: return True return False def valid(name): - if (re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) and not keyword.iskeyword(name)): + if re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) and not keyword.iskeyword(name): return name else: - return '' + return "" def unpack(src_path, tempdir): if src_path.lower().endswith(decompressible_exts): tar_xf(src_path, tempdir) else: - raise Exception("not a valid source: %s" % src_path) + raise Exception(f"not a valid source: {src_path}") def get_dir(tempdir): - lst = [fn for fn in listdir(tempdir) if not fn.startswith('.') and - isdir(join(tempdir, fn))] + lst = [ + fn + for fn in listdir(tempdir) + if not fn.startswith(".") and isdir(join(tempdir, fn)) + ] if len(lst) == 1: dir_path = join(tempdir, lst[0]) if isdir(dir_path): @@ -1097,31 +1193,31 @@ def get_dir(tempdir): def get_requirements(package, pkginfo, all_extras=True): # Look for package[extra,...] 
features spec: - match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package) + match_extras = re.match(r"^([^[]+)\[([^]]+)\]$", package) if match_extras: package, extras = match_extras.groups() - extras = extras.split(',') + extras = extras.split(",") else: extras = [] # Extract requested extra feature requirements... if all_extras: - extras_require = list(pkginfo['extras_require'].values()) + extras_require = list(pkginfo["extras_require"].values()) else: try: - extras_require = [pkginfo['extras_require'][x] for x in extras] + extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ','.join(extras)) + sys.exit("Error: Invalid extra features: [{}]".format(",".join(extras))) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator - version_marker = re.compile(r'^:python_version(<|<=|!=|==|>=|>)(.+)$') - for extra in pkginfo['extras_require']: + version_marker = re.compile(r"^:python_version(<|<=|!=|==|>=|>)(.+)$") + for extra in pkginfo["extras_require"]: match_ver_mark = version_marker.match(extra) if match_ver_mark: op, ver = match_ver_mark.groups() try: - ver_tuple = tuple(int(x) for x in ver.strip('\'"').split(".")) + ver_tuple = tuple(int(x) for x in ver.strip("'\"").split(".")) except ValueError: pass # bad match; abort else: @@ -1138,11 +1234,11 @@ def get_requirements(package, pkginfo, all_extras=True): else: # op == ">": satisfies_ver = sys.version_info > ver_tuple if satisfies_ver: - extras_require += pkginfo['extras_require'][extra] + extras_require += pkginfo["extras_require"][extra] # ... and collect all needed requirement specs in a single list: requires = [] - for specs in [pkginfo.get('install_requires', "")] + extras_require: + for specs in [pkginfo.get("install_requires", "")] + extras_require: if isinstance(specs, str): requires.append(specs) else: @@ -1151,15 +1247,23 @@ def get_requirements(package, pkginfo, all_extras=True): return requires -def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, config, - setup_options): +def get_pkginfo( + package, + filename, + pypiurl, + digest, + python_version, + extra_specs, + config, + setup_options, +): # Unfortunately, two important pieces of metadata are only stored in # the package itself: the dependencies, and the entry points (if the # package uses distribute). Our strategy is to download the package # and "fake" distribute/setuptools's setup() function to get this # information from setup.py. If this sounds evil, keep in mind that # distribute itself already works by monkeypatching distutils. - tempdir = mkdtemp('conda_skeleton_' + filename) + tempdir = mkdtemp("conda_skeleton_" + filename) if not isdir(config.src_cache): makedirs(config.src_cache) @@ -1170,39 +1274,51 @@ def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, # Download it to the build source cache. That way, you have # it. download_path = join(config.src_cache, filename) - if not isfile(download_path) or \ - hashsum_file(download_path, hash_type) != hash_value: + if ( + not isfile(download_path) + or compute_sum(download_path, hash_type) != hash_value + ): download(pypiurl, join(config.src_cache, filename)) - if hashsum_file(download_path, hash_type) != hash_value: - raise RuntimeError(' Download of {} failed' - ' checksum type {} expected value {}. 
Please' - ' try again.'.format(package, hash_type, hash_value)) + if compute_sum(download_path, hash_type) != hash_value: + raise RuntimeError( + f" Download of {package} failed" + f" checksum type {hash_type} expected value {hash_value}. Please" + " try again." + ) else: print("Using cached download") # Calculate the preferred hash type here if necessary. # Needs to be done in this block because this is where we have # access to the source file. - if hash_type != 'sha256': - new_hash_value = hashsum_file(download_path, 'sha256') + if hash_type != "sha256": + new_hash_value = compute_sum(download_path, "sha256") else: - new_hash_value = '' + new_hash_value = "" - print("Unpacking %s..." % package) + print(f"Unpacking {package}...") unpack(join(config.src_cache, filename), tempdir) print("done") - print("working in %s" % tempdir) + print(f"working in {tempdir}") src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy - run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config, - setup_options=setup_options) + run_setuppy( + src_dir, + tempdir, + python_version, + extra_specs=extra_specs, + config=config, + setup_options=setup_options, + ) try: - with open(join(tempdir, 'pkginfo.yaml')) as fn: + with open(join(tempdir, "pkginfo.yaml")) as fn: pkg_info = yaml.safe_load(fn) except OSError: - print("WARNING: the pkginfo.yaml file was absent, falling back to pkginfo.SDist") + print( + "WARNING: the pkginfo.yaml file was absent, falling back to pkginfo.SDist" + ) pkg_info = pkginfo.SDist(download_path).__dict__ if new_hash_value: - pkg_info['new_hash_value'] = ('sha256', new_hash_value) + pkg_info["new_hash_value"] = ("sha256", new_hash_value) finally: rm_rf(tempdir) @@ -1210,74 +1326,101 @@ def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_options): - ''' + """ Patch distutils and then run setup.py in a subprocess. :param src_dir: Directory containing the source code :type src_dir: str :param temp_dir: Temporary directory for doing for storing pkginfo.yaml :type temp_dir: str - ''' - # TODO: we could make everyone's lives easier if we include packaging here, because setuptools - # needs it in recent versions. At time of writing, it is not a package in defaults, so this - # actually breaks conda-build right now. Omit it until packaging is on defaults. 
- # specs = ['python %s*' % python_version, 'pyyaml', 'setuptools', 'six', 'packaging', 'appdirs'] + """ subdir = config.host_subdir - specs = [f'python {python_version}*', - 'pip', 'pyyaml', 'setuptools'] + (['m2-patch', 'm2-gcc-libs'] if config.host_subdir.startswith('win') - else ['patch']) + specs = [ + f"python {python_version}*", + "pip", + "ruamel.yaml", + "setuptools", + ] + ( + ["m2-patch", "m2-gcc-libs"] + if config.host_subdir.startswith("win") + else ["patch"] + ) with open(os.path.join(src_dir, "setup.py")) as setup: text = setup.read() - if 'import numpy' in text or 'from numpy' in text: - specs.append('numpy') + if "import numpy" in text or "from numpy" in text: + specs.append("numpy") specs.extend(extra_specs) rm_rf(config.host_prefix) - create_env(config.host_prefix, specs_or_actions=specs, env='host', - subdir=subdir, clear_cache=False, config=config) - stdlib_dir = join(config.host_prefix, - 'Lib' if sys.platform == 'win32' - else 'lib/python%s' % python_version) - - patch = join(temp_dir, 'pypi-distutils.patch') - with open(patch, 'wb') as f: - f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')).encode('utf-8')) - - if exists(join(stdlib_dir, 'distutils', 'core.py-copy')): - rm_rf(join(stdlib_dir, 'distutils', 'core.py')) - copy2(join(stdlib_dir, 'distutils', 'core.py-copy'), - join(stdlib_dir, 'distutils', 'core.py')) - # Avoid race conditions. Invalidate the cache. - rm_rf(join(stdlib_dir, 'distutils', '__pycache__', - 'core.cpython-%s%s.pyc' % sys.version_info[:2])) - rm_rf(join(stdlib_dir, 'distutils', '__pycache__', - 'core.cpython-%s%s.pyo' % sys.version_info[:2])) - else: - copy2(join(stdlib_dir, 'distutils', 'core.py'), join(stdlib_dir, - 'distutils', 'core.py-copy')) - apply_patch(join(stdlib_dir, 'distutils'), patch, config=config) + create_env( + config.host_prefix, + specs_or_precs=specs, + env="host", + subdir=subdir, + clear_cache=False, + config=config, + ) + stdlib_dir = join( + config.host_prefix, + "Lib" if on_win else f"lib/python{python_version}", + ) + + patch = join(temp_dir, "pypi-distutils.patch") + with open(patch, "wb") as f: + f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")).encode("utf-8")) + + # distutils deprecated in Python 3.10+, removed in Python 3.12+ + distutils = join(stdlib_dir, "distutils") + if isdir(distutils): + if exists(join(distutils, "core.py-copy")): + rm_rf(join(distutils, "core.py")) + copy2( + join(distutils, "core.py-copy"), + join(distutils, "core.py"), + ) + # Avoid race conditions. Invalidate the cache. 
+ rm_rf( + join( + distutils, + "__pycache__", + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyc", + ) + ) + rm_rf( + join( + distutils, + "__pycache__", + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyo", + ) + ) + else: + copy2( + join(distutils, "core.py"), + join(distutils, "core.py-copy"), + ) + apply_patch(distutils, patch, config=config) - vendored = join(stdlib_dir, "site-packages", "setuptools", "_distutils") - if os.path.isdir(vendored): - apply_patch(vendored, patch, config=config) + setuptools = join(stdlib_dir, "site-packages", "setuptools", "_distutils") + if isdir(setuptools): + apply_patch(setuptools, patch, config=config) # Save PYTHONPATH for later env = os.environ.copy() - if 'PYTHONPATH' in env: - env['PYTHONPATH'] = str(src_dir + ':' + env['PYTHONPATH']) + if "PYTHONPATH" in env: + env["PYTHONPATH"] = str(src_dir + ":" + env["PYTHONPATH"]) else: - env['PYTHONPATH'] = str(src_dir) + env["PYTHONPATH"] = str(src_dir) cwd = getcwd() chdir(src_dir) - cmdargs = [config.host_python, 'setup.py', 'install'] + cmdargs = [config.host_python, "setup.py", "install"] cmdargs.extend(setup_options) try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print('$PYTHONPATH = %s' % env['PYTHONPATH']) - sys.exit('Error: command failed: %s' % ' '.join(cmdargs)) + print("$PYTHONPATH = {}".format(env["PYTHONPATH"])) + sys.exit("Error: command failed: {}".format(" ".join(cmdargs))) finally: chdir(cwd) @@ -1285,6 +1428,6 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op def make_entry_tests(entry_list): tests = [] for entry_point in entry_list: - entry = entry_point.partition('=')[0].strip() + entry = entry_point.partition("=")[0].strip() tests.append(entry + " --help") return tests diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index 04f73d7355..d44477171f 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -1,32 +1,33 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse -from conda_build.source import download_to_cache -from conda_build.license_family import guess_license_family -from copy import copy -try: - import cPickle as pickle -except: - import pickle as pickle import gzip import hashlib -from os import (chmod, makedirs) -from os.path import (basename, dirname, exists, join, splitext) +import pickle import re +from copy import copy +from os import chmod, makedirs +from os.path import basename, dirname, exists, join, splitext from textwrap import wrap +from typing import TYPE_CHECKING +from urllib.request import urlopen from xml.etree import ElementTree as ET -from .cran import yaml_quote_string +from ..license_family import guess_license_family +from ..source import download_to_cache +from ..utils import ensure_list +from .cran import yaml_quote_string -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen +if TYPE_CHECKING: + from typing import Iterable + from ..config import Config # This is used in two places -default_architecture = 'x86_64' -default_distro = 'centos6' +default_architecture = "x86_64" +default_distro = "centos6" RPM_META = """\ package: @@ -79,92 +80,104 @@ """ -CDTs = dict({'centos5': {'dirname': 'centos5', - 'short_name': 'cos5', - 'base_url': 'http://vault.centos.org/5.11/os/{base_architecture}/CentOS/', - 'sbase_url': 'http://vault.centos.org/5.11/os/Source/', - 'repomd_url': 
'http://vault.centos.org/5.11/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{architecture}-conda_cos5-linux-gnu', - 'host_subdir': 'linux-{bits}', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el5.{architecture}', - 'checksummer': hashlib.sha1, - 'checksummer_name': "sha1", - 'macros': {}}, - 'centos6': {'dirname': 'centos6', - 'short_name': 'cos6', - 'base_url': 'http://vault.centos.org/centos/6.10/os/{base_architecture}/CentOS/', # noqa - 'sbase_url': 'http://vault.centos.org/6.10/os/Source/SPackages/', - 'repomd_url': 'http://vault.centos.org/centos/6.10/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{architecture}-conda_cos6-linux-gnu', - 'host_subdir': 'linux-{bits}', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el6.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - # Some macros are defined in /etc/rpm/macros.* but I cannot find where - # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" - # gives nothing nor does rpm --showrc | grep gdk - 'macros': {'pyver': '2.6.6', - 'gdk_pixbuf_base_version': '2.24.1'}}, - 'centos7': {'dirname': 'centos7', - 'short_name': 'cos7', - 'base_url': 'http://vault.centos.org/altarch/7/os/{base_architecture}/CentOS/', # noqa - 'sbase_url': 'http://vault.centos.org/7.7.1908/os/Source/SPackages/', - 'repomd_url': 'http://vault.centos.org/altarch/7/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{gnu_architecture}-conda_cos7-linux-gnu', - 'host_subdir': 'linux-ppc64le', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el7.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - # Some macros are defined in /etc/rpm/macros.* but I cannot find where - # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" - # gives nothing nor does rpm --showrc | grep gdk - 'macros': {'pyver': '2.6.6', - 'gdk_pixbuf_base_version': '2.24.1'}}, - 'clefos': {'dirname': 'clefos', - 'short_name': 'cos7', - 'base_url': 'http://download.sinenomine.net/clefos/7/os/{base_architecture}/', # noqa - 'sbase_url': 'http://download.sinenomine.net/clefos/7/source/srpms/', # noqa - 'repomd_url': 'http://download.sinenomine.net/clefos/7/os/repodata/repomd.xml', # noqa - 'host_machine': '{gnu_architecture}-conda-cos7-linux-gnu', - 'host_subdir': 'linux-s390x', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el7.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {'pyver': '2.7.5', - 'gdk_pixbuf_base_version': '2.36.2'}}, - 'suse_leap_rpi3': {'dirname': 'suse_leap_rpi3', - 'short_name': 'slrpi3', - # I cannot locate the src.rpms for OpenSUSE leap. The existence - # of this key tells this code to ignore missing src rpms but we - # should *never* release binaries we do not have the sources for. 
- 'allow_missing_sources': True, - 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.3-Current/repo/oss/suse/repodata/repomd.xml', # noqa - 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.3-Current/repo/oss/suse/{architecture}/', # noqa - 'sbase_url': 'http://download.opensuse.org/ports/{architecture}/source/factory/repo/oss/suse/src/', # noqa - # I even tried an older release but it was just as bad: - # 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/repodata/repomd.xml', # noqa - # 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.2/repo/oss/suse/{architecture}/', # noqa - # 'sbase_url': 'http://download.opensuse.org/source/distribution/leap/42.2/repo/oss/suse/src/', # noqa - 'host_machine': 'aarch64-conda_rpi3-linux-gnueabi', - 'host_subdir': 'linux-aarch64', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': '{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {}}, - 'raspbian_rpi2': {'dirname': 'raspbian_rpi2', - 'cdt_short_name': 'rrpi2', - 'host_machine': 'armv7a-conda_rpi2-linux-gnueabi', - 'host_subdir': 'armv7a-32', - 'fname_architecture': '{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {}}, - }) +CDTs = dict( + { + "centos5": { + "dirname": "centos5", + "short_name": "cos5", + "base_url": "http://vault.centos.org/5.11/os/{base_architecture}/CentOS/", + "sbase_url": "http://vault.centos.org/5.11/os/Source/", + "repomd_url": "http://vault.centos.org/5.11/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{architecture}-conda_cos5-linux-gnu", + "host_subdir": "linux-{bits}", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el5.{architecture}", + "checksummer": hashlib.sha1, + "checksummer_name": "sha1", + "macros": {}, + }, + "centos6": { + "dirname": "centos6", + "short_name": "cos6", + "base_url": "http://vault.centos.org/centos/6.10/os/{base_architecture}/CentOS/", # noqa + "sbase_url": "http://vault.centos.org/6.10/os/Source/SPackages/", + "repomd_url": "http://vault.centos.org/centos/6.10/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{architecture}-conda_cos6-linux-gnu", + "host_subdir": "linux-{bits}", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el6.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + # Some macros are defined in /etc/rpm/macros.* but I cannot find where + # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" + # gives nothing nor does rpm --showrc | grep gdk + "macros": {"pyver": "2.6.6", "gdk_pixbuf_base_version": "2.24.1"}, + }, + "centos7": { + "dirname": "centos7", + "short_name": "cos7", + "base_url": "http://vault.centos.org/altarch/7/os/{base_architecture}/CentOS/", # noqa + "sbase_url": "http://vault.centos.org/7.7.1908/os/Source/SPackages/", + "repomd_url": "http://vault.centos.org/altarch/7/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{gnu_architecture}-conda_cos7-linux-gnu", + "host_subdir": "linux-ppc64le", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el7.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + # Some macros are defined in /etc/rpm/macros.* but I cannot find where + # these ones are defined. 
Also, rpm --eval "%{gdk_pixbuf_base_version}" + # gives nothing nor does rpm --showrc | grep gdk + "macros": {"pyver": "2.6.6", "gdk_pixbuf_base_version": "2.24.1"}, + }, + "clefos": { + "dirname": "clefos", + "short_name": "cos7", + "base_url": "http://download.sinenomine.net/clefos/7/os/{base_architecture}/", # noqa + "sbase_url": "http://download.sinenomine.net/clefos/7/source/srpms/", # noqa + "repomd_url": "http://download.sinenomine.net/clefos/7/os/repodata/repomd.xml", # noqa + "host_machine": "{gnu_architecture}-conda-cos7-linux-gnu", + "host_subdir": "linux-s390x", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el7.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {"pyver": "2.7.5", "gdk_pixbuf_base_version": "2.36.2"}, + }, + "suse_leap_rpi3": { + "dirname": "suse_leap_rpi3", + "short_name": "slrpi3", + # I cannot locate the src.rpms for OpenSUSE leap. The existence + # of this key tells this code to ignore missing src rpms but we + # should *never* release binaries we do not have the sources for. + "allow_missing_sources": True, + "repomd_url": "http://download.opensuse.org/ports/aarch64/distribution/leap/42.3-Current/repo/oss/suse/repodata/repomd.xml", # noqa + "base_url": "http://download.opensuse.org/ports/{architecture}/distribution/leap/42.3-Current/repo/oss/suse/{architecture}/", # noqa + "sbase_url": "http://download.opensuse.org/ports/{architecture}/source/factory/repo/oss/suse/src/", # noqa + # I even tried an older release but it was just as bad: + # 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/repodata/repomd.xml', # noqa + # 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.2/repo/oss/suse/{architecture}/', # noqa + # 'sbase_url': 'http://download.opensuse.org/source/distribution/leap/42.2/repo/oss/suse/src/', # noqa + "host_machine": "aarch64-conda_rpi3-linux-gnueabi", + "host_subdir": "linux-aarch64", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {}, + }, + "raspbian_rpi2": { + "dirname": "raspbian_rpi2", + "cdt_short_name": "rrpi2", + "host_machine": "armv7a-conda_rpi2-linux-gnueabi", + "host_subdir": "armv7a-32", + "fname_architecture": "{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {}, + }, + } +) def package_exists(package_name): @@ -175,29 +188,29 @@ def package_exists(package_name): def cache_file(src_cache, url, fn=None, checksummer=hashlib.sha256): if fn: - source = dict({'url': url, 'fn': fn}) + source = dict({"url": url, "fn": fn}) else: - source = dict({'url': url}) - cached_path, _ = download_to_cache(src_cache, '', source) + source = dict({"url": url}) + cached_path, _ = download_to_cache(src_cache, "", source) csum = checksummer() - csum.update(open(cached_path, 'rb').read()) + csum.update(open(cached_path, "rb").read()) csumstr = csum.hexdigest() return cached_path, csumstr def rpm_filename_split(rpmfilename): base, _ = splitext(rpmfilename) - release_platform = base.split('-')[-1] - parts = release_platform.split('.') + release_platform = base.split("-")[-1] + parts = release_platform.split(".") if len(parts) == 2: release, platform = parts[0], parts[1] elif len(parts) > 2: - release, platform = '.'.join(parts[0:len(parts) - 1]), '.'.join(parts[-1:]) + release, platform = ".".join(parts[0 : len(parts) - 1]), ".".join(parts[-1:]) else: 
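# Illustrative sketch, not part of the diff: the CDT entries above are URL and
# triplet *templates*; write_conda_recipe() later expands the {architecture},
# {base_architecture}, {gnu_architecture} and {bits} placeholders with
# str.format. A minimal example assuming the centos7 entry and a ppc64le
# target (values below are hypothetical, only i686 -> i386 is remapped):
architecture_bits = {
    "architecture": "ppc64le",
    "base_architecture": "ppc64le",
    "gnu_architecture": "powerpc64le",
    "bits": "64",
}
repomd_url = (
    "http://vault.centos.org/altarch/7/os/{base_architecture}/repodata/repomd.xml"
).format(**architecture_bits)
host_machine = "{gnu_architecture}-conda_cos7-linux-gnu".format(**architecture_bits)
# repomd_url  -> http://vault.centos.org/altarch/7/os/ppc64le/repodata/repomd.xml
# host_machine -> powerpc64le-conda_cos7-linux-gnu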
print(f"ERROR: Cannot figure out the release and platform for {base}") - name_version = base.split('-')[0:-1] + name_version = base.split("-")[0:-1] version = name_version[-1] - rpm_name = '-'.join(name_version[0:len(name_version) - 1]) + rpm_name = "-".join(name_version[0 : len(name_version) - 1]) return rpm_name, version, release, platform @@ -208,7 +221,7 @@ def rpm_split_url_and_cache(rpm_url, src_cache): def rpm_filename_generate(rpm_name, version, release, platform): - return f'{rpm_name}-{version}-{release}.{platform}.rpm' + return f"{rpm_name}-{version}-{release}.{platform}.rpm" def rpm_url_generate(url_dirname, rpm_name, version, release, platform, src_cache): @@ -217,14 +230,14 @@ def rpm_url_generate(url_dirname, rpm_name, version, release, platform, src_cach """ result = rpm_filename_generate(rpm_name, version, release, platform) url = join(url_dirname, result) - path, _ = download_to_cache(src_cache, '', dict({'url': url})) + path, _ = download_to_cache(src_cache, "", dict({"url": url})) assert path, f"Failed to cache generated RPM url {result}" return url def find_repo_entry_and_arch(repo_primary, architectures, depend): - dep_name = depend['name'] - found_package_name = '' + dep_name = depend["name"] + found_package_name = "" try: # Try direct lookup first. found_package = repo_primary[dep_name] @@ -234,16 +247,18 @@ def find_repo_entry_and_arch(repo_primary, architectures, depend): for name, package in repo_primary.items(): for arch in architectures: if arch in package: - if 'provides' in package[arch]: - for provide in package[arch]['provides']: - if provide['name'] == dep_name: + if "provides" in package[arch]: + for provide in package[arch]["provides"]: + if provide["name"] == dep_name: print(f"Found it in {name}") found_package = package found_package_name = name break - if found_package_name == '': - print(f"WARNING: Did not find package called (or another one providing) {dep_name}") # noqa + if found_package_name == "": + print( + f"WARNING: Did not find package called (or another one providing) {dep_name}" + ) # noqa return None, None, None chosen_arch = None @@ -257,12 +272,15 @@ def find_repo_entry_and_arch(repo_primary, architectures, depend): return entry, found_package_name, chosen_arch -str_flags_to_conda_version_spec = dict({'LT': '<', - 'LE': '<=', - 'EQ': '==', - 'GE': '>=', - 'GT': '>', - }) +str_flags_to_conda_version_spec = dict( + { + "LT": "<", + "LE": "<=", + "EQ": "==", + "GE": ">=", + "GT": ">", + } +) def dictify(r, root=True): @@ -279,70 +297,76 @@ def dictify(r, root=True): def dictify_pickled(xml_file, src_cache, dict_massager=None, cdt=None): - pickled = xml_file + '.p' + pickled = xml_file + ".p" if exists(pickled): - return pickle.load(open(pickled, 'rb')) - with open(xml_file, encoding='utf-8') as xf: + return pickle.load(open(pickled, "rb")) + with open(xml_file, encoding="utf-8") as xf: xmlstring = xf.read() # Remove the global namespace. - xmlstring = re.sub(r'\sxmlns="[^"]+"', r'', xmlstring, count=1) + xmlstring = re.sub(r'\sxmlns="[^"]+"', r"", xmlstring, count=1) # Replace sub-namespaces with their names. 
xmlstring = re.sub(r'\sxmlns:([a-zA-Z]*)="[^"]+"', r' xmlns:\1="\1"', xmlstring) - root = ET.fromstring(xmlstring.encode('utf-8')) + root = ET.fromstring(xmlstring.encode("utf-8")) result = dictify(root) if dict_massager: result = dict_massager(result, src_cache, cdt) - pickle.dump(result, open(pickled, 'wb')) + pickle.dump(result, open(pickled, "wb")) return result def get_repo_dict(repomd_url, data_type, dict_massager, cdt, src_cache): xmlstring = urlopen(repomd_url).read() # Remove the default namespace definition (xmlns="http://some/namespace") - xmlstring = re.sub(br'\sxmlns="[^"]+"', b'', xmlstring, count=1) + xmlstring = re.sub(rb'\sxmlns="[^"]+"', b"", xmlstring, count=1) repomd = ET.fromstring(xmlstring) for child in repomd.findall(f"*[@type='{data_type}']"): open_csum = child.findall("open-checksum")[0].text xml_file = join(src_cache, open_csum) try: - xml_file, xml_csum = cache_file(src_cache, xml_file, None, cdt['checksummer']) + xml_file, xml_csum = cache_file( + src_cache, xml_file, None, cdt["checksummer"] + ) except: csum = child.findall("checksum")[0].text - location = child.findall("location")[0].attrib['href'] - xmlgz_file = dirname(dirname(repomd_url)) + '/' + location - cached_path, cached_csum = cache_file(src_cache, xmlgz_file, - None, cdt['checksummer']) - assert csum == cached_csum, "Checksum for {} does not match value in {}".format( - xmlgz_file, repomd_url) - with gzip.open(cached_path, 'rb') as gz: + location = child.findall("location")[0].attrib["href"] + xmlgz_file = dirname(dirname(repomd_url)) + "/" + location + cached_path, cached_csum = cache_file( + src_cache, xmlgz_file, None, cdt["checksummer"] + ) + assert ( + csum == cached_csum + ), f"Checksum for {xmlgz_file} does not match value in {repomd_url}" + with gzip.open(cached_path, "rb") as gz: xml_content = gz.read() - xml_csum = cdt['checksummer']() + xml_csum = cdt["checksummer"]() xml_csum.update(xml_content) xml_csum = xml_csum.hexdigest() if xml_csum == open_csum: - with open(xml_file, 'wb') as xml: + with open(xml_file, "wb") as xml: xml.write(xml_content) else: - print(f"ERROR: Checksum of uncompressed file {xmlgz_file} does not match") # noqa + print( + f"ERROR: Checksum of uncompressed file {xmlgz_file} does not match" + ) # noqa return dictify_pickled(xml_file, src_cache, dict_massager, cdt) return dict({}) def massage_primary_requires(requires, cdt): for require in requires: - require['name'] = require['name'] - if 'flags' in require: - require['flags'] = str_flags_to_conda_version_spec[require['flags']] + require["name"] = require["name"] + if "flags" in require: + require["flags"] = str_flags_to_conda_version_spec[require["flags"]] else: - require['flags'] = None - if 'ver' in require: - if '%' in require['ver']: - require['ver'] = require['ver'].replace('%', '') - if not require['ver'].startswith('{'): - require['ver'] = '{' + require['ver'] - if not require['ver'].endswith('}'): - require['ver'] = require['ver'] + '}' - require['ver'] = require['ver'].format(**cdt['macros']) + require["flags"] = None + if "ver" in require: + if "%" in require["ver"]: + require["ver"] = require["ver"].replace("%", "") + if not require["ver"].startswith("{"): + require["ver"] = "{" + require["ver"] + if not require["ver"].endswith("}"): + require["ver"] = require["ver"] + "}" + require["ver"] = require["ver"].format(**cdt["macros"]) return requires @@ -359,51 +383,55 @@ def massage_primary(repo_primary, src_cache, cdt): """ new_dict = dict({}) - for package in repo_primary['metadata']['package']: - name = 
package['name'][0]['_text'] - arch = package['arch'][0]['_text'] - if arch == 'src': + for package in repo_primary["metadata"]["package"]: + name = package["name"][0]["_text"] + arch = package["arch"][0]["_text"] + if arch == "src": continue - checksum = package['checksum'][0]['_text'] - source = package['format'][0]['{rpm}sourcerpm'][0]['_text'] + checksum = package["checksum"][0]["_text"] + source = package["format"][0]["{rpm}sourcerpm"][0]["_text"] # If you need to check if the sources exist (perhaps you've got the source URL wrong # or the distro has forgotten to copy them?): # import requests # sbase_url = cdt['sbase_url'] # surl = sbase_url + source # print("{} {}".format(requests.head(surl).status_code, surl)) - location = package['location'][0]['href'] - version = package['version'][0] - summary = package['summary'][0]['_text'] + location = package["location"][0]["href"] + version = package["version"][0] + summary = package["summary"][0]["_text"] try: - description = package['description'][0]['_text'] + description = package["description"][0]["_text"] except: description = "NA" - if '_text' in package['url'][0]: - url = package['url'][0]['_text'] + if "_text" in package["url"][0]: + url = package["url"][0]["_text"] else: - url = '' - license = package['format'][0]['{rpm}license'][0]['_text'] + url = "" + license = package["format"][0]["{rpm}license"][0]["_text"] try: - provides = package['format'][0]['{rpm}provides'][0]['{rpm}entry'] + provides = package["format"][0]["{rpm}provides"][0]["{rpm}entry"] provides = massage_primary_requires(provides, cdt) except: provides = [] try: - requires = package['format'][0]['{rpm}requires'][0]['{rpm}entry'] + requires = package["format"][0]["{rpm}requires"][0]["{rpm}entry"] requires = massage_primary_requires(requires, cdt) except: requires = [] - new_package = dict({'checksum': checksum, - 'location': location, - 'home': url, - 'source': source, - 'version': version, - 'summary': yaml_quote_string(summary), - 'description': description, - 'license': license, - 'provides': provides, - 'requires': requires}) + new_package = dict( + { + "checksum": checksum, + "location": location, + "home": url, + "source": source, + "version": version, + "summary": yaml_quote_string(summary), + "description": description, + "license": license, + "provides": provides, + "requires": requires, + } + ) if name in new_dict: if arch in new_dict[name]: print(f"WARNING: Duplicate packages exist for {name} for arch {arch}") @@ -414,41 +442,48 @@ def massage_primary(repo_primary, src_cache, cdt): def valid_depends(depends): - name = depends['name'] - str_flags = depends['flags'] - if (not name.startswith('rpmlib(') and not - name.startswith('config(') and not - name.startswith('pkgconfig(') and not - name.startswith('/') and - name != 'rtld(GNU_HASH)' and - '.so' not in name and - '(' not in name and - str_flags): + name = depends["name"] + str_flags = depends["flags"] + if ( + not name.startswith("rpmlib(") + and not name.startswith("config(") + and not name.startswith("pkgconfig(") + and not name.startswith("/") + and name != "rtld(GNU_HASH)" + and ".so" not in name + and "(" not in name + and str_flags + ): return True return False def remap_license(rpm_license): - mapping = {'lgplv2+': 'LGPL (>= 2)', - 'gplv2+': 'GPL (>= 2)', - 'public domain (uncopyrighted)': 'Public-Domain', - 'public domain': 'Public-Domain', - 'mit/x11': 'MIT', - 'the open group license': 'The Open Group License'} + mapping = { + "lgplv2+": "LGPL (>= 2)", + "gplv2+": "GPL (>= 2)", + "public domain 
(uncopyrighted)": "Public-Domain", + "public domain": "Public-Domain", + "mit/x11": "MIT", + "the open group license": "The Open Group License", + } l_rpm_license = rpm_license.lower() if l_rpm_license in mapping: - license, family = mapping[l_rpm_license], guess_license_family(mapping[l_rpm_license]) + license, family = ( + mapping[l_rpm_license], + guess_license_family(mapping[l_rpm_license]), + ) else: license, family = rpm_license, guess_license_family(rpm_license) # Yuck: - if family == 'APACHE': - family = 'Apache' - elif family == 'PUBLIC-DOMAIN': - family = 'Public-Domain' - elif family == 'PROPRIETARY': - family = 'Proprietary' - elif family == 'OTHER': - family = 'Other' + if family == "APACHE": + family = "Apache" + elif family == "PUBLIC-DOMAIN": + family = "Public-Domain" + elif family == "PROPRIETARY": + family = "Proprietary" + elif family == "OTHER": + family = "Other" return license, family @@ -459,19 +494,28 @@ def tidy_text(text, wrap_at=0): return stripped -def write_conda_recipes(recursive, repo_primary, package, architectures, - cdt, output_dir, override_arch, src_cache): - entry, entry_name, arch = find_repo_entry_and_arch(repo_primary, architectures, - dict({'name': package})) +def write_conda_recipes( + recursive, + repo_primary, + package, + architectures, + cdt, + output_dir, + override_arch, + src_cache, +): + entry, entry_name, arch = find_repo_entry_and_arch( + repo_primary, architectures, dict({"name": package}) + ) if not entry: return if override_arch: arch = architectures[0] else: - arch = cdt['fname_architecture'] + arch = cdt["fname_architecture"] package = entry_name - rpm_url = dirname(dirname(cdt['base_url'])) + '/' + entry['location'] - srpm_url = cdt['sbase_url'] + entry['source'] + rpm_url = dirname(dirname(cdt["base_url"])) + "/" + entry["location"] + srpm_url = cdt["sbase_url"] + entry["source"] _, _, _, _, _, sha256str = rpm_split_url_and_cache(rpm_url, src_cache) try: # We ignore the hash of source RPMs since they @@ -479,103 +523,121 @@ def write_conda_recipes(recursive, repo_primary, package, architectures, _, _, _, _, _, _ = rpm_split_url_and_cache(srpm_url, src_cache) except: # Just pretend the binaries are sources. 
- if 'allow_missing_sources' in cdt: + if "allow_missing_sources" in cdt: srpm_url = rpm_url else: raise - depends = [required for required in entry['requires'] if valid_depends(required)] + depends = [required for required in entry["requires"] if valid_depends(required)] - if package in cdt['dependency_add']: - for missing_dep in cdt['dependency_add'][package]: - e_missing, e_name_missing, _ = find_repo_entry_and_arch(repo_primary, architectures, - dict({'name': missing_dep})) + if package in cdt["dependency_add"]: + for missing_dep in cdt["dependency_add"][package]: + e_missing, e_name_missing, _ = find_repo_entry_and_arch( + repo_primary, architectures, dict({"name": missing_dep}) + ) if e_missing: - for provides in e_missing['provides']: - if provides['name'] == e_name_missing: + for provides in e_missing["provides"]: + if provides["name"] == e_name_missing: copy_provides = copy(provides) if "rel" in copy_provides: del copy_provides["rel"] depends.append(copy_provides) else: - print('WARNING: Additional dependency of {}, {} not found'.format(package, - missing_dep)) + print( + f"WARNING: Additional dependency of {package}, {missing_dep} not found" + ) for depend in depends: - dep_entry, dep_name, dep_arch = find_repo_entry_and_arch(repo_primary, - architectures, - depend) + dep_entry, dep_name, dep_arch = find_repo_entry_and_arch( + repo_primary, architectures, depend + ) if override_arch: dep_arch = architectures[0] - depend['arch'] = dep_arch + depend["arch"] = dep_arch # Because something else may provide a substitute for the wanted package # we need to also overwrite the versions with those of the provider, e.g. # libjpeg 6b is provided by libjpeg-turbo 1.2.1 - if depend['name'] != dep_name and 'version' in dep_entry: - if 'ver' in dep_entry['version']: - depend['ver'] = dep_entry['version']['ver'] - if 'epoch' in dep_entry['version']: - depend['epoch'] = dep_entry['version']['epoch'] + if depend["name"] != dep_name and "version" in dep_entry: + if "ver" in dep_entry["version"]: + depend["ver"] = dep_entry["version"]["ver"] + if "epoch" in dep_entry["version"]: + depend["epoch"] = dep_entry["version"]["epoch"] if recursive: - depend['name'] = write_conda_recipes(recursive, - repo_primary, - depend['name'], - architectures, - cdt, - output_dir, - override_arch, - src_cache) - - sn = cdt['short_name'] + '-' + arch + depend["name"] = write_conda_recipes( + recursive, + repo_primary, + depend["name"], + architectures, + cdt, + output_dir, + override_arch, + src_cache, + ) + + sn = cdt["short_name"] + "-" + arch dependsstr = "" if len(depends): - depends_specs = ["{}-{}-{} {}{}".format(depend['name'].lower().replace('+', 'x'), - cdt['short_name'], depend['arch'], - depend['flags'], depend['ver']) - for depend in depends] - dependsstr_part = '\n'.join([f' - {depends_spec}' - for depends_spec in depends_specs]) - dependsstr_build = ' build:\n' + dependsstr_part + '\n' - dependsstr_host = ' host:\n' + dependsstr_part + '\n' - dependsstr_run = ' run:\n' + dependsstr_part - dependsstr = 'requirements:\n' + dependsstr_build + dependsstr_host + dependsstr_run - - package_l = package.lower().replace('+', 'x') - package_cdt_name = package_l + '-' + sn - license, license_family = remap_license(entry['license']) - d = dict({'version': entry['version']['ver'], - 'packagename': package_cdt_name, - 'hostmachine': cdt['host_machine'], - 'hostsubdir': cdt['host_subdir'], - 'depends': dependsstr, - 'rpmurl': rpm_url, - 'srcrpmurl': srpm_url, - 'home': entry['home'], - 'license': license, - 
'license_family': license_family, - 'checksum_name': cdt['checksummer_name'], - 'checksum': entry['checksum'], - 'summary': '"(CDT) ' + tidy_text(entry['summary']) + '"', - 'description': '|\n ' + '\n '.join(tidy_text(entry['description'], 78)), # noqa - # Cheeky workaround. I use ${PREFIX}, - # ${PWD}, ${RPM} and ${RECIPE_DIR} in - # BUILDSH and they get interpreted as - # format string tokens so bounce them - # back. - 'PREFIX': '{PREFIX}', - 'RPM': '{RPM}', - 'PWD': '{PWD}', - 'RECIPE_DIR': '{RECIPE_DIR}', - 'SRC_DIR': '{SRC_DIR}'}) + depends_specs = [ + "{}-{}-{} {}{}".format( + depend["name"].lower().replace("+", "x"), + cdt["short_name"], + depend["arch"], + depend["flags"], + depend["ver"], + ) + for depend in depends + ] + dependsstr_part = "\n".join( + [f" - {depends_spec}" for depends_spec in depends_specs] + ) + dependsstr_build = " build:\n" + dependsstr_part + "\n" + dependsstr_host = " host:\n" + dependsstr_part + "\n" + dependsstr_run = " run:\n" + dependsstr_part + dependsstr = ( + "requirements:\n" + dependsstr_build + dependsstr_host + dependsstr_run + ) + + package_l = package.lower().replace("+", "x") + package_cdt_name = package_l + "-" + sn + license, license_family = remap_license(entry["license"]) + d = dict( + { + "version": entry["version"]["ver"], + "packagename": package_cdt_name, + "hostmachine": cdt["host_machine"], + "hostsubdir": cdt["host_subdir"], + "depends": dependsstr, + "rpmurl": rpm_url, + "srcrpmurl": srpm_url, + "home": entry["home"], + "license": license, + "license_family": license_family, + "checksum_name": cdt["checksummer_name"], + "checksum": entry["checksum"], + "summary": '"(CDT) ' + tidy_text(entry["summary"]) + '"', + "description": "|\n " + + "\n ".join(tidy_text(entry["description"], 78)), # noqa + # Cheeky workaround. I use ${PREFIX}, + # ${PWD}, ${RPM} and ${RECIPE_DIR} in + # BUILDSH and they get interpreted as + # format string tokens so bounce them + # back. + "PREFIX": "{PREFIX}", + "RPM": "{RPM}", + "PWD": "{PWD}", + "RECIPE_DIR": "{RECIPE_DIR}", + "SRC_DIR": "{SRC_DIR}", + } + ) odir = join(output_dir, package_cdt_name) try: makedirs(odir) except: pass - with open(join(odir, 'meta.yaml'), 'wb') as f: - f.write(RPM_META.format(**d).encode('utf-8')) - buildsh = join(odir, 'build.sh') - with open(buildsh, 'wb') as f: + with open(join(odir, "meta.yaml"), "wb") as f: + f.write(RPM_META.format(**d).encode("utf-8")) + buildsh = join(odir, "build.sh") + with open(buildsh, "wb") as f: chmod(buildsh, 0o755) - f.write(BUILDSH.format(**d).encode('utf-8')) + f.write(BUILDSH.format(**d).encode("utf-8")) return package @@ -583,15 +645,23 @@ def write_conda_recipes(recursive, repo_primary, package, architectures, # name their RPMs differently we probably want to hide that away from users # Do I want to pass just the package name, the CDT and the arch and rely on # expansion to form the URL? I have been going backwards and forwards here. 
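# Illustrative sketch, not part of the diff: how one massaged "requires" entry
# is rendered into a requirement spec by write_conda_recipes(). The package
# name and version below are hypothetical.
cdt = {"short_name": "cos6"}
depend = {"name": "libstdc++", "arch": "x86_64", "flags": ">=", "ver": "4.4.7"}
spec = "{}-{}-{} {}{}".format(
    depend["name"].lower().replace("+", "x"),  # "+" is not valid in conda names
    cdt["short_name"],
    depend["arch"],
    depend["flags"],                           # RPM flag already mapped, e.g. GE -> >=
    depend["ver"],
)
assert spec == "libstdcxx-cos6-x86_64 >=4.4.7"
# The same spec is then repeated under build:, host: and run: in the generated meta.yaml.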
-def write_conda_recipe(packages, distro, output_dir, architecture, recursive, override_arch, - dependency_add, config): +def write_conda_recipe( + packages: list[str], + distro: str, + output_dir: str, + architecture: str, + recursive: bool, + override_arch: bool, + dependency_add: list[str], + config: Config | None, +): cdt_name = distro - bits = '32' if architecture in ('armv6', 'armv7a', 'i686', 'i386') else '64' - base_architectures = dict({'i686': 'i386'}) + bits = "32" if architecture in ("armv6", "armv7a", "i686", "i386") else "64" + base_architectures = dict({"i686": "i386"}) # gnu_architectures are those recognized by the canonical config.sub / config.guess # and crosstool-ng. They are returned from ${CC} -dumpmachine and are a part of the # sysroot. - gnu_architectures = dict({'ppc64le': 'powerpc64le'}) + gnu_architectures = dict({"ppc64le": "powerpc64le"}) try: base_architecture = base_architectures[architecture] except: @@ -600,10 +670,14 @@ def write_conda_recipe(packages, distro, output_dir, architecture, recursive, ov gnu_architecture = gnu_architectures[architecture] except: gnu_architecture = architecture - architecture_bits = dict({'architecture': architecture, - 'base_architecture': base_architecture, - 'gnu_architecture': gnu_architecture, - 'bits': bits}) + architecture_bits = dict( + { + "architecture": architecture, + "base_architecture": base_architecture, + "gnu_architecture": gnu_architecture, + "bits": bits, + } + ) cdt = dict() for k, v in CDTs[cdt_name].items(): if isinstance(v, str): @@ -613,53 +687,68 @@ def write_conda_recipe(packages, distro, output_dir, architecture, recursive, ov # Add undeclared dependencies. These can be baked into the global # CDTs dict, passed in on the commandline or a mixture of both. - if 'dependency_add' not in cdt: - cdt['dependency_add'] = dict() + if "dependency_add" not in cdt: + cdt["dependency_add"] = dict() if dependency_add: for package_and_missed_deps in dependency_add: - as_list = package_and_missed_deps[0].split(',') - if as_list[0] in cdt['dependency_add']: - cdt['dependency_add'][as_list[0]].extend(as_list[1:]) + as_list = package_and_missed_deps[0].split(",") + if as_list[0] in cdt["dependency_add"]: + cdt["dependency_add"][as_list[0]].extend(as_list[1:]) else: - cdt['dependency_add'][as_list[0]] = as_list[1:] + cdt["dependency_add"][as_list[0]] = as_list[1:] - repomd_url = cdt['repomd_url'] - repo_primary = get_repo_dict(repomd_url, - "primary", massage_primary, - cdt, - config.src_cache) + repomd_url = cdt["repomd_url"] + repo_primary = get_repo_dict( + repomd_url, "primary", massage_primary, cdt, config.src_cache + ) for package in packages: - write_conda_recipes(recursive, - repo_primary, - package, - [architecture, "noarch"], - cdt, - output_dir, - override_arch, - config.src_cache) - - -def skeletonize(packages, output_dir=".", version=None, recursive=False, - architecture=default_architecture, override_arch=True, - dependency_add=[], config=None, distro=default_distro): - write_conda_recipe(packages, distro, output_dir, architecture, recursive, - override_arch, dependency_add, config) + write_conda_recipes( + recursive, + repo_primary, + package, + [architecture, "noarch"], + cdt, + output_dir, + override_arch, + config.src_cache, + ) + + +def skeletonize( + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + architecture: str = default_architecture, + override_arch: bool = True, + dependency_add: str | Iterable[str] | None = None, + config: Config | None = 
None, + distro: str = default_distro, +): + dependency_add = ensure_list(dependency_add) + + write_conda_recipe( + packages, + distro, + output_dir, + architecture, + recursive, + override_arch, + dependency_add, + config, + ) def add_parser(repos): - rpm = repos.add_parser( "rpm", help=""" Create recipe skeleton for RPM files - """,) - - rpm.add_argument( - "packages", - nargs='+', - help="RPM package name(s)" + """, ) + rpm.add_argument("packages", nargs="+", help="RPM package name(s)") + rpm.add_argument( "--output-dir", help="Directory to write recipes to (default: %(default)s).", @@ -668,16 +757,16 @@ def add_parser(repos): rpm.add_argument( "--recursive", - action='store_true', - dest='recursive', - help='Create recipes for dependencies if they do not already exist', + action="store_true", + dest="recursive", + help="Create recipes for dependencies if they do not already exist", ) rpm.add_argument( "--dependency-add", - nargs='+', - action='append', - help='Add undeclared dependencies (format: package,missing_dep1,missing_dep2)', + nargs="+", + action="append", + help="Add undeclared dependencies (format: package,missing_dep1,missing_dep2)", ) rpm.add_argument( @@ -696,17 +785,22 @@ def valid_distros(): def distro(distro_name): if distro_name not in CDTs: - raise argparse.ArgumentTypeError(f"valid --distro values are {valid_distros()}") + raise argparse.ArgumentTypeError( + f"valid --distro values are {valid_distros()}" + ) return distro_name - rpm.add_argument("--distro", - type=distro, - default=default_distro, - help="Distro to use. Applies to all packages, valid values are: {}".format( - valid_distros())) - - rpm.add_argument("--no-override-arch", - help="Do not override noarch in package names", - dest="override_arch", - default=True, - action="store_false") + rpm.add_argument( + "--distro", + type=distro, + default=default_distro, + help=f"Distro to use. 
Applies to all packages, valid values are: {valid_distros()}", + ) + + rpm.add_argument( + "--no-override-arch", + help="Do not override noarch in package names", + dest="override_arch", + default=True, + action="store_false", + ) diff --git a/conda_build/source.py b/conda_build/source.py index 5dbaa59d52..903f5d7ca0 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -1,95 +1,111 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import locale import os -from os.path import join, isdir, isfile, abspath, basename, exists, normpath, expanduser import re import shutil -from subprocess import CalledProcessError import sys import tempfile import time +from os.path import abspath, basename, exists, expanduser, isdir, isfile, join, normpath from pathlib import Path -from typing import Optional +from subprocess import CalledProcessError +from typing import TYPE_CHECKING +from urllib.parse import urljoin -from .conda_interface import download, TemporaryDirectory -from .conda_interface import hashsum_file +from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.read import compute_sum +from conda.utils import url_path from .exceptions import MissingDependency -from conda_build.os_utils import external -from conda_build.conda_interface import url_path, CondaHTTPError -from conda_build.utils import (decompressible_exts, tar_xf, safe_print_unicode, copy_into, on_win, ensure_list, - check_output_env, check_call_env, convert_path_for_cygwin_or_msys2, - get_logger, rm_rf, LoggingContext) - +from .os_utils import external +from .utils import ( + LoggingContext, + check_call_env, + check_output_env, + convert_path_for_cygwin_or_msys2, + convert_unix_path_to_win, + copy_into, + decompressible_exts, + ensure_list, + get_logger, + on_win, + rm_rf, + safe_print_unicode, + tar_xf, +) + +if TYPE_CHECKING: + from typing import Iterable log = get_logger(__name__) -if on_win: - from conda_build.utils import convert_unix_path_to_win -if sys.version_info[0] == 3: - from urllib.parse import urljoin -else: - from urlparse import urljoin - -git_submod_re = re.compile(r'(?:.+)\.(.+)\.(?:.+)\s(.+)') +git_submod_re = re.compile(r"(?:.+)\.(.+)\.(?:.+)\s(.+)") ext_re = re.compile(r"(.*?)(\.(?:tar\.)?[^.]+)$") def append_hash_to_fn(fn, hash_value): - return ext_re.sub(fr"\1_{hash_value[:10]}\2", fn) + return ext_re.sub(rf"\1_{hash_value[:10]}\2", fn) def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): - ''' Download a source to the local cache. 
''' + """Download a source to the local cache.""" if verbose: - log.info('Source cache directory is: %s' % cache_folder) + log.info(f"Source cache directory is: {cache_folder}") if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) - source_urls = source_dict['url'] + source_urls = source_dict["url"] if not isinstance(source_urls, list): source_urls = [source_urls] - unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(source_urls[0]) + unhashed_fn = fn = ( + source_dict["fn"] if "fn" in source_dict else basename(source_urls[0]) + ) hash_added = False - for hash_type in ('md5', 'sha1', 'sha256'): + for hash_type in ("md5", "sha1", "sha256"): if hash_type in source_dict: if source_dict[hash_type] in (None, ""): - raise ValueError(f'Empty {hash_type} hash provided for {fn}') + raise ValueError(f"Empty {hash_type} hash provided for {fn}") fn = append_hash_to_fn(fn, source_dict[hash_type]) hash_added = True break else: - log.warn("No hash (md5, sha1, sha256) provided for {}. Source download forced. " - "Add hash to recipe to use source cache.".format(unhashed_fn)) + log.warn( + f"No hash (md5, sha1, sha256) provided for {unhashed_fn}. Source download forced. " + "Add hash to recipe to use source cache." + ) path = join(cache_folder, fn) if isfile(path): if verbose: - log.info('Found source in cache: %s' % fn) + log.info(f"Found source in cache: {fn}") else: if verbose: - log.info('Downloading source to cache: %s' % fn) + log.info(f"Downloading source to cache: {fn}") for url in source_urls: if "://" not in url: - if url.startswith('~'): + if url.startswith("~"): url = expanduser(url) if not os.path.isabs(url): url = os.path.normpath(os.path.join(recipe_path, url)) url = url_path(url) else: - if url.startswith('file:///~'): - url = 'file:///' + expanduser(url[8:]).replace('\\', '/') + if url.startswith("file:///~"): + url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: - log.info("Downloading %s" % url) + log.info(f"Downloading {url}") with LoggingContext(): download(url, path) except CondaHTTPError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) except RuntimeError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) else: if verbose: @@ -97,24 +113,25 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: # no break rm_rf(path) - raise RuntimeError("Could not download %s" % url) + raise RuntimeError(f"Could not download {url}") hashed = None - for tp in ('md5', 'sha1', 'sha256'): + for tp in ("md5", "sha1", "sha256"): if tp in source_dict: expected_hash = source_dict[tp] - hashed = hashsum_file(path, tp) + hashed = compute_sum(path, tp) if expected_hash != hashed: rm_rf(path) - raise RuntimeError("%s mismatch: '%s' != '%s'" % - (tp.upper(), hashed, expected_hash)) + raise RuntimeError( + f"{tp.upper()} mismatch: '{hashed}' != '{expected_hash}'" + ) break # this is really a fallback. If people don't provide the hash, we still need to prevent # collisions in our source cache, but the end user will get no benefit from the cache. 
if not hash_added: if not hashed: - hashed = hashsum_file(path, 'sha256') + hashed = compute_sum(path, "sha256") dest_path = append_hash_to_fn(path, hashed) if not os.path.isfile(dest_path): shutil.move(path, dest_path) @@ -138,10 +155,20 @@ def hoist_single_extracted_folder(nested_folder): shutil.move(os.path.join(tmpdir, entry), os.path.join(parent, entry)) -def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False, - timeout=900, locking=True): - ''' Uncompress a downloaded source. ''' - src_path, unhashed_fn = download_to_cache(cache_folder, recipe_path, source_dict, verbose) +def unpack( + source_dict, + src_dir, + cache_folder, + recipe_path, + croot, + verbose=False, + timeout=900, + locking=True, +): + """Uncompress a downloaded source.""" + src_path, unhashed_fn = download_to_cache( + cache_folder, recipe_path, source_dict, verbose + ) if not isdir(src_dir): os.makedirs(src_dir) @@ -153,9 +180,11 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False tar_xf(src_path, tmpdir) else: # In this case, the build script will need to deal with unpacking the source - print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR") + print( + "Warning: Unrecognized source format. Source file will be copied to the SRC_DIR" + ) copy_into(src_path, unhashed_dest, timeout, locking=locking) - if src_path.lower().endswith('.whl'): + if src_path.lower().endswith(".whl"): # copy wheel itself *and* unpack it # This allows test_files or about.license_file to locate files in the wheel, # as well as `pip install name-version.whl` as install command @@ -164,56 +193,86 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False folder = os.path.join(tmpdir, flist[0]) # Hoisting is destructive of information, in CDT packages, a single top level # folder of /usr64 must not be discarded. - if len(flist) == 1 and os.path.isdir(folder) and 'no_hoist' not in source_dict: + if len(flist) == 1 and os.path.isdir(folder) and "no_hoist" not in source_dict: hoist_single_extracted_folder(folder) flist = os.listdir(tmpdir) for f in flist: shutil.move(os.path.join(tmpdir, f), os.path.join(src_dir, f)) -def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_cache, git_ref=None, - git_depth=-1, is_top_level=True, verbose=True): - """ Mirror (and checkout) a Git repository recursively. - - It's not possible to use `git submodule` on a bare - repository, so the checkout must be done before we - know which submodules there are. - - Worse, submodules can be identified by using either - absolute URLs or relative paths. If relative paths - are used those need to be relocated upon mirroring, - but you could end up with `../../../../blah` and in - that case conda-build could be tricked into writing - to the root of the drive and overwriting the system - folders unless steps are taken to prevent that. 
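# Illustrative sketch, not part of the diff: unpack() hoists the contents of a
# lone top-level directory up into the work dir unless the recipe opts out with
# "no_hoist" (CDT packages must keep their single /usr64 root). A minimal
# restatement of that decision, with hypothetical paths:
import os

def should_hoist(tmpdir, source_dict):
    entries = os.listdir(tmpdir)
    return (
        len(entries) == 1
        and os.path.isdir(os.path.join(tmpdir, entries[0]))
        and "no_hoist" not in source_dict
    )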
+def check_git_lfs(git, cwd, git_ref): + try: + lfs_list_output = check_output_env([git, "lfs", "ls-files", git_ref], cwd=cwd) + return lfs_list_output and lfs_list_output.strip() + except CalledProcessError: + return False + + +def git_lfs_fetch(git, cwd, git_ref, stdout, stderr): + lfs_version = check_output_env([git, "lfs", "version"], cwd=cwd) + log.info(lfs_version) + check_call_env( + [git, "lfs", "fetch", "origin", git_ref], cwd=cwd, stdout=stdout, stderr=stderr + ) + + +def git_mirror_checkout_recursive( + git, + mirror_dir, + checkout_dir, + git_url, + git_cache, + git_ref=None, + git_depth=-1, + is_top_level=True, + verbose=True, +): + """Mirror (and checkout) a Git repository recursively. + + It's not possible to use `git submodule` on a bare + repository, so the checkout must be done before we + know which submodules there are. + + Worse, submodules can be identified by using either + absolute URLs or relative paths. If relative paths + are used those need to be relocated upon mirroring, + but you could end up with `../../../../blah` and in + that case conda-build could be tricked into writing + to the root of the drive and overwriting the system + folders unless steps are taken to prevent that. """ if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL if not mirror_dir.startswith(git_cache + os.sep): - sys.exit("Error: Attempting to mirror to %s which is outside of GIT_CACHE %s" - % (mirror_dir, git_cache)) + sys.exit( + f"Error: Attempting to mirror to {mirror_dir} which is outside of GIT_CACHE {git_cache}" + ) # This is necessary for Cygwin git and m2-git, although it is fixed in newer MSYS2. - git_mirror_dir = convert_path_for_cygwin_or_msys2(git, mirror_dir).rstrip('/') - git_checkout_dir = convert_path_for_cygwin_or_msys2(git, checkout_dir).rstrip('/') + git_mirror_dir = convert_path_for_cygwin_or_msys2(git, mirror_dir).rstrip("/") + git_checkout_dir = convert_path_for_cygwin_or_msys2(git, checkout_dir).rstrip("/") # Set default here to catch empty dicts - git_ref = git_ref or 'HEAD' + git_ref = git_ref or "HEAD" - mirror_dir = mirror_dir.rstrip('/') + mirror_dir = mirror_dir.rstrip("/") if not isdir(os.path.dirname(mirror_dir)): os.makedirs(os.path.dirname(mirror_dir)) if isdir(mirror_dir): try: - if git_ref != 'HEAD': - check_call_env([git, 'fetch'], cwd=mirror_dir, stdout=stdout, stderr=stderr) + if git_ref != "HEAD": + check_call_env( + [git, "fetch"], cwd=mirror_dir, stdout=stdout, stderr=stderr + ) + if check_git_lfs(git, mirror_dir, git_ref): + git_lfs_fetch(git, mirror_dir, git_ref, stdout, stderr) else: # Unlike 'git clone', fetch doesn't automatically update the cache's HEAD, # So here we explicitly store the remote HEAD in the cache's local refs/heads, @@ -221,13 +280,28 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca # This is important when the git repo is a local path like "git_url: ../", # but the user is working with a branch other than 'master' without # explicitly providing git_rev. 
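# Illustrative sketch, not part of the diff: the check_git_lfs()/git_lfs_fetch()
# helpers added above make the mirror update LFS-aware. A standalone
# restatement using subprocess, with a hypothetical mirror path and ref:
import subprocess

def repo_uses_lfs(git, cwd, ref="HEAD"):
    try:
        out = subprocess.check_output([git, "lfs", "ls-files", ref], cwd=cwd)
        return bool(out.strip())        # any LFS-tracked objects at this ref?
    except subprocess.CalledProcessError:
        return False                    # git-lfs missing or repo not using it

# if repo_uses_lfs("git", "/path/to/mirror", "v1.2.3"):
#     subprocess.check_call(["git", "lfs", "fetch", "origin", "v1.2.3"], cwd="/path/to/mirror")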
- check_call_env([git, 'fetch', 'origin', '+HEAD:_conda_cache_origin_head'], - cwd=mirror_dir, stdout=stdout, stderr=stderr) - check_call_env([git, 'symbolic-ref', 'HEAD', 'refs/heads/_conda_cache_origin_head'], - cwd=mirror_dir, stdout=stdout, stderr=stderr) + check_call_env( + [git, "fetch", "origin", "+HEAD:_conda_cache_origin_head"], + cwd=mirror_dir, + stdout=stdout, + stderr=stderr, + ) + check_call_env( + [ + git, + "symbolic-ref", + "HEAD", + "refs/heads/_conda_cache_origin_head", + ], + cwd=mirror_dir, + stdout=stdout, + stderr=stderr, + ) except CalledProcessError: - msg = ("Failed to update local git cache. " - "Deleting local cached repo: {} ".format(mirror_dir)) + msg = ( + "Failed to update local git cache. " + f"Deleting local cached repo: {mirror_dir} " + ) print(msg) # Maybe the failure was caused by a corrupt mirror directory. @@ -235,184 +309,238 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca shutil.rmtree(mirror_dir) raise else: - args = [git, 'clone', '--mirror'] + args = [git, "clone", "--mirror"] if git_depth > 0: - args += ['--depth', str(git_depth)] + args += ["--depth", str(git_depth)] try: - check_call_env(args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr) + check_call_env( + args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr + ) + if check_git_lfs(git, mirror_dir, git_ref): + git_lfs_fetch(git, mirror_dir, git_ref, stdout, stderr) except CalledProcessError: # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. - if sys.platform == 'win32' and git_url.startswith('/'): + if on_win and git_url.startswith("/"): git_url = convert_unix_path_to_win(git_url) if os.path.exists(git_url): # Local filepaths are allowed, but make sure we normalize them git_url = normpath(git_url) - check_call_env(args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr) + check_call_env( + args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr + ) assert isdir(mirror_dir) # Now clone from mirror_dir into checkout_dir. - check_call_env([git, 'clone', git_mirror_dir, git_checkout_dir], stdout=stdout, stderr=stderr) + check_call_env( + [git, "clone", git_mirror_dir, git_checkout_dir], stdout=stdout, stderr=stderr + ) if is_top_level: checkout = git_ref - if git_url.startswith('.'): - output = check_output_env([git, "rev-parse", checkout], stdout=stdout, stderr=stderr) - checkout = output.decode('utf-8') + if git_url.startswith("."): + output = check_output_env( + [git, "rev-parse", checkout], stdout=stdout, stderr=stderr + ) + checkout = output.decode("utf-8") if verbose: - print('checkout: %r' % checkout) + print(f"checkout: {checkout!r}") if checkout: - check_call_env([git, 'checkout', checkout], - cwd=checkout_dir, stdout=stdout, stderr=stderr) + check_call_env( + [git, "checkout", checkout], + cwd=checkout_dir, + stdout=stdout, + stderr=stderr, + ) # submodules may have been specified using relative paths. # Those paths are relative to git_url, and will not exist # relative to mirror_dir, unless we do some work to make # it so. 
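# Illustrative sketch, not part of the diff: how a relative submodule path is
# turned into an absolute URL before mirroring, mirroring the urljoin()
# relocation performed just below. The URLs are hypothetical.
from urllib.parse import urljoin

git_url = "https://example.com/org/project"
submod_rel_path = "../libfoo"
submod_url = urljoin(git_url + "/", submod_rel_path)
assert submod_url == "https://example.com/org/libfoo"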
try: - submodules = check_output_env([git, 'config', '--file', '.gitmodules', '--get-regexp', - 'url'], stderr=stdout, cwd=checkout_dir) - submodules = submodules.decode('utf-8').splitlines() + submodules = check_output_env( + [git, "config", "--file", ".gitmodules", "--get-regexp", "url"], + stderr=stdout, + cwd=checkout_dir, + ) + submodules = submodules.decode("utf-8").splitlines() except CalledProcessError: submodules = [] for submodule in submodules: matches = git_submod_re.match(submodule) - if matches and matches.group(2)[0] == '.': + if matches and matches.group(2)[0] == ".": submod_name = matches.group(1) submod_rel_path = matches.group(2) - submod_url = urljoin(git_url + '/', submod_rel_path) + submod_url = urljoin(git_url + "/", submod_rel_path) submod_mirror_dir = os.path.normpath( - os.path.join(mirror_dir, submod_rel_path)) + os.path.join(mirror_dir, submod_rel_path) + ) if verbose: - print('Relative submodule {} found: url is {}, submod_mirror_dir is {}'.format( - submod_name, submod_url, submod_mirror_dir)) + print( + f"Relative submodule {submod_name} found: url is {submod_url}, " + f"submod_mirror_dir is {submod_mirror_dir}" + ) with TemporaryDirectory() as temp_checkout_dir: - git_mirror_checkout_recursive(git, submod_mirror_dir, temp_checkout_dir, submod_url, - git_cache=git_cache, git_ref=git_ref, - git_depth=git_depth, is_top_level=False, - verbose=verbose) + git_mirror_checkout_recursive( + git, + submod_mirror_dir, + temp_checkout_dir, + submod_url, + git_cache=git_cache, + git_ref=git_ref, + git_depth=git_depth, + is_top_level=False, + verbose=verbose, + ) if is_top_level: # Now that all relative-URL-specified submodules are locally mirrored to # relatively the same place we can go ahead and checkout the submodules. - check_call_env([git, 'submodule', 'update', '--init', - '--recursive'], cwd=checkout_dir, stdout=stdout, stderr=stderr) + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "update", + "--init", + "--recursive", + ], + cwd=checkout_dir, + stdout=stdout, + stderr=stderr, + ) git_info(checkout_dir, None, git=git, verbose=verbose) if not verbose: FNULL.close() def git_source(source_dict, git_cache, src_dir, recipe_path=None, verbose=True): - ''' Download a source from a Git repo (or submodule, recursively) ''' + """Download a source from a Git repo (or submodule, recursively)""" if not isdir(git_cache): os.makedirs(git_cache) - git = external.find_executable('git') + git = external.find_executable("git") if not git: - sys.exit("Error: git is not installed in your root environment or as a build requirement.") + sys.exit( + "Error: git is not installed in your root environment or as a build requirement." 
+ ) - git_depth = int(source_dict.get('git_depth', -1)) - git_ref = source_dict.get('git_rev') or 'HEAD' + git_depth = int(source_dict.get("git_depth", -1)) + git_ref = source_dict.get("git_rev") or "HEAD" - git_url = source_dict['git_url'] - if git_url.startswith('~'): + git_url = source_dict["git_url"] + if git_url.startswith("~"): git_url = os.path.expanduser(git_url) - if git_url.startswith('.'): + if git_url.startswith("."): # It's a relative path from the conda recipe git_url = abspath(normpath(os.path.join(recipe_path, git_url))) - if sys.platform == 'win32': - git_dn = git_url.replace(':', '_') + if on_win: + git_dn = git_url.replace(":", "_") else: git_dn = git_url[1:] else: - git_dn = git_url.split('://')[-1].replace('/', os.sep) + git_dn = git_url.split("://")[-1].replace("/", os.sep) if git_dn.startswith(os.sep): git_dn = git_dn[1:] - git_dn = git_dn.replace(':', '_') + git_dn = git_dn.replace(":", "_") mirror_dir = join(git_cache, git_dn) git_mirror_checkout_recursive( - git, mirror_dir, src_dir, git_url, git_cache=git_cache, git_ref=git_ref, - git_depth=git_depth, is_top_level=True, verbose=verbose) + git, + mirror_dir, + src_dir, + git_url, + git_cache=git_cache, + git_ref=git_ref, + git_depth=git_depth, + is_top_level=True, + verbose=verbose, + ) return git # Why not use get_git_info instead? def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): - ''' Print info about a Git repo. ''' + """Print info about a Git repo.""" assert isdir(src_dir) if not git: - git = external.find_executable('git', build_prefix) + git = external.find_executable("git", build_prefix) if not git: - log.warn("git not installed in root environment. Skipping recording of git info.") + log.warn( + "git not installed in root environment. Skipping recording of git info." + ) return if verbose: stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stderr = FNULL # Ensure to explicitly set GIT_DIR as some Linux machines will not # properly execute without it. env = os.environ.copy() - env['GIT_DIR'] = join(src_dir, '.git') + env["GIT_DIR"] = join(src_dir, ".git") env = {str(key): str(value) for key, value in env.items()} for cmd, check_error in ( - ((git, 'log', '-n1'), True), - ((git, 'describe', '--tags', '--dirty'), False), - ((git, 'status'), True)): + ((git, "log", "-n1"), True), + ((git, "describe", "--tags", "--dirty"), False), + ((git, "status"), True), + ): try: stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: if check_error: - raise Exception("git error: %s" % str(e)) + raise Exception(f"git error: {str(e)}") encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding - encoding = encoding or 'utf-8' - if hasattr(stdout, 'decode'): - stdout = stdout.decode(encoding, 'ignore') + encoding = encoding or "utf-8" + if hasattr(stdout, "decode"): + stdout = stdout.decode(encoding, "ignore") if fo: - fo.write('==> {} <==\n'.format(' '.join(cmd))) + fo.write("==> {} <==\n".format(" ".join(cmd))) if verbose: - fo.write(stdout + '\n') + fo.write(stdout + "\n") else: if verbose: - print('==> {} <==\n'.format(' '.join(cmd))) - safe_print_unicode(stdout + '\n') + print("==> {} <==\n".format(" ".join(cmd))) + safe_print_unicode(stdout + "\n") def hg_source(source_dict, src_dir, hg_cache, verbose): - ''' Download a source from Mercurial repo. 
''' + """Download a source from Mercurial repo.""" if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL - hg_url = source_dict['hg_url'] + hg_url = source_dict["hg_url"] if not isdir(hg_cache): os.makedirs(hg_cache) - hg_dn = hg_url.split(':')[-1].replace('/', '_') + hg_dn = hg_url.split(":")[-1].replace("/", "_") cache_repo = join(hg_cache, hg_dn) if isdir(cache_repo): - check_call_env(['hg', 'pull'], cwd=cache_repo, stdout=stdout, stderr=stderr) + check_call_env(["hg", "pull"], cwd=cache_repo, stdout=stdout, stderr=stderr) else: - check_call_env(['hg', 'clone', hg_url, cache_repo], stdout=stdout, stderr=stderr) + check_call_env( + ["hg", "clone", hg_url, cache_repo], stdout=stdout, stderr=stderr + ) assert isdir(cache_repo) # now clone in to work directory - update = source_dict.get('hg_tag') or 'tip' + update = source_dict.get("hg_tag") or "tip" if verbose: - print('checkout: %r' % update) + print(f"checkout: {update!r}") - check_call_env(['hg', 'clone', cache_repo, src_dir], stdout=stdout, - stderr=stderr) - check_call_env(['hg', 'update', '-C', update], cwd=src_dir, stdout=stdout, - stderr=stderr) + check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) + check_call_env( + ["hg", "update", "-C", update], cwd=src_dir, stdout=stdout, stderr=stderr + ) if not verbose: FNULL.close() @@ -420,36 +548,55 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): return src_dir -def svn_source(source_dict, src_dir, svn_cache, verbose=True, timeout=900, locking=True): - ''' Download a source from SVN repo. ''' +def svn_source( + source_dict, src_dir, svn_cache, verbose=True, timeout=900, locking=True +): + """Download a source from SVN repo.""" if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL def parse_bool(s): - return str(s).lower().strip() in ('yes', 'true', '1', 'on') + return str(s).lower().strip() in ("yes", "true", "1", "on") - svn_url = source_dict['svn_url'] - svn_revision = source_dict.get('svn_rev') or 'head' - svn_ignore_externals = parse_bool(source_dict.get('svn_ignore_externals') or 'no') + svn_url = source_dict["svn_url"] + svn_revision = source_dict.get("svn_rev") or "head" + svn_ignore_externals = parse_bool(source_dict.get("svn_ignore_externals") or "no") if not isdir(svn_cache): os.makedirs(svn_cache) - svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_') + svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_") cache_repo = join(svn_cache, svn_dn) + extra_args = [] if svn_ignore_externals: - extra_args = ['--ignore-externals'] - else: - extra_args = [] + extra_args.append("--ignore-externals") + if "svn_username" in source_dict and "svn_password" in source_dict: + extra_args.extend( + [ + "--non-interactive", + "--no-auth-cache", + "--username", + source_dict.get("svn_username"), + "--password", + source_dict.get("svn_password"), + ] + ) if isdir(cache_repo): - check_call_env(['svn', 'up', '-r', svn_revision] + extra_args, cwd=cache_repo, - stdout=stdout, stderr=stderr) + check_call_env( + ["svn", "up", "-r", svn_revision] + extra_args, + cwd=cache_repo, + stdout=stdout, + stderr=stderr, + ) else: - check_call_env(['svn', 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo], - stdout=stdout, stderr=stderr) + check_call_env( + ["svn", "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo], + stdout=stdout, + 
stderr=stderr, + ) assert isdir(cache_repo) # now copy into work directory @@ -466,8 +613,9 @@ def get_repository_info(recipe_path): from the source - you can have a recipe in svn that gets source via git.""" try: if exists(join(recipe_path, ".git")): - origin = check_output_env(["git", "config", "--get", "remote.origin.url"], - cwd=recipe_path) + origin = check_output_env( + ["git", "config", "--get", "remote.origin.url"], cwd=recipe_path + ) rev = check_output_env(["git", "rev-parse", "HEAD"], cwd=recipe_path) return f"Origin {origin}, commit {rev}" elif isdir(join(recipe_path, ".hg")): @@ -476,26 +624,29 @@ def get_repository_info(recipe_path): return f"Origin {origin}, commit {rev}" elif isdir(join(recipe_path, ".svn")): info = check_output_env(["svn", "info"], cwd=recipe_path) - info = info.decode("utf-8") # Py3 returns a byte string, but re needs unicode or str. + info = info.decode( + "utf-8" + ) # Py3 returns a byte string, but re needs unicode or str. server = re.search("Repository Root: (.*)$", info, flags=re.M).group(1) revision = re.search("Revision: (.*)$", info, flags=re.M).group(1) return f"{server}, Revision {revision}" else: - return "{}, last modified {}".format(recipe_path, - time.ctime(os.path.getmtime( - join(recipe_path, "meta.yaml")))) + return "{}, last modified {}".format( + recipe_path, + time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))), + ) except CalledProcessError: get_logger(__name__).debug("Failed to checkout source in " + recipe_path) - return "{}, last modified {}".format(recipe_path, - time.ctime(os.path.getmtime( - join(recipe_path, "meta.yaml")))) + return "{}, last modified {}".format( + recipe_path, time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))) + ) _RE_LF = re.compile(rb"(? Path: +def _ensure_LF(src: os.PathLike, dst: os.PathLike | None = None) -> Path: """Replace windows line endings with Unix. Return path to modified file.""" src = Path(src) dst = Path(dst or src) # overwrite src if dst is undefined @@ -503,7 +654,7 @@ def _ensure_LF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: return dst -def _ensure_CRLF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: +def _ensure_CRLF(src: os.PathLike, dst: os.PathLike | None = None) -> Path: """Replace unix line endings with win. Return path to modified file.""" src = Path(src) dst = Path(dst or src) # overwrite src if dst is undefined @@ -511,65 +662,69 @@ def _ensure_CRLF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: return dst -def _guess_patch_strip_level(filesstr, src_dir): - """ Determine the patch strip level automatically. 
""" - maxlevel = None - files = {filestr.encode(errors='ignore') for filestr in filesstr} - src_dir = src_dir.encode(errors='ignore') +def _guess_patch_strip_level( + patches: Iterable[str | os.PathLike], src_dir: str | os.PathLike +) -> tuple[int, bool]: + """Determine the patch strip level automatically.""" + patches = set(map(Path, patches)) + maxlevel = min(len(patch.parent.parts) for patch in patches) guessed = False - for file in files: - numslash = file.count(b'/') - maxlevel = numslash if maxlevel is None else min(maxlevel, numslash) if maxlevel == 0: patchlevel = 0 else: histo = {i: 0 for i in range(maxlevel + 1)} - for file in files: - parts = file.split(b'/') + for patch in patches: + parts = patch.parts for level in range(maxlevel + 1): - if os.path.exists(join(src_dir, *parts[-len(parts) + level:])): + if Path(src_dir, *parts[-len(parts) + level :]).exists(): histo[level] += 1 order = sorted(histo, key=histo.get, reverse=True) if histo[order[0]] == histo[order[1]]: print("Patch level ambiguous, selecting least deep") guessed = True - patchlevel = min(key for key, value - in histo.items() if value == histo[order[0]]) + patchlevel = min( + key for key, value in histo.items() if value == histo[order[0]] + ) return patchlevel, guessed def _get_patch_file_details(path): - re_files = re.compile(r'^(?:---|\+\+\+) ([^\n\t]+)') + re_files = re.compile(r"^(?:---|\+\+\+) ([^\n\t]+)") files = [] - with open(path, errors='ignore') as f: + with open(path, errors="ignore") as f: files = [] first_line = True is_git_format = True for line in f.readlines(): - if first_line and not re.match(r'From [0-9a-f]{40}', line): + if first_line and not re.match(r"From [0-9a-f]{40}", line): is_git_format = False first_line = False m = re_files.search(line) - if m and m.group(1) != '/dev/null': + if m and m.group(1) != "/dev/null": files.append(m.group(1)) - elif is_git_format and line.startswith('git') and not line.startswith('git --diff'): + elif ( + is_git_format + and line.startswith("git") + and not line.startswith("git --diff") + ): is_git_format = False return (files, is_git_format) def _patch_attributes_debug(pa, rel_path, build_prefix): return "[[ {}{}{}{}{}{}{}{}{}{} ]] - [[ {:>71} ]]".format( - 'R' if pa['reversible'] else '-', - 'A' if pa['applicable'] else '-', - 'Y' if pa['patch_exe'].startswith(build_prefix) else '-', - 'M' if not pa['amalgamated'] else '-', - 'D' if pa['dry_runnable'] else '-', - str(pa['level']), - 'L' if not pa['level_ambiguous'] else '-', - 'O' if not pa['offsets'] else '-', - 'V' if not pa['fuzzy'] else '-', - 'E' if not pa['stderr'] else '-', - rel_path[-71:]) + "R" if pa["reversible"] else "-", + "A" if pa["applicable"] else "-", + "Y" if pa["patch_exe"].startswith(build_prefix) else "-", + "M" if not pa["amalgamated"] else "-", + "D" if pa["dry_runnable"] else "-", + str(pa["level"]), + "L" if not pa["level_ambiguous"] else "-", + "O" if not pa["offsets"] else "-", + "V" if not pa["fuzzy"] else "-", + "E" if not pa["stderr"] else "-", + rel_path[-71:], + ) def _patch_attributes_debug_print(attributes): @@ -577,14 +732,18 @@ def _patch_attributes_debug_print(attributes): print("Patch analysis gives:") print("\n".join(attributes)) print("\nKey:\n") - print("R :: Reversible A :: Applicable\n" - "Y :: Build-prefix patch in use M :: Minimal, non-amalgamated\n" - "D :: Dry-runnable N :: Patch level (1 is preferred)\n" - "L :: Patch level not-ambiguous O :: Patch applies without offsets\n" - "V :: Patch applies without fuzz E :: Patch applies without emitting to stderr\n") - 
- -def _get_patch_attributes(path, patch_exe, git, src_dir, stdout, stderr, retained_tmpdir=None): + print( + "R :: Reversible A :: Applicable\n" + "Y :: Build-prefix patch in use M :: Minimal, non-amalgamated\n" + "D :: Dry-runnable N :: Patch level (1 is preferred)\n" + "L :: Patch level not-ambiguous O :: Patch applies without offsets\n" + "V :: Patch applies without fuzz E :: Patch applies without emitting to stderr\n" + ) + + +def _get_patch_attributes( + path, patch_exe, git, src_dir, stdout, stderr, retained_tmpdir=None +): from collections import OrderedDict files_list, is_git_format = _get_patch_file_details(path) @@ -594,41 +753,47 @@ def _get_patch_attributes(path, patch_exe, git, src_dir, stdout, stderr, retaine amalgamated = True strip_level, strip_level_guessed = _guess_patch_strip_level(files, src_dir) if strip_level: - files = {f.split('/', strip_level)[-1] for f in files} + files = {f.split("/", strip_level)[-1] for f in files} # Defaults - result = {'patch': path, - 'files': files, - 'patch_exe': git if (git and is_git_format) else patch_exe, - 'format': 'git' if is_git_format else 'generic', - # If these remain 'unknown' we had no patch program to test with. - 'dry_runnable': None, - 'applicable': None, - 'reversible': None, - 'amalgamated': amalgamated, - 'offsets': None, - 'fuzzy': None, - 'stderr': None, - 'level': strip_level, - 'level_ambiguous': strip_level_guessed, - 'args': []} + result = { + "patch": path, + "files": files, + "patch_exe": git if (git and is_git_format) else patch_exe, + "format": "git" if is_git_format else "generic", + # If these remain 'unknown' we had no patch program to test with. + "dry_runnable": None, + "applicable": None, + "reversible": None, + "amalgamated": amalgamated, + "offsets": None, + "fuzzy": None, + "stderr": None, + "level": strip_level, + "level_ambiguous": strip_level_guessed, + "args": [], + } crlf = False lf = False - with open(path, errors='ignore') as f: + with open(path, errors="ignore") as f: _content = f.read() - for line in _content.split('\n'): - if line.startswith((' ', '+', '-')): - if line.endswith('\r'): + for line in _content.split("\n"): + if line.startswith((" ", "+", "-")): + if line.endswith("\r"): crlf = True else: lf = True - result['line_endings'] = 'mixed' if (crlf and lf) else 'crlf' if crlf else 'lf' + result["line_endings"] = "mixed" if (crlf and lf) else "crlf" if crlf else "lf" if not patch_exe: - log.warning(f"No patch program found, cannot determine patch attributes for {path}") + log.warning( + f"No patch program found, cannot determine patch attributes for {path}" + ) if not git: - log.error("No git program found either. Please add a dependency for one of these.") + log.error( + "No git program found either. Please add a dependency for one of these." + ) return result class noop_context: @@ -643,28 +808,28 @@ def __enter__(self): def __exit__(self, exc, value, tb): return - fmts = OrderedDict(native=['--binary'], - lf=[], - crlf=[]) + fmts = OrderedDict(native=["--binary"], lf=[], crlf=[]) if patch_exe: # Good, we have a patch executable so we can perform some checks: - with noop_context(retained_tmpdir) if retained_tmpdir else TemporaryDirectory() as tmpdir: + with noop_context( + retained_tmpdir + ) if retained_tmpdir else TemporaryDirectory() as tmpdir: # Make all the fmts. 
- result['patches'] = {} + result["patches"] = {} for fmt, _ in fmts.items(): - new_patch = os.path.join(tmpdir, os.path.basename(path) + f'.{fmt}') - if fmt == 'native': + new_patch = os.path.join(tmpdir, os.path.basename(path) + f".{fmt}") + if fmt == "native": try: shutil.copy2(path, new_patch) except: shutil.copy(path, new_patch) - elif fmt == 'lf': + elif fmt == "lf": _ensure_LF(path, new_patch) - elif fmt == 'crlf': + elif fmt == "crlf": _ensure_CRLF(path, new_patch) - result['patches'][fmt] = new_patch + result["patches"][fmt] = new_patch - tmp_src_dir = os.path.join(tmpdir, 'src_dir') + tmp_src_dir = os.path.join(tmpdir, "src_dir") def copy_to_be_patched_files(src_dir, tmp_src_dir, files): try: @@ -684,26 +849,32 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): shutil.copy2(os.path.join(src_dir, file), dst) copy_to_be_patched_files(src_dir, tmp_src_dir, files) - checks = OrderedDict(dry_runnable=['--dry-run'], - applicable=[], - reversible=['-R']) + checks = OrderedDict( + dry_runnable=["--dry-run"], applicable=[], reversible=["-R"] + ) for check_name, extra_args in checks.items(): for fmt, fmt_args in fmts.items(): - patch_args = ['-Np{}'.format(result['level']), - '-i', result['patches'][fmt]] + extra_args + fmt_args + patch_args = ( + ["-Np{}".format(result["level"]), "-i", result["patches"][fmt]] + + extra_args + + fmt_args + ) try: env = os.environ.copy() - env['LC_ALL'] = 'C' - from subprocess import Popen, PIPE - process = Popen([patch_exe] + patch_args, - cwd=tmp_src_dir, - stdout=PIPE, - stderr=PIPE, - shell=False) + env["LC_ALL"] = "C" + from subprocess import PIPE, Popen + + process = Popen( + [patch_exe] + patch_args, + cwd=tmp_src_dir, + stdout=PIPE, + stderr=PIPE, + shell=False, + ) output, error = process.communicate() - result['offsets'] = b'offset' in output - result['fuzzy'] = b'fuzz' in output - result['stderr'] = bool(error) + result["offsets"] = b"offset" in output + result["fuzzy"] = b"fuzz" in output + result["stderr"] = bool(error) if stdout: stdout.write(output) if stderr: @@ -715,12 +886,12 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): else: result[check_name] = fmt # Save the first one found. - if check_name == 'applicable' and not result['args']: - result['args'] = patch_args + if check_name == "applicable" and not result["args"]: + result["args"] = patch_args break - if not retained_tmpdir and 'patches' in result: - del result['patches'] + if not retained_tmpdir and "patches" in result: + del result["patches"] return result @@ -728,7 +899,7 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): def apply_one_patch(src_dir, recipe_dir, rel_path, config, git=None): path = os.path.join(recipe_dir, rel_path) if config.verbose: - print(f'Applying patch: {path}') + print(f"Applying patch: {path}") def try_apply_patch(patch, patch_args, cwd, stdout, stderr): # An old reference: https://unix.stackexchange.com/a/243748/34459 @@ -757,32 +928,38 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): # Some may bemoan the loss of patch failure artifacts, but it is fairly random which # patch and patch attempt they apply to so their informational value is low, besides that, # they are ugly. 
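# Illustrative sketch, not part of the patch above: the dry-run / applicable /
# reversible checks in _get_patch_attributes() reduce to invoking the external
# `patch` program in a scratch copy of the sources and scanning its output for
# "offset" and "fuzz". The helper below is a minimal standalone restatement of
# that probe; the executable path, patch file, and strip level are placeholders.
import subprocess

def probe_patch(patch_exe, patch_file, src_dir, level=1, extra_args=()):
    args = [patch_exe, f"-Np{level}", "-i", patch_file, *extra_args]
    proc = subprocess.Popen(
        args, cwd=src_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out, err = proc.communicate()
    return {
        "ok": proc.returncode == 0,
        "offsets": b"offset" in out,  # hunks landed at shifted line numbers
        "fuzzy": b"fuzz" in out,      # hunks needed fuzz to apply
        "stderr": bool(err),
    }

# e.g. probe_patch("patch", "fix.patch", "src", extra_args=("--dry-run",))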
- temp_name = os.path.join(tempfile.gettempdir(), next(tempfile._get_candidate_names())) - base_patch_args = ['--no-backup-if-mismatch', '--batch'] + patch_args + temp_name = os.path.join( + tempfile.gettempdir(), next(tempfile._get_candidate_names()) + ) + base_patch_args = ["--no-backup-if-mismatch", "--batch"] + patch_args try: try_patch_args = base_patch_args[:] - try_patch_args.append('--dry-run') + try_patch_args.append("--dry-run") log.debug(f"dry-run applying with\n{patch} {try_patch_args}") - check_call_env([patch] + try_patch_args, cwd=cwd, stdout=stdout, stderr=stderr) + check_call_env( + [patch] + try_patch_args, cwd=cwd, stdout=stdout, stderr=stderr + ) # You can use this to pretend the patch failed so as to test reversal! # raise CalledProcessError(-1, ' '.join([patch] + patch_args)) except Exception as e: raise e else: - check_call_env([patch] + base_patch_args, cwd=cwd, stdout=stdout, stderr=stderr) + check_call_env( + [patch] + base_patch_args, cwd=cwd, stdout=stdout, stderr=stderr + ) finally: if os.path.exists(temp_name): os.unlink(temp_name) exception = None if not isfile(path): - raise RuntimeError('Error: no such patch: %s' % path) + raise RuntimeError(f"Error: no such patch: {path}") if config.verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL @@ -794,29 +971,39 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): if not patch_exe: raise MissingDependency("Failed to find conda-build dependency: 'patch'") with TemporaryDirectory() as tmpdir: - patch_attributes = _get_patch_attributes(path, patch_exe, git, src_dir, stdout, stderr, tmpdir) - attributes_output += _patch_attributes_debug(patch_attributes, rel_path, config.build_prefix) - if git and patch_attributes['format'] == 'git': + patch_attributes = _get_patch_attributes( + path, patch_exe, git, src_dir, stdout, stderr, tmpdir + ) + attributes_output += _patch_attributes_debug( + patch_attributes, rel_path, config.build_prefix + ) + if git and patch_attributes["format"] == "git": # Prevents git from asking interactive questions, # also necessary to achieve sha1 reproducibility; # as is --committer-date-is-author-date. By this, # we mean a round-trip of git am/git format-patch # gives the same file. 
git_env = os.environ - git_env['GIT_COMMITTER_NAME'] = 'conda-build' - git_env['GIT_COMMITTER_EMAIL'] = 'conda@conda-build.org' - check_call_env([git, 'am', '-3', '--committer-date-is-author-date', path], - cwd=src_dir, stdout=stdout, stderr=stderr, env=git_env) + git_env["GIT_COMMITTER_NAME"] = "conda-build" + git_env["GIT_COMMITTER_EMAIL"] = "conda@conda-build.org" + check_call_env( + [git, "am", "-3", "--committer-date-is-author-date", path], + cwd=src_dir, + stdout=stdout, + stderr=stderr, + env=git_env, + ) config.git_commits_since_tag += 1 else: - patch_args = patch_attributes['args'] + patch_args = patch_attributes["args"] if config.verbose: - print(f'Applying patch: {path} with args:\n{patch_args}') + print(f"Applying patch: {path} with args:\n{patch_args}") try: - try_apply_patch(patch_exe, patch_args, - cwd=src_dir, stdout=stdout, stderr=stderr) + try_apply_patch( + patch_exe, patch_args, cwd=src_dir, stdout=stdout, stderr=stderr + ) except Exception as e: exception = e if exception: @@ -825,7 +1012,9 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): def apply_patch(src_dir, patch, config, git=None): - apply_one_patch(src_dir, os.path.dirname(patch), os.path.basename(patch), config, git) + apply_one_patch( + src_dir, os.path.dirname(patch), os.path.basename(patch), config, git + ) def provide(metadata): @@ -835,47 +1024,63 @@ def provide(metadata): - unpack - apply patches (if any) """ - meta = metadata.get_section('source') - if not os.path.isdir(metadata.config.build_folder): - os.makedirs(metadata.config.build_folder) + os.makedirs(metadata.config.build_folder, exist_ok=True) git = None - if hasattr(meta, 'keys'): - dicts = [meta] - else: - dicts = meta - try: - for source_dict in dicts: - folder = source_dict.get('folder') - src_dir = os.path.join(metadata.config.work_dir, folder if folder else '') - if any(k in source_dict for k in ('fn', 'url')): - unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path, - croot=metadata.config.croot, verbose=metadata.config.verbose, - timeout=metadata.config.timeout, locking=metadata.config.locking) - elif 'git_url' in source_dict: - git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path, - verbose=metadata.config.verbose) + for source_dict in metadata.get_section("source"): + folder = source_dict.get("folder") + src_dir = os.path.join(metadata.config.work_dir, folder if folder else "") + if any(k in source_dict for k in ("fn", "url")): + unpack( + source_dict, + src_dir, + metadata.config.src_cache, + recipe_path=metadata.path, + croot=metadata.config.croot, + verbose=metadata.config.verbose, + timeout=metadata.config.timeout, + locking=metadata.config.locking, + ) + elif "git_url" in source_dict: + git = git_source( + source_dict, + metadata.config.git_cache, + src_dir, + metadata.path, + verbose=metadata.config.verbose, + ) # build to make sure we have a work directory with source in it. We # want to make sure that whatever version that is does not # interfere with the test we run next. 
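# Illustrative sketch, not part of the patch above: the rewritten provide() loop
# assumes metadata.get_section("source") always yields a list of per-source dicts.
# A recipe combining a tarball with a git checkout would produce something like the
# following (URL, hash, revision, and folder names are hypothetical placeholders):
example_sources = [
    {
        "url": "https://example.com/pkg-1.0.tar.gz",
        "sha256": "0" * 64,
        "folder": "pkg",
        "patches": ["0001-fix-build.patch"],
    },
    {
        "git_url": "https://github.com/example/helper.git",
        "git_rev": "v2.3",
        "folder": "helper",
    },
]
# Each dict is dispatched on its keys: fn/url -> unpack(), git_url -> git_source(),
# and hg_url/svn_url/path -> the corresponding handlers shown below.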
- elif 'hg_url' in source_dict: - hg_source(source_dict, src_dir, metadata.config.hg_cache, - verbose=metadata.config.verbose) - elif 'svn_url' in source_dict: - svn_source(source_dict, src_dir, metadata.config.svn_cache, - verbose=metadata.config.verbose, timeout=metadata.config.timeout, - locking=metadata.config.locking) - elif 'path' in source_dict: - source_path = os.path.expanduser(source_dict['path']) + elif "hg_url" in source_dict: + hg_source( + source_dict, + src_dir, + metadata.config.hg_cache, + verbose=metadata.config.verbose, + ) + elif "svn_url" in source_dict: + svn_source( + source_dict, + src_dir, + metadata.config.svn_cache, + verbose=metadata.config.verbose, + timeout=metadata.config.timeout, + locking=metadata.config.locking, + ) + elif "path" in source_dict: + source_path = os.path.expanduser(source_dict["path"]) path = normpath(abspath(join(metadata.path, source_path))) - path_via_symlink = 'path_via_symlink' in source_dict + path_via_symlink = "path_via_symlink" in source_dict if path_via_symlink and not folder: - print("WARNING: `path_via_symlink` is too dangerous without specifying a folder,\n" - " conda could end up changing - or deleting - your local source code!\n" - " Going to make copies instead. When using `path_via_symlink` you should\n" - " also take care to run the build outside of your local source code folder(s)\n" - " unless that is your intention.") + print( + "WARNING: `path_via_symlink` is too dangerous without specifying a folder,\n" + " conda could end up changing - or deleting - your local source code!\n" + " Going to make copies instead. When using `path_via_symlink` you should\n" + " also take care to run the build outside of your local source code folder(s)\n" + " unless that is your intention." + ) path_via_symlink = False sys.exit(1) if path_via_symlink: @@ -890,20 +1095,30 @@ def provide(metadata): print(f"Copying {path} to {src_dir}") # careful here: we set test path to be outside of conda-build root in setup.cfg. 
# If you don't do that, this is a recursive function - copy_into(path, src_dir, metadata.config.timeout, symlinks=True, - locking=metadata.config.locking, clobber=True) + copy_into( + path, + src_dir, + metadata.config.timeout, + symlinks=True, + locking=metadata.config.locking, + clobber=True, + ) else: # no source if not isdir(src_dir): os.makedirs(src_dir) - patches = ensure_list(source_dict.get('patches', [])) + patches = ensure_list(source_dict.get("patches", [])) patch_attributes_output = [] for patch in patches: - patch_attributes_output += [apply_one_patch(src_dir, metadata.path, patch, metadata.config, git)] + patch_attributes_output += [ + apply_one_patch(src_dir, metadata.path, patch, metadata.config, git) + ] _patch_attributes_debug_print(patch_attributes_output) except CalledProcessError: - shutil.move(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide') + shutil.move( + metadata.config.work_dir, metadata.config.work_dir + "_failed_provide" + ) raise return metadata.config.work_dir diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index ac8cebd769..374422f1e1 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -1,19 +1,19 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from os.path import basename, normpath import tarfile +from os.path import basename, normpath -from conda_build.utils import codec, filter_info_files +from .utils import codec, filter_info_files def dist_fn(fn): - if fn.endswith('.tar'): + if fn.endswith(".tar"): return fn[:-4] - elif fn.endswith('.tar.bz2'): + elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception('did not expect filename: %r' % fn) + raise Exception(f"did not expect filename: {fn!r}") class TarCheck: @@ -21,7 +21,9 @@ def __init__(self, path, config): self.t = tarfile.open(path) self.paths = {m.path for m in self.t.getmembers()} self.dist = dist_fn(basename(path)) - self.name, self.version, self.build = self.dist.split('::', 1)[-1].rsplit('-', 2) + self.name, self.version, self.build = self.dist.split("::", 1)[-1].rsplit( + "-", 2 + ) self.config = config def __enter__(self): @@ -31,57 +33,64 @@ def __exit__(self, e_type, e_value, traceback): self.t.close() def info_files(self): - lista = [normpath(p.strip().decode('utf-8')) for p in - self.t.extractfile('info/files').readlines()] + lista = [ + normpath(p.strip().decode("utf-8")) + for p in self.t.extractfile("info/files").readlines() + ] seta = set(lista) if len(lista) != len(seta): - raise Exception('info/files: duplicates') + raise Exception("info/files: duplicates") files_in_tar = [normpath(m.path) for m in self.t.getmembers()] - files_in_tar = filter_info_files(files_in_tar, '') + files_in_tar = filter_info_files(files_in_tar, "") setb = set(files_in_tar) if len(files_in_tar) != len(setb): - raise Exception('info_files: duplicate members') + raise Exception("info_files: duplicate members") if seta == setb: return for p in sorted(seta | setb): if p not in seta: - print('%r not in info/files' % p) + print(f"{p!r} not in info/files") if p not in setb: - print('%r not in tarball' % p) - raise Exception('info/files') + print(f"{p!r} not in tarball") + raise Exception("info/files") def index_json(self): - info = json.loads(self.t.extractfile('info/index.json').read().decode('utf-8')) - for varname in 'name', 'version': + info = json.loads(self.t.extractfile("info/index.json").read().decode("utf-8")) + for varname in "name", "version": if info[varname] != getattr(self, varname): - raise 
Exception('{}: {!r} != {!r}'.format(varname, info[varname], - getattr(self, varname))) - assert isinstance(info['build_number'], int) + raise Exception( + f"{varname}: {info[varname]!r} != {getattr(self, varname)!r}" + ) + assert isinstance(info["build_number"], int) def prefix_length(self): prefix_length = None - if 'info/has_prefix' in self.t.getnames(): - prefix_files = self.t.extractfile('info/has_prefix').readlines() + if "info/has_prefix" in self.t.getnames(): + prefix_files = self.t.extractfile("info/has_prefix").readlines() for line in prefix_files: try: prefix, file_type, _ = line.split() # lines not conforming to the split except ValueError: continue - if hasattr(file_type, 'decode'): + if hasattr(file_type, "decode"): file_type = file_type.decode(codec) - if file_type == 'binary': + if file_type == "binary": prefix_length = len(prefix) break return prefix_length def correct_subdir(self): - info = json.loads(self.t.extractfile('info/index.json').read().decode('utf-8')) - assert info['subdir'] in [self.config.host_subdir, 'noarch', self.config.target_subdir], \ - ("Inconsistent subdir in package - index.json expecting {}," - " got {}".format(self.config.host_subdir, info['subdir'])) + info = json.loads(self.t.extractfile("info/index.json").read().decode("utf-8")) + assert info["subdir"] in [ + self.config.host_subdir, + "noarch", + self.config.target_subdir, + ], "Inconsistent subdir in package - index.json expecting {}, got {}".format( + self.config.host_subdir, info["subdir"] + ) def check_all(path, config): diff --git a/conda_build/utils.py b/conda_build/utils.py index 37d978fca5..796f849caf 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1,105 +1,94 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict, defaultdict +from __future__ import annotations + import contextlib import fnmatch -from functools import lru_cache import hashlib -from itertools import filterfalse import json -from locale import getpreferredencoding -import libarchive import logging import logging.config import mmap import os -from os.path import (dirname, getmtime, getsize, isdir, join, isfile, abspath, islink, - expanduser, expandvars) import re +import shutil import stat import subprocess import sys -import shutil import tarfile import tempfile -from threading import Thread import time -from pathlib import Path - -try: - from json.decoder import JSONDecodeError -except ImportError: - JSONDecodeError = ValueError - -import yaml - -import filelock -import conda_package_handling.api - -try: - from conda.base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 -except Exception: - from conda.base.constants import CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1 - CONDA_PACKAGE_EXTENSION_V2 = ".conda" - CONDA_PACKAGE_EXTENSIONS = (CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) - -from conda.api import PackageCacheData # noqa - -from .conda_interface import hashsum_file, md5_file, unix_path_to_win, win_path_to_unix # noqa -from .conda_interface import root_dir, pkgs_dirs # noqa -from .conda_interface import StringIO # noqa -from .conda_interface import VersionOrder, MatchSpec # noqa -from .conda_interface import cc_conda_build # noqa -from .conda_interface import conda_43, conda_46, Dist # noqa -from .conda_interface import context # noqa -from .conda_interface import download, TemporaryDirectory, get_conda_channel, CondaHTTPError # noqa -# NOQA because it is not used in this 
file. -from conda_build.conda_interface import rm_rf as _rm_rf # noqa -from conda_build.exceptions import BuildLockError # noqa -from conda_build.os_utils import external # noqa - import urllib.parse as urlparse import urllib.request as urllib +from collections import OrderedDict, defaultdict +from functools import lru_cache +from glob import glob +from io import StringIO +from itertools import filterfalse +from json.decoder import JSONDecodeError +from locale import getpreferredencoding +from os import walk +from os.path import ( + abspath, + dirname, + expanduser, + expandvars, + getmtime, + getsize, + isdir, + isfile, + islink, + join, +) +from pathlib import Path +from threading import Thread +from typing import TYPE_CHECKING, Iterable, overload -from glob import glob as glob_glob - - -# stdlib glob is less feature-rich but considerably faster than glob2 -def glob(pathname, recursive=True): - return glob_glob(pathname, recursive=recursive) - - -# NOQA because it is not used in this file. -from contextlib import ExitStack # NOQA -PermissionError = PermissionError # NOQA -FileNotFoundError = FileNotFoundError - -on_win = (sys.platform == 'win32') +import conda_package_handling.api +import filelock +import libarchive +import yaml +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSION_V1, # noqa: F401 + CONDA_PACKAGE_EXTENSION_V2, # noqa: F401 + CONDA_PACKAGE_EXTENSIONS, + KNOWN_SUBDIRS, +) +from conda.base.context import context +from conda.common.path import win_path_to_unix +from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.read import compute_sum +from conda.models.channel import Channel +from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder +from conda.utils import unix_path_to_win + +from .deprecations import deprecated +from .exceptions import BuildLockError + +if TYPE_CHECKING: + from typing import Mapping, TypeVar + + from .metadata import MetaData + + T = TypeVar("T") + K = TypeVar("K") + V = TypeVar("V") -codec = getpreferredencoding() or 'utf-8' on_win = sys.platform == "win32" -root_script_dir = os.path.join(root_dir, 'Scripts' if on_win else 'bin') +on_mac = sys.platform == "darwin" +on_linux = sys.platform == "linux" + +codec = getpreferredencoding() or "utf-8" +root_script_dir = os.path.join(context.root_prefix, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE -DEFAULT_SUBDIRS = { - "linux-64", - "linux-32", - "linux-s390x", - "linux-ppc64", - "linux-ppc64le", - "linux-armv6l", - "linux-armv7l", - "linux-aarch64", - "win-64", - "win-32", - "win-arm64", - "osx-64", - "osx-arm64", - "zos-z", - "noarch", -} +DEFAULT_SUBDIRS = set(KNOWN_SUBDIRS) RUN_EXPORTS_TYPES = { "weak", @@ -124,11 +113,6 @@ def glob(pathname, recursive=True): # filenames accepted as recipe meta files VALID_METAS = ("meta.yaml", "meta.yml", "conda.yaml", "conda.yml") -try: - from os import scandir, walk # NOQA -except ImportError: - from scandir import walk - @lru_cache(maxsize=None) def stat_file(path): @@ -156,19 +140,19 @@ def directory_size_slow(path): def directory_size(path): - ''' - ''' try: if on_win: command = 'dir /s "{}"' # Windows path can have spaces out = subprocess.check_output(command.format(path), shell=True) else: command = "du -s 
{}" - out = subprocess.check_output(command.format(path).split(), stderr=subprocess.PIPE) + out = subprocess.check_output( + command.format(path).split(), stderr=subprocess.PIPE + ) - if hasattr(out, 'decode'): + if hasattr(out, "decode"): try: - out = out.decode(errors='ignore') + out = out.decode(errors="ignore") # This isn't important anyway so give up. Don't try search on bytes. except (UnicodeDecodeError, IndexError): if on_win: @@ -177,10 +161,10 @@ def directory_size(path): pass if on_win: # Windows can give long output, we need only 2nd to last line - out = out.strip().rsplit('\r\n', 2)[-2] + out = out.strip().rsplit("\r\n", 2)[-2] pattern = r"\s([\d\W]+).+" # Language and punctuation neutral out = re.search(pattern, out.strip()).group(1).strip() - out = out.replace(',', '').replace('.', '').replace(' ', '') + out = out.replace(",", "").replace(".", "").replace(" ", "") else: out = out.split()[0] except subprocess.CalledProcessError: @@ -215,11 +199,11 @@ def _setup_rewrite_pipe(env): replacements[v] = k r_fd, w_fd = os.pipe() - r = os.fdopen(r_fd, 'rt') - if sys.platform == 'win32': - replacement_t = '%{}%' + r = os.fdopen(r_fd, "rt") + if on_win: + replacement_t = "%{}%" else: - replacement_t = '${}' + replacement_t = "${}" def rewriter(): while True: @@ -236,7 +220,7 @@ def rewriter(): except UnicodeDecodeError: try: txt = os.read(r, 10000) - sys.stdout.write(txt or '') + sys.stdout.write(txt or "") except TypeError: pass @@ -264,19 +248,26 @@ def __init__(self, *args, **kwargs): def _execute(self, *args, **kwargs): try: import psutil - psutil_exceptions = psutil.NoSuchProcess, psutil.AccessDenied, psutil.NoSuchProcess + + psutil_exceptions = ( + psutil.NoSuchProcess, + psutil.AccessDenied, + psutil.NoSuchProcess, + ) except ImportError as e: psutil = None psutil_exceptions = (OSError, ValueError) log = get_logger(__name__) log.warn(f"psutil import failed. Error was {e}") - log.warn("only disk usage and time statistics will be available. Install psutil to " - "get CPU time and memory usage statistics.") + log.warn( + "only disk usage and time statistics will be available. Install psutil to " + "get CPU time and memory usage statistics." + ) # The polling interval (in seconds) - time_int = kwargs.pop('time_int', 2) + time_int = kwargs.pop("time_int", 2) - disk_usage_dir = kwargs.get('cwd', sys.prefix) + disk_usage_dir = kwargs.get("cwd", sys.prefix) # Create a process of this (the parent) process parent = psutil.Process(os.getpid()) if psutil else DummyPsutilProcess() @@ -285,7 +276,11 @@ def _execute(self, *args, **kwargs): # Using the convenience Popen class provided by psutil start_time = time.time() - _popen = psutil.Popen(*args, **kwargs) if psutil else subprocess.Popen(*args, **kwargs) + _popen = ( + psutil.Popen(*args, **kwargs) + if psutil + else subprocess.Popen(*args, **kwargs) + ) try: while self.returncode is None: # We need to get all of the children of our process since our @@ -306,8 +301,8 @@ def _execute(self, *args, **kwargs): # we are instead looping over children and getting each individually. # https://psutil.readthedocs.io/en/latest/#psutil.Process.cpu_times cpu_stats = child.cpu_times() - child_cpu_usage['sys'] = cpu_stats.system - child_cpu_usage['user'] = cpu_stats.user + child_cpu_usage["sys"] = cpu_stats.system + child_cpu_usage["user"] = cpu_stats.user cpu_usage[child.pid] = child_cpu_usage except psutil_exceptions: # process already died. Just ignore it. 
@@ -317,8 +312,8 @@ def _execute(self, *args, **kwargs): # Sum the memory usage of all the children together (2D columnwise sum) self.rss = max(rss, self.rss) self.vms = max(vms, self.vms) - self.cpu_sys = sum(child['sys'] for child in cpu_usage.values()) - self.cpu_user = sum(child['user'] for child in cpu_usage.values()) + self.cpu_sys = sum(child["sys"] for child in cpu_usage.values()) + self.cpu_user = sum(child["user"] for child in cpu_usage.values()) self.processes = max(processes, self.processes) # Get disk usage @@ -337,74 +332,83 @@ def _execute(self, *args, **kwargs): return _popen.stdout, _popen.stderr def __repr__(self): - return str({'elapsed': self.elapsed, - 'rss': self.rss, - 'vms': self.vms, - 'disk': self.disk, - 'processes': self.processes, - 'cpu_user': self.cpu_user, - 'cpu_sys': self.cpu_sys, - 'returncode': self.returncode}) + return str( + { + "elapsed": self.elapsed, + "rss": self.rss, + "vms": self.vms, + "disk": self.disk, + "processes": self.processes, + "cpu_user": self.cpu_user, + "cpu_sys": self.cpu_sys, + "returncode": self.returncode, + } + ) def _func_defaulting_env_to_os_environ(func, *popenargs, **kwargs): - if 'env' not in kwargs: + if "env" not in kwargs: kwargs = kwargs.copy() env_copy = os.environ.copy() - kwargs.update({'env': env_copy}) - kwargs['env'] = {str(key): str(value) for key, value in kwargs['env'].items()} + kwargs.update({"env": env_copy}) + kwargs["env"] = {str(key): str(value) for key, value in kwargs["env"].items()} _args = [] - if 'stdin' not in kwargs: - kwargs['stdin'] = subprocess.PIPE + if "stdin" not in kwargs: + kwargs["stdin"] = subprocess.PIPE for arg in popenargs: # arguments to subprocess need to be bytestrings - if sys.version_info.major < 3 and hasattr(arg, 'encode'): + if sys.version_info.major < 3 and hasattr(arg, "encode"): arg = arg.encode(codec) - elif sys.version_info.major >= 3 and hasattr(arg, 'decode'): + elif sys.version_info.major >= 3 and hasattr(arg, "decode"): arg = arg.decode(codec) _args.append(str(arg)) - stats = kwargs.get('stats') - if 'stats' in kwargs: - del kwargs['stats'] + stats = kwargs.get("stats") + if "stats" in kwargs: + del kwargs["stats"] - rewrite_stdout_env = kwargs.pop('rewrite_stdout_env', None) + rewrite_stdout_env = kwargs.pop("rewrite_stdout_env", None) if rewrite_stdout_env: - kwargs['stdout'] = _setup_rewrite_pipe(rewrite_stdout_env) + kwargs["stdout"] = _setup_rewrite_pipe(rewrite_stdout_env) out = None if stats is not None: proc = PopenWrapper(_args, **kwargs) - if func == 'output': + if func == "output": out = proc.out.read() if proc.returncode != 0: raise subprocess.CalledProcessError(proc.returncode, _args) - stats.update({'elapsed': proc.elapsed, - 'disk': proc.disk, - 'processes': proc.processes, - 'cpu_user': proc.cpu_user, - 'cpu_sys': proc.cpu_sys, - 'rss': proc.rss, - 'vms': proc.vms}) + stats.update( + { + "elapsed": proc.elapsed, + "disk": proc.disk, + "processes": proc.processes, + "cpu_user": proc.cpu_user, + "cpu_sys": proc.cpu_sys, + "rss": proc.rss, + "vms": proc.vms, + } + ) else: - if func == 'call': + if func == "call": subprocess.check_call(_args, **kwargs) else: - if 'stdout' in kwargs: - del kwargs['stdout'] + if "stdout" in kwargs: + del kwargs["stdout"] out = subprocess.check_output(_args, **kwargs) return out def check_call_env(popenargs, **kwargs): - return _func_defaulting_env_to_os_environ('call', *popenargs, **kwargs) + return _func_defaulting_env_to_os_environ("call", *popenargs, **kwargs) def check_output_env(popenargs, **kwargs): - return 
_func_defaulting_env_to_os_environ('output', stdout=subprocess.PIPE, - *popenargs, **kwargs).rstrip() + return _func_defaulting_env_to_os_environ( + "output", stdout=subprocess.PIPE, *popenargs, **kwargs + ).rstrip() def bytes2human(n): @@ -413,15 +417,15 @@ def bytes2human(n): # '9.8K' # >>> bytes2human(100001221) # '95.4M' - symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y") prefix = {} for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] - return f'{value:.1f}{s}' - return "%sB" % n + return f"{value:.1f}{s}" + return f"{n}B" def seconds2human(s): @@ -438,20 +442,23 @@ def get_recipe_abspath(recipe): and needs cleanup. """ if isfile(recipe): - if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): + if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith( + CONDA_PACKAGE_EXTENSIONS + ): recipe_dir = tempfile.mkdtemp() if recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): import conda_package_handling.api + conda_package_handling.api.extract(recipe, recipe_dir) else: tar_xf(recipe, recipe_dir) # At some stage the old build system started to tar up recipes. - recipe_tarfile = os.path.join(recipe_dir, 'info', 'recipe.tar') + recipe_tarfile = os.path.join(recipe_dir, "info", "recipe.tar") if isfile(recipe_tarfile): - tar_xf(recipe_tarfile, os.path.join(recipe_dir, 'info')) + tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: - print("Ignoring non-recipe: %s" % recipe) + print(f"Ignoring non-recipe: {recipe}") return (None, None) else: recipe_dir = abspath(os.path.join(os.getcwd(), recipe)) @@ -471,7 +478,7 @@ def try_acquire_locks(locks, timeout): http://stackoverflow.com/questions/9814008/multiple-mutex-locking-strategies-and-why-libraries-dont-use-address-comparison """ t = time.time() - while (time.time() - t < timeout): + while time.time() - t < timeout: # Continuously try to acquire all locks. # By passing a short timeout to each individual lock, we give other # processes that might be trying to acquire the same locks (and may @@ -495,7 +502,7 @@ def try_acquire_locks(locks, timeout): # If we reach this point, we weren't able to acquire all locks within # the specified timeout. We shouldn't be holding any locks anymore at # this point, so we just raise an exception. - raise BuildLockError('Failed to acquire all locks') + raise BuildLockError("Failed to acquire all locks") try: yield @@ -518,8 +525,12 @@ def _copy_with_shell_fallback(src, dst): continue if not is_copied: try: - subprocess.check_call(f'cp -a {src} {dst}', shell=True, - stderr=subprocess.PIPE, stdout=subprocess.PIPE) + subprocess.check_call( + f"cp -a {src} {dst}", + shell=True, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) except subprocess.CalledProcessError as e: if not os.path.isfile(dst): raise OSError(f"Failed to copy {src} to {dst}. 
Error was: {e}") @@ -534,7 +545,9 @@ def get_prefix_replacement_paths(src, dst): return os.path.join(*ssplit), os.path.join(*dsplit) -def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False): +def copy_into( + src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False +): """Copy all the files and directories in src to the directory dst""" log = get_logger(__name__) if symlinks and islink(src): @@ -555,7 +568,15 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl except: pass # lchmod not available elif isdir(src): - merge_tree(src, dst, symlinks, timeout=timeout, lock=lock, locking=locking, clobber=clobber) + merge_tree( + src, + dst, + symlinks, + timeout=timeout, + lock=lock, + locking=locking, + clobber=clobber, + ) else: if isdir(dst): @@ -574,7 +595,7 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl src_folder = os.getcwd() if os.path.islink(src) and not os.path.exists(os.path.realpath(src)): - log.warn('path %s is a broken symlink - ignoring copy', src) + log.warn("path %s is a broken symlink - ignoring copy", src) return if not lock and locking: @@ -591,8 +612,9 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl try: _copy_with_shell_fallback(src, dst_fn) except shutil.Error: - log.debug("skipping %s - already exists in %s", - os.path.basename(src), dst) + log.debug( + "skipping %s - already exists in %s", os.path.basename(src), dst + ) def move_with_fallback(src, dst): @@ -604,7 +626,9 @@ def move_with_fallback(src, dst): os.unlink(src) except PermissionError: log = get_logger(__name__) - log.debug(f"Failed to copy/remove path from {src} to {dst} due to permission error") + log.debug( + f"Failed to copy/remove path from {src} to {dst} due to permission error" + ) # http://stackoverflow.com/a/22331852/1170370 @@ -618,8 +642,8 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): lst = [x for x in lst if x not in excl] # do not copy lock files - if '.conda_lock' in lst: - lst.remove('.conda_lock') + if ".conda_lock" in lst: + lst.remove(".conda_lock") dst_lst = [os.path.join(dst, item) for item in lst] @@ -645,7 +669,9 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): return dst_lst -def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False): +def merge_tree( + src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False +): """ Merge src into dst recursively by copying all files from src into dst. Return a list of all files copied. @@ -655,17 +681,18 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c """ dst = os.path.normpath(os.path.normcase(dst)) src = os.path.normpath(os.path.normcase(src)) - assert not dst.startswith(src), ("Can't merge/copy source into subdirectory of itself. " - "Please create separate spaces for these things.\n" - " src: {}\n" - " dst: {}".format(src, dst)) + assert not dst.startswith(src), ( + "Can't merge/copy source into subdirectory of itself. 
" + "Please create separate spaces for these things.\n" + f" src: {src}\n" + f" dst: {dst}" + ) new_files = copytree(src, dst, symlinks=symlinks, dry_run=True) existing = [f for f in new_files if isfile(f)] if existing and not clobber: - raise OSError("Can't merge {} into {}: file exists: " - "{}".format(src, dst, existing[0])) + raise OSError(f"Can't merge {src} into {dst}: file exists: {existing[0]}") locks = [] if locking: @@ -679,8 +706,10 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c # purpose here is that we want *one* lock per location on disk. It can be locked or unlocked # at any time, but the lock within this process should all be tied to the same tracking # mechanism. -_lock_folders = (os.path.join(root_dir, 'locks'), - os.path.expanduser(os.path.join('~', '.conda_build_locks'))) +_lock_folders = ( + os.path.join(context.root_prefix, "locks"), + os.path.expanduser(os.path.join("~", ".conda_build_locks")), +) def get_lock(folder, timeout=900): @@ -690,28 +719,30 @@ def get_lock(folder, timeout=900): except OSError: location = folder b_location = location - if hasattr(b_location, 'encode'): + if hasattr(b_location, "encode"): b_location = b_location.encode() # Hash the entire filename to avoid collisions. lock_filename = hashlib.sha256(b_location).hexdigest() - if hasattr(lock_filename, 'decode'): + if hasattr(lock_filename, "decode"): lock_filename = lock_filename.decode() for locks_dir in _lock_folders: try: if not os.path.isdir(locks_dir): os.makedirs(locks_dir) lock_file = os.path.join(locks_dir, lock_filename) - with open(lock_file, 'w') as f: + with open(lock_file, "w") as f: f.write("") fl = filelock.FileLock(lock_file, timeout) break except OSError: continue else: - raise RuntimeError("Could not write locks folder to either system location ({})" - "or user location ({}). Aborting.".format(*_lock_folders)) + raise RuntimeError( + "Could not write locks folder to either system location ({})" + "or user location ({}). 
Aborting.".format(*_lock_folders) + ) return fl @@ -720,56 +751,56 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): bldpkgs_dirs = ensure_list(bldpkgs_dirs) # locks enabled by default if locking: - _pkgs_dirs = pkgs_dirs[:1] - locked_folders = _pkgs_dirs + list(bldpkgs_dirs) - for folder in locked_folders: + for folder in (*context.pkgs_dirs[:1], *bldpkgs_dirs): if not os.path.isdir(folder): os.makedirs(folder) lock = get_lock(folder, timeout=timeout) locks.append(lock) # lock used to generally indicate a conda operation occurring - locks.append(get_lock('conda-operation', timeout=timeout)) + locks.append(get_lock("conda-operation", timeout=timeout)) return locks -def relative(f, d='lib'): - assert not f.startswith('/'), f - assert not d.startswith('/'), d - d = d.strip('/').split('/') - if d == ['.']: - d = [] - f = dirname(f).split('/') - if f == ['']: - f = [] - while d and f and d[0] == f[0]: - d.pop(0) - f.pop(0) - return '/'.join(((['..'] * len(f)) if f else ['.']) + d) - - # This is the lowest common denominator of the formats supported by our libarchive/python-libarchive-c # packages across all platforms -decompressible_exts = ('.7z', '.tar', '.tar.bz2', '.tar.gz', '.tar.lzma', '.tar.xz', - '.tar.z', '.tar.zst', '.tgz', '.whl', '.zip', '.rpm', '.deb') - - -def _tar_xf_fallback(tarball, dir_path, mode='r:*'): - if tarball.lower().endswith('.tar.z'): - uncompress = external.find_executable('uncompress') +decompressible_exts = ( + ".7z", + ".tar", + ".tar.bz2", + ".tar.gz", + ".tar.lzma", + ".tar.xz", + ".tar.z", + ".tar.zst", + ".tgz", + ".whl", + ".zip", + ".rpm", + ".deb", +) + + +def _tar_xf_fallback(tarball, dir_path, mode="r:*"): + from .os_utils.external import find_executable + + if tarball.lower().endswith(".tar.z"): + uncompress = find_executable("uncompress") if not uncompress: - uncompress = external.find_executable('gunzip') + uncompress = find_executable("gunzip") if not uncompress: - sys.exit("""\ + sys.exit( + """\ uncompress (or gunzip) is required to unarchive .z source files. 
-""") - check_call_env([uncompress, '-f', tarball]) +""" + ) + check_call_env([uncompress, "-f", tarball]) tarball = tarball[:-2] t = tarfile.open(tarball, mode) members = t.getmembers() for i, member in enumerate(members, 0): if os.path.isabs(member.name): - member.name = os.path.relpath(member.name, '/') + member.name = os.path.relpath(member.name, "/") cwd = os.path.realpath(os.getcwd()) if not os.path.realpath(member.name).startswith(cwd): member.name = member.name.replace("../", "") @@ -782,7 +813,6 @@ def _tar_xf_fallback(tarball, dir_path, mode='r:*'): def tar_xf_file(tarball, entries): - from conda_build.utils import ensure_list entries = ensure_list(entries) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) @@ -814,11 +844,13 @@ def tar_xf_getnames(tarball): def tar_xf(tarball, dir_path): - flags = libarchive.extract.EXTRACT_TIME | \ - libarchive.extract.EXTRACT_PERM | \ - libarchive.extract.EXTRACT_SECURE_NODOTDOT | \ - libarchive.extract.EXTRACT_SECURE_SYMLINKS | \ - libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + flags = ( + libarchive.extract.EXTRACT_TIME + | libarchive.extract.EXTRACT_PERM + | libarchive.extract.EXTRACT_SECURE_NODOTDOT + | libarchive.extract.EXTRACT_SECURE_SYMLINKS + | libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + ) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) try: @@ -828,20 +860,24 @@ def tar_xf(tarball, dir_path): # try again, maybe we are on Windows and the archive contains symlinks # https://github.com/conda/conda-build/issues/3351 # https://github.com/libarchive/libarchive/pull/1030 - if tarball.lower().endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tar.z', '.tar.xz')): + if tarball.lower().endswith( + (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tar.z", ".tar.xz") + ): _tar_xf_fallback(tarball, dir_path) else: raise def file_info(path): - return {'size': getsize(path), - 'md5': md5_file(path), - 'sha256': hashsum_file(path, 'sha256'), - 'mtime': getmtime(path)} + return { + "size": getsize(path), + "md5": compute_sum(path, "md5"), + "sha256": compute_sum(path, "sha256"), + "mtime": getmtime(path), + } -def comma_join(items): +def comma_join(items: Iterable[str], conjunction: str = "and") -> str: """ Like ', '.join(items) but with and @@ -854,7 +890,10 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ' and '.join(items) if len(items) <= 2 else ', '.join(items[:-1]) + ', and ' + items[-1] + items = tuple(items) + if len(items) <= 2: + return f"{items[0]} {conjunction} {items[1]}" + return f"{', '.join(items[:-1])}, {conjunction} {items[-1]}" def safe_print_unicode(*args, **kwargs): @@ -867,12 +906,12 @@ def safe_print_unicode(*args, **kwargs): :param end: ending character (defaults to '\n') :param errors: error handler for encoding errors (defaults to 'replace') """ - sep = kwargs.pop('sep', ' ') - end = kwargs.pop('end', '\n') - errors = kwargs.pop('errors', 'replace') + sep = kwargs.pop("sep", " ") + end = kwargs.pop("end", "\n") + errors = kwargs.pop("errors", "replace") func = sys.stdout.buffer.write line = sep.join(args) + end - encoding = sys.stdout.encoding or 'utf8' + encoding = sys.stdout.encoding or "utf8" func(line.encode(encoding, errors)) @@ -901,7 +940,9 @@ def rec_glob(path, patterns, ignores=None): def convert_unix_path_to_win(path): - if external.find_executable('cygpath'): + from .os_utils.external import find_executable + + if find_executable("cygpath"): cmd = f"cygpath -w {path}" path = subprocess.getoutput(cmd) @@ -911,7 +952,9 @@ 
def convert_unix_path_to_win(path): def convert_win_path_to_unix(path): - if external.find_executable('cygpath'): + from .os_utils.external import find_executable + + if find_executable("cygpath"): cmd = f"cygpath -u {path}" path = subprocess.getoutput(cmd) @@ -923,44 +966,49 @@ def convert_win_path_to_unix(path): # Used for translating local paths into url (file://) paths # http://stackoverflow.com/a/14298190/1170370 def path2url(path): - return urlparse.urljoin('file:', urllib.pathname2url(path)) + return urlparse.urljoin("file:", urllib.pathname2url(path)) def get_stdlib_dir(prefix, py_ver): - if sys.platform == 'win32': - lib_dir = os.path.join(prefix, 'Lib') + if on_win: + lib_dir = os.path.join(prefix, "Lib") else: - lib_dir = os.path.join(prefix, 'lib') - python_folder = glob(os.path.join(lib_dir, 'python?.*')) + lib_dir = os.path.join(prefix, "lib") + python_folder = glob(os.path.join(lib_dir, "python?.*"), recursive=True) python_folder = sorted(filterfalse(islink, python_folder)) if python_folder: lib_dir = os.path.join(lib_dir, python_folder[0]) else: - lib_dir = os.path.join(lib_dir, f'python{py_ver}') + lib_dir = os.path.join(lib_dir, f"python{py_ver}") return lib_dir def get_site_packages(prefix, py_ver): - return os.path.join(get_stdlib_dir(prefix, py_ver), 'site-packages') + return os.path.join(get_stdlib_dir(prefix, py_ver), "site-packages") -def get_build_folders(croot): +def get_build_folders(croot: str | os.PathLike | Path) -> list[str]: # remember, glob is not a regex. - return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*")) + return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*"), recursive=True) def prepend_bin_path(env, prefix, prepend_prefix=False): - # bin_dirname takes care of bin on *nix, Scripts on win - env['PATH'] = join(prefix, bin_dirname) + os.pathsep + env['PATH'] - if sys.platform == "win32": - env['PATH'] = join(prefix, "Library", "mingw-w64", "bin") + os.pathsep + \ - join(prefix, "Library", "usr", "bin") + os.pathsep + os.pathsep + \ - join(prefix, "Library", "bin") + os.pathsep + \ - join(prefix, "Scripts") + os.pathsep + \ - env['PATH'] + env["PATH"] = join(prefix, "bin") + os.pathsep + env["PATH"] + if on_win: + env["PATH"] = ( + join(prefix, "Library", "mingw-w64", "bin") + + os.pathsep + + join(prefix, "Library", "usr", "bin") + + os.pathsep + + join(prefix, "Library", "bin") + + os.pathsep + + join(prefix, "Scripts") + + os.pathsep + + env["PATH"] + ) prepend_prefix = True # windows has Python in the prefix. Use it. 
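# Illustrative sketch, not part of the patch above: prepend_bin_path() only splices
# prefix directories onto the front of PATH with os.pathsep. The prefix below is a
# hypothetical placeholder.
import os
from conda_build.utils import prepend_bin_path

env = prepend_bin_path({"PATH": os.defpath}, "/opt/conda/envs/build-env")
# On Unix, env["PATH"] now starts with "/opt/conda/envs/build-env/bin"; on Windows
# the Library\mingw-w64\bin, Library\usr\bin, Library\bin and Scripts directories,
# plus the prefix itself, are prepended instead.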
if prepend_prefix: - env['PATH'] = prefix + os.pathsep + env['PATH'] + env["PATH"] = prefix + os.pathsep + env["PATH"] return env @@ -971,13 +1019,13 @@ def prepend_bin_path(env, prefix, prepend_prefix=False): def sys_path_prepended(prefix): path_backup = sys.path[:] if on_win: - sys.path.insert(1, os.path.join(prefix, 'lib', 'site-packages')) + sys.path.insert(1, os.path.join(prefix, "lib", "site-packages")) else: - lib_dir = os.path.join(prefix, 'lib') - python_dir = glob(os.path.join(lib_dir, r'python[0-9\.]*')) + lib_dir = os.path.join(prefix, "lib") + python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*"), recursive=True) if python_dir: python_dir = python_dir[0] - sys.path.insert(1, os.path.join(python_dir, 'site-packages')) + sys.path.insert(1, os.path.join(python_dir, "site-packages")) try: yield finally: @@ -985,45 +1033,50 @@ def sys_path_prepended(prefix): @contextlib.contextmanager -def path_prepended(prefix): - old_path = os.environ['PATH'] - os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH'] +def path_prepended(prefix, prepend_prefix=True): + # FIXME: Unclear why prepend_prefix=True for all platforms. + old_path = os.environ["PATH"] + os.environ["PATH"] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)[ + "PATH" + ] try: yield finally: - os.environ['PATH'] = old_path + os.environ["PATH"] = old_path -bin_dirname = 'Scripts' if sys.platform == 'win32' else 'bin' +bin_dirname = "Scripts" if on_win else "bin" -entry_pat = re.compile(r'\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$') +entry_pat = re.compile(r"\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$") def iter_entry_points(items): for item in items: m = entry_pat.match(item) if m is None: - sys.exit("Error cound not match entry point: %r" % item) + sys.exit(f"Error cound not match entry point: {item!r}") yield m.groups() def create_entry_point(path, module, func, config): - import_name = func.split('.')[0] - pyscript = PY_TMPL % { - 'module': module, 'func': func, 'import_name': import_name} + import_name = func.split(".")[0] + pyscript = PY_TMPL % {"module": module, "func": func, "import_name": import_name} if on_win: - with open(path + '-script.py', 'w') as fo: - if os.path.isfile(os.path.join(config.host_prefix, 'python_d.exe')): - fo.write('#!python_d\n') + with open(path + "-script.py", "w") as fo: + if os.path.isfile(os.path.join(config.host_prefix, "python_d.exe")): + fo.write("#!python_d\n") fo.write(pyscript) - copy_into(join(dirname(__file__), f'cli-{str(config.host_arch)}.exe'), - path + '.exe', config.timeout) + copy_into( + join(dirname(__file__), f"cli-{str(config.host_arch)}.exe"), + path + ".exe", + config.timeout, + ) else: if os.path.islink(path): os.remove(path) - with open(path, 'w') as fo: + with open(path, "w") as fo: if not config.noarch: - fo.write('#!%s\n' % config.host_python) + fo.write(f"#!{config.host_python}\n") fo.write(pyscript) os.chmod(path, 0o775) @@ -1051,36 +1104,46 @@ def get_ext_files(start_path, pattern): def convert_path_for_cygwin_or_msys2(exe, path): "If exe is a Cygwin or MSYS2 executable then filters it through `cygpath -u`" - if sys.platform != 'win32': + if not on_win: return path if exe not in _posix_exes_cache: with open(exe, "rb") as exe_file: exe_binary = exe_file.read() - msys2_cygwin = re.findall(b'(cygwin1.dll|msys-2.0.dll)', exe_binary) + msys2_cygwin = re.findall(b"(cygwin1.dll|msys-2.0.dll)", exe_binary) _posix_exes_cache[exe] = True if msys2_cygwin else False if _posix_exes_cache[exe]: try: - path = check_output_env(['cygpath', 
'-u', - path]).splitlines()[0].decode(getpreferredencoding()) + path = ( + check_output_env(["cygpath", "-u", path]) + .splitlines()[0] + .decode(getpreferredencoding()) + ) except OSError: log = get_logger(__name__) - log.debug('cygpath executable not found. Passing native path. This is OK for msys2.') + log.debug( + "cygpath executable not found. Passing native path. This is OK for msys2." + ) return path -def get_skip_message(m): - return ("Skipped: {} from {} defines build/skip for this configuration ({}).".format( - m.name(), m.path, - {k: m.config.variant[k] for k in m.get_used_vars()})) +def get_skip_message(m: MetaData) -> str: + return ( + f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration " + f"({({k: m.config.variant[k] for k in m.get_used_vars()})})." + ) -def package_has_file(package_path, file_path, refresh_mode='modified'): +def package_has_file(package_path, file_path, refresh_mode="modified"): # This version does nothing to the package cache. with TemporaryDirectory() as td: - if file_path.startswith('info'): - conda_package_handling.api.extract(package_path, dest_dir=td, components='info') + if file_path.startswith("info"): + conda_package_handling.api.extract( + package_path, dest_dir=td, components="info" + ) else: - conda_package_handling.api.extract(package_path, dest_dir=td, components=file_path) + conda_package_handling.api.extract( + package_path, dest_dir=td, components=file_path + ) resolved_file_path = os.path.join(td, file_path) if os.path.exists(resolved_file_path): # TODO :: Remove this text-mode load. Files are binary. @@ -1088,14 +1151,14 @@ def package_has_file(package_path, file_path, refresh_mode='modified'): with open(resolved_file_path) as f: content = f.read() except UnicodeDecodeError: - with open(resolved_file_path, 'rb') as f: + with open(resolved_file_path, "rb") as f: content = f.read() else: content = False return content -def ensure_list(arg, include_dict=True): +def ensure_list(arg: T | Iterable[T] | None, include_dict: bool = True) -> list[T]: """ Ensure the object is a list. If not return it in a list. @@ -1114,7 +1177,11 @@ def ensure_list(arg, include_dict=True): return [arg] -def islist(arg, uniform=False, include_dict=True): +def islist( + arg: T | Iterable[T], + uniform: bool = False, + include_dict: bool = True, +) -> bool: """ Check whether `arg` is a `list`. Optionally determine whether the list elements are all uniform. 
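# Illustrative sketch, not part of the patch above: the new annotations on
# ensure_list()/islist() describe the existing behaviour, e.g.:
from conda_build.utils import ensure_list

assert ensure_list(None) == []
assert ensure_list("abc") == ["abc"]          # a str is wrapped, not iterated
assert ensure_list(("a", "b")) == ["a", "b"]  # other iterables become lists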
@@ -1145,7 +1212,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, '__iter__'): + if isinstance(arg, str) or not isinstance(arg, Iterable): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1156,6 +1223,7 @@ def islist(arg, uniform=False, include_dict=True): return True # NOTE: not checking for Falsy arg since arg may be a generator + # WARNING: if uniform != False and arg is a generator then arg will be consumed if uniform is True: arg = iter(arg) @@ -1165,7 +1233,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype + uniform = lambda e: type(e) == etype # noqa: E731 try: return all(uniform(e) for e in arg) @@ -1184,9 +1252,13 @@ def tmp_chdir(dest): os.chdir(curdir) -def expand_globs(path_list, root_dir): +def expand_globs( + path_list: str | os.PathLike | Path | Iterable[str | os.PathLike | Path], + root_dir: str | os.PathLike | Path, +) -> list[str]: files = [] for path in ensure_list(path_list): + path = str(path) if not os.path.isabs(path): path = os.path.join(root_dir, path) if os.path.isfile(path): @@ -1201,25 +1273,25 @@ def expand_globs(path_list, root_dir): files.append(os.path.join(root, folder)) else: # File compared to the globs use / as separator independently of the os - glob_files = glob(path) + glob_files = glob(path, recursive=True) if not glob_files: log = get_logger(__name__) - log.error(f'Glob {path} did not match in root_dir {root_dir}') + log.error(f"Glob {path} did not match in root_dir {root_dir}") # https://docs.python.org/3/library/glob.html#glob.glob states that # "whether or not the results are sorted depends on the file system". # Avoid this potential ambiguity by sorting. (see #4185) files.extend(sorted(glob_files)) - prefix_path_re = re.compile('^' + re.escape(f'{root_dir}{os.path.sep}')) - files = [prefix_path_re.sub('', f, 1) for f in files] - return files + prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}")) + return [prefix_path_re.sub("", f, 1) for f in files] -def find_recipe(path): +def find_recipe(path: str) -> str: """recurse through a folder, locating valid meta files (see VALID_METAS). Raises error if more than one is found. Returns full path to meta file to be built. - If we have a base level meta file and other supplemental (nested) ones, use the base level.""" + If we have a base level meta file and other supplemental (nested) ones, use the base level. 
+ """ # if initial path is absolute then any path we find (via rec_glob) # will also be absolute if not os.path.isabs(path): @@ -1228,12 +1300,16 @@ def find_recipe(path): if os.path.isfile(path): if os.path.basename(path) in VALID_METAS: return path - raise OSError("{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS))) + raise OSError( + "{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS)) + ) results = list(rec_glob(path, VALID_METAS, ignores=(".AppleDouble",))) if not results: - raise OSError("No meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "No meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) if len(results) == 1: return results[0] @@ -1243,20 +1319,43 @@ def find_recipe(path): metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))] if len(metas) == 1: - get_logger(__name__).warn("Multiple meta files found. " - "The %s file in the base directory (%s) " - "will be used." % (metas[0], path)) + get_logger(__name__).warn( + "Multiple meta files found. " + f"The {metas[0]} file in the base directory ({path}) " + "will be used." + ) return os.path.join(path, metas[0]) - raise OSError("More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) class LoggingContext: - default_loggers = ['conda', 'binstar', 'install', 'conda.install', 'fetch', 'conda.instructions', - 'fetch.progress', 'print', 'progress', 'dotupdate', 'stdoutlog', 'requests', - 'conda.core.package_cache', 'conda.plan', 'conda.gateways.disk.delete', - 'conda_build', 'conda_build.index', 'conda_build.noarch_python', - 'urllib3.connectionpool'] + default_loggers = [ + "conda", + "binstar", + "install", + "conda.install", + "fetch", + "conda.instructions", + "fetch.progress", + "print", + "progress", + "dotupdate", + "stdoutlog", + "requests", + "conda.core.package_cache_data", + "conda.plan", + "conda.gateways.disk.delete", + "conda_build", + "conda_build.index", + "conda_build.noarch_python", + "urllib3.connectionpool", + "conda_index", + "conda_index.index", + "conda_index.index.convert_cache", + ] def __init__(self, level=logging.WARN, handler=None, close=True, loggers=None): self.level = level @@ -1274,8 +1373,11 @@ def __enter__(self): if isinstance(logger, str): log = logging.getLogger(logger) self.old_levels[logger] = log.level - log.setLevel(self.level if ('install' not in logger or - self.level < logging.INFO) else self.level + 10) + log.setLevel( + self.level + if ("install" not in logger or self.level < logging.INFO) + else self.level + 10 + ) if self.handler: self.logger.addHandler(self.handler) @@ -1295,23 +1397,24 @@ def __exit__(self, et, ev, tb): def get_installed_packages(path): - ''' + """ Scan all json files in 'path' and return a dictionary with their contents. Files are assumed to be in 'index.json' format. 
- ''' + """ installed = dict() - for filename in glob(os.path.join(path, 'conda-meta', '*.json')): + for filename in glob(os.path.join(path, "conda-meta", "*.json"), recursive=True): with open(filename) as file: data = json.load(file) - installed[data['name']] = data + installed[data["name"]] = data return installed +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def _convert_lists_to_sets(_dict): for k, v in _dict.items(): - if hasattr(v, 'keys'): + if hasattr(v, "keys"): _dict[k] = HashableDict(_convert_lists_to_sets(v)) - elif hasattr(v, '__iter__') and not isinstance(v, str): + elif hasattr(v, "__iter__") and not isinstance(v, str): try: _dict[k] = sorted(list(set(v))) except TypeError: @@ -1319,9 +1422,10 @@ def _convert_lists_to_sets(_dict): return _dict +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") class HashableDict(dict): - """use hashable frozen dictionaries for resources and resource types so that they can be in sets - """ + """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self = _convert_lists_to_sets(self) @@ -1330,6 +1434,7 @@ def __hash__(self): return hash(json.dumps(self, sort_keys=True)) +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def represent_hashabledict(dumper, data): value = [] @@ -1339,7 +1444,7 @@ def represent_hashabledict(dumper, data): value.append((node_key, node_value)) - return yaml.nodes.MappingNode('tag:yaml.org,2002:map', value) + return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) yaml.add_representer(HashableDict, represent_hashabledict) @@ -1349,6 +1454,7 @@ def represent_hashabledict(dumper, data): @contextlib.contextmanager def capture(): import sys + oldout, olderr = sys.stdout, sys.stderr try: out = [StringIO(), StringIO()] @@ -1383,19 +1489,19 @@ def env_var(name, value, callback=None): def trim_empty_keys(dict_): to_remove = set() - negative_means_empty = ('final', 'noarch_python', 'zip_keys') + negative_means_empty = ("final", "noarch_python", "zip_keys") for k, v in dict_.items(): - if hasattr(v, 'keys'): + if hasattr(v, "keys"): trim_empty_keys(v) # empty lists and empty strings, and None are always empty. - if v == list() or v == '' or v is None or v == dict(): + if v == list() or v == "" or v is None or v == dict(): to_remove.add(k) # other things that evaluate as False may not be "empty" - things can be manually set to # false, and we need to keep that setting. 
if not v and k in negative_means_empty: to_remove.add(k) - if 'zip_keys' in dict_ and not any(v for v in dict_['zip_keys']): - to_remove.add('zip_keys') + if "zip_keys" in dict_ and not any(v for v in dict_["zip_keys"]): + to_remove.add("zip_keys") for k in to_remove: del dict_[k] @@ -1403,17 +1509,17 @@ def trim_empty_keys(dict_): def _increment(version, alpha_ver): try: if alpha_ver: - suffix = 'a' + suffix = "a" else: - suffix = '.0a0' + suffix = ".0a0" last_version = str(int(version) + 1) + suffix except ValueError: last_version = chr(ord(version) + 1) return last_version -def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): - pins = [len(p.split('.')) if p else None for p in (min_pin, max_pin)] +def apply_pin_expressions(version, min_pin="x.x.x.x.x.x.x", max_pin="x"): + pins = [len(p.split(".")) if p else None for p in (min_pin, max_pin)] parsed_version = VersionOrder(version).version[1:] nesting_position = None flat_list = [] @@ -1423,9 +1529,9 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): flat_list.extend(item) else: flat_list.append(item) - if max_pin and len(max_pin.split('.')) > len(flat_list): + if max_pin and len(max_pin.split(".")) > len(flat_list): pins[1] = len(flat_list) - versions = ['', ''] + versions = ["", ""] # first idx is lower bound pin; second is upper bound pin. # pin value is number of places to pin. for p_idx, pin in enumerate(pins): @@ -1439,8 +1545,8 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): v = _increment(v, alpha_ver) versions[p_idx] += str(v) if v_idx != nesting_position: - versions[p_idx] += '.' - if versions[p_idx][-1] == '.': + versions[p_idx] += "." + if versions[p_idx][-1] == ".": versions[p_idx] = versions[p_idx][:-1] if versions[0]: if version.endswith(".*"): @@ -1452,88 +1558,77 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): if version_order < VersionOrder(versions[0]): # If the minimum is greater than the version this is a pre-release build. 
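To make the pin-expression logic above concrete, here is roughly what apply_pin_expressions returns for a plain numeric version (the version strings are illustrative):

from conda_build.utils import apply_pin_expressions

apply_pin_expressions("1.2.3", min_pin="x.x", max_pin="x")
# roughly '>=1.2,<2.0a0': keep two places for the lower bound, bump the single
# pinned place for the upper bound and add an alpha suffix
apply_pin_expressions("1.2.3", min_pin="x.x.x", max_pin="x.x")
# roughly '>=1.2.3,<1.3.0a0'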
# Use the version as the lower bound - versions[0] = '>=' + version + versions[0] = ">=" + version else: - versions[0] = '>=' + versions[0] + versions[0] = ">=" + versions[0] if versions[1]: - versions[1] = '<' + versions[1] - return ','.join([v for v in versions if v]) - - -def filter_files(files_list, prefix, filter_patterns=(r'(.*[\\/])?\.git[\\/].*', - r'(.*[\\/])?\.git$', - r'(.*)?\.DS_Store.*', - r'.*\.la$', - r'conda-meta.*', - r'.*\.conda_trash(?:_\d+)*$')): + versions[1] = "<" + versions[1] + return ",".join([v for v in versions if v]) + + +def filter_files( + files_list, + prefix, + filter_patterns=( + r"(.*[\\/])?\.git[\\/].*", + r"(.*[\\/])?\.git$", + r"(.*)?\.DS_Store.*", + r".*\.la$", + r"conda-meta.*", + r".*\.conda_trash(?:_\d+)*$", + ), +): """Remove things like the .git directory from the list of files to be copied""" for pattern in filter_patterns: r = re.compile(pattern) files_list = set(files_list) - set(filter(r.match, files_list)) - return [f for f in files_list - if not os.path.isdir(os.path.join(prefix, f)) or - os.path.islink(os.path.join(prefix, f))] + return [ + f + for f in files_list + if not os.path.isdir(os.path.join(prefix, f)) + or os.path.islink(os.path.join(prefix, f)) + ] def filter_info_files(files_list, prefix): - return filter_files(files_list, prefix, filter_patterns=( - 'info[\\\\/]index.json', - 'info[\\\\/]files', - 'info[\\\\/]paths.json', - 'info[\\\\/]about.json', - 'info[\\\\/]has_prefix', - 'info[\\\\/]hash_input_files', # legacy, not used anymore - 'info[\\\\/]hash_input.json', - 'info[\\\\/]run_exports.yaml', # legacy - 'info[\\\\/]run_exports.json', # current - 'info[\\\\/]git', - 'info[\\\\/]recipe[\\\\/].*', - 'info[\\\\/]recipe_log.json', - 'info[\\\\/]recipe.tar', - 'info[\\\\/]test[\\\\/].*', - 'info[\\\\/]LICENSE.txt', # legacy, some tests rely on this - 'info[\\\\/]licenses[\\\\/]*', - 'info[\\\\/]prelink_messages[\\\\/]*', - 'info[\\\\/]requires', - 'info[\\\\/]meta', - 'info[\\\\/]platform', - 'info[\\\\/]no_link', - 'info[\\\\/]link.json', - 'info[\\\\/]icon.png', - )) - - -def rm_rf(path, config=None): - if conda_46: - return _rm_rf(path) - if os.path.isdir(path): - try: - # subprocessing to delete large folders can be quite a bit faster - if on_win: - subprocess.check_call(f'rd /s /q {path}', shell=True) - else: - try: - os.makedirs('.empty') - except: - pass - del_dir_cmd = 'rsync -a --delete .empty {}/' - subprocess.check_call(del_dir_cmd.format(path).split()) - try: - shutil.rmtree('.empty') - except: - pass - # we don't really care about errors that much. 
People can and should - # clean out their folders once in a while with "purge" - except: - pass + return filter_files( + files_list, + prefix, + filter_patterns=( + "info[\\\\/]index.json", + "info[\\\\/]files", + "info[\\\\/]paths.json", + "info[\\\\/]about.json", + "info[\\\\/]has_prefix", + "info[\\\\/]hash_input_files", # legacy, not used anymore + "info[\\\\/]hash_input.json", + "info[\\\\/]run_exports.yaml", # legacy + "info[\\\\/]run_exports.json", # current + "info[\\\\/]git", + "info[\\\\/]recipe[\\\\/].*", + "info[\\\\/]recipe_log.json", + "info[\\\\/]recipe.tar", + "info[\\\\/]test[\\\\/].*", + "info[\\\\/]LICENSE.txt", # legacy, some tests rely on this + "info[\\\\/]licenses[\\\\/]*", + "info[\\\\/]prelink_messages[\\\\/]*", + "info[\\\\/]requires", + "info[\\\\/]meta", + "info[\\\\/]platform", + "info[\\\\/]no_link", + "info[\\\\/]link.json", + "info[\\\\/]icon.png", + ), + ) + - conda_log_level = logging.WARN - if config and config.debug: - conda_log_level = logging.DEBUG - with LoggingContext(conda_log_level): - # this clears out the path from conda's cache, which otherwise thinks - # that things are still installed here - _rm_rf(path) +@deprecated.argument("24.5", "24.7", "config") +def rm_rf(path): + from conda.core.prefix_data import delete_prefix_from_linked_data + from conda.gateways.disk.delete import rm_rf as rm_rf + + rm_rf(path) + delete_prefix_from_linked_data(path) # https://stackoverflow.com/a/31459386/1170370 @@ -1571,14 +1666,15 @@ def filter(self, record): dedupe_filter = DuplicateFilter() info_debug_stdout_filter = LessThanFilter(logging.WARNING) warning_error_stderr_filter = GreaterThanFilter(logging.INFO) +level_formatter = logging.Formatter("%(levelname)s: %(message)s") # set filelock's logger to only show warnings by default -logging.getLogger('filelock').setLevel(logging.WARN) +logging.getLogger("filelock").setLevel(logging.WARN) # quiet some of conda's less useful output -logging.getLogger('conda.core.linked_data').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.delete').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.test').setLevel(logging.WARN) +logging.getLogger("conda.core.linked_data").setLevel(logging.WARN) +logging.getLogger("conda.gateways.disk.delete").setLevel(logging.WARN) +logging.getLogger("conda.gateways.disk.test").setLevel(logging.WARN) def reset_deduplicator(): @@ -1590,26 +1686,32 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None - if cc_conda_build.get('log_config_file'): - config_file = abspath(expanduser(expandvars(cc_conda_build.get('log_config_file')))) + if log_config_file := context.conda_build.get("log_config_file"): + config_file = abspath(expanduser(expandvars(log_config_file))) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. if config_file: with open(config_file) as f: config_dict = yaml.safe_load(f) logging.config.dictConfig(config_dict) - level = config_dict.get('loggers', {}).get(name, {}).get('level', level) + level = config_dict.get("loggers", {}).get(name, {}).get("level", level) log = logging.getLogger(name) log.setLevel(level) if dedupe: log.addFilter(dedupe_filter) # these are defaults. They can be overridden by configuring a log config yaml file. 
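As a reminder of what that "log config yaml file" is: get_logger() loads the file named by the log_config_file key of the conda_build settings (context.conda_build) and hands it to logging.config.dictConfig, also honouring a loggers.<name>.level entry. Expressed as the equivalent Python mapping (logger name and level are illustrative):

log_config = {
    "version": 1,  # required by logging.config.dictConfig
    "loggers": {
        # get_logger("conda_build", ...) reads loggers.<name>.level from this mapping
        "conda_build": {"level": "DEBUG"},
    },
}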
- if not log.handlers and add_stdout_stderr_handlers: + top_pkg = name.split(".")[0] + if top_pkg == "conda_build": + # we don't want propagation in CLI, but we do want it in tests + # this is a pytest limitation: https://github.com/pytest-dev/pytest/issues/3697 + logging.getLogger(top_pkg).propagate = "PYTEST_CURRENT_TEST" in os.environ + if add_stdout_stderr_handlers and not log.handlers: stdout_handler = logging.StreamHandler(sys.stdout) stderr_handler = logging.StreamHandler(sys.stderr) stdout_handler.addFilter(info_debug_stdout_filter) stderr_handler.addFilter(warning_error_stderr_filter) + stderr_handler.setFormatter(level_formatter) stdout_handler.setLevel(level) stderr_handler.setLevel(level) log.addHandler(stdout_handler) @@ -1621,25 +1723,30 @@ def _equivalent(base_value, value, path): equivalent = value == base_value if isinstance(value, str) and isinstance(base_value, str): if not os.path.isabs(base_value): - base_value = os.path.abspath(os.path.normpath(os.path.join(path, base_value))) + base_value = os.path.abspath( + os.path.normpath(os.path.join(path, base_value)) + ) if not os.path.isabs(value): value = os.path.abspath(os.path.normpath(os.path.join(path, value))) equivalent |= base_value == value return equivalent -def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True): +def merge_or_update_dict( + base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True +): if base == new: return base log = get_logger(__name__) for key, value in new.items(): if key in base or add_missing_keys: base_value = base.get(key, value) - if hasattr(value, 'keys'): - base_value = merge_or_update_dict(base_value, value, path, merge, - raise_on_clobber=raise_on_clobber) + if hasattr(value, "keys"): + base_value = merge_or_update_dict( + base_value, value, path, merge, raise_on_clobber=raise_on_clobber + ) base[key] = base_value - elif hasattr(value, '__iter__') and not isinstance(value, str): + elif hasattr(value, "__iter__") and not isinstance(value, str): if merge: if base_value != value: try: @@ -1653,10 +1760,15 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, else: base[key] = value else: - if (base_value and merge and not _equivalent(base_value, value, path) and - raise_on_clobber): - log.debug('clobbering key {} (original value {}) with value {}'.format(key, - base_value, value)) + if ( + base_value + and merge + and not _equivalent(base_value, value, path) + and raise_on_clobber + ): + log.debug( + f"clobbering key {key} (original value {base_value}) with value {value}" + ) if value is None and key in base: del base[key] else: @@ -1664,50 +1776,66 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, return base -def merge_dicts_of_lists(dol1, dol2): - ''' +def merge_dicts_of_lists( + dol1: Mapping[K, Iterable[V]], + dol2: Mapping[K, Iterable[V]], +) -> dict[K, list[V]]: + """ From Alex Martelli: https://stackoverflow.com/a/1495821/3257826 - ''' + """ keys = set(dol1).union(dol2) no = [] return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} -def prefix_files(prefix): - ''' +def prefix_files(prefix: str | os.PathLike | Path) -> set[str]: + """ Returns a set of all files in prefix. 
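A quick illustration of the merge_dicts_of_lists helper that gained type hints above (inputs invented; key order in the result is not guaranteed):

from conda_build.utils import merge_dicts_of_lists

merge_dicts_of_lists({"a": [1], "b": [2]}, {"a": [3]})
# roughly {"a": [1, 3], "b": [2]}: keys are unioned, per-key lists concatenated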
- ''' - res = set() - prefix_rep = prefix + os.path.sep - for root, dirs, files in walk(prefix): - for fn in files: - # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, '', 1)) - for dn in dirs: - path = join(root, dn) - if islink(path): - res.add(path.replace(prefix_rep, '', 1)) - res.update(expand_globs((path, ), prefix)) - return res - - -def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_PROT_WRITE, - access=None, offset=0): - ''' + """ + prefix = f"{os.path.abspath(prefix)}{os.path.sep}" + prefix_files: set[str] = set() + for root, directories, files in walk(prefix): + # this is effectively os.path.relpath, just hacked to be faster + relroot = root[len(prefix) :].lstrip(os.path.sep) + # add all files + prefix_files.update(join(relroot, file) for file in files) + # add all symlink directories (they are "files") + prefix_files.update( + join(relroot, directory) + for directory in directories + if islink(join(root, directory)) + ) + return prefix_files + + +def mmap_mmap( + fileno, + length, + tagname=None, + flags=0, + prot=mmap_PROT_READ | mmap_PROT_WRITE, + access=None, + offset=0, +): + """ Hides the differences between mmap.mmap on Windows and Unix. Windows has `tagname`. Unix does not, but makes up for it with `flags` and `prot`. On both, the default value for `access` is determined from how the file was opened so must not be passed in at all to get this default behaviour. - ''' + """ if on_win: if access: - return mmap.mmap(fileno, length, tagname=tagname, access=access, offset=offset) + return mmap.mmap( + fileno, length, tagname=tagname, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, tagname=tagname) else: if access: - return mmap.mmap(fileno, length, flags=flags, prot=prot, access=access, offset=offset) + return mmap.mmap( + fileno, length, flags=flags, prot=prot, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, flags=flags, prot=prot) @@ -1715,21 +1843,21 @@ def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_ def remove_pycache_from_scripts(build_prefix): """Remove pip created pycache directory from bin or Scripts.""" if on_win: - scripts_path = os.path.join(build_prefix, 'Scripts') + scripts_path = os.path.join(build_prefix, "Scripts") else: - scripts_path = os.path.join(build_prefix, 'bin') + scripts_path = os.path.join(build_prefix, "bin") if os.path.isdir(scripts_path): for entry in os.listdir(scripts_path): entry_path = os.path.join(scripts_path, entry) - if os.path.isdir(entry_path) and entry.strip(os.sep) == '__pycache__': + if os.path.isdir(entry_path) and entry.strip(os.sep) == "__pycache__": shutil.rmtree(entry_path) - elif os.path.isfile(entry_path) and entry_path.endswith('.pyc'): + elif os.path.isfile(entry_path) and entry_path.endswith(".pyc"): os.remove(entry_path) -def sort_list_in_nested_structure(dictionary, omissions=''): +def sort_list_in_nested_structure(dictionary, omissions=""): """Recurse through a nested dictionary and sort any lists that are found. 
If the list that is found contains anything but strings, it is skipped @@ -1742,9 +1870,11 @@ def sort_list_in_nested_structure(dictionary, omissions=''): section = dictionary[field][key] if isinstance(section, dict): sort_list_in_nested_structure(section) - elif (isinstance(section, list) and - '{}/{}' .format(field, key) not in omissions and - all(isinstance(item, str) for item in section)): + elif ( + isinstance(section, list) + and f"{field}/{key}" not in omissions + and all(isinstance(item, str) for item in section) + ): section.sort() # there's a possibility for nested lists containing dictionaries @@ -1767,51 +1897,69 @@ def sort_list_in_nested_structure(dictionary, omissions=''): # if you are seeing mysterious unsatisfiable errors, with the package you're building being the # unsatisfiable part, then you probably need to update this regex. -spec_needing_star_re = re.compile(r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$") # NOQA +spec_needing_star_re = re.compile( + r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$" +) # NOQA spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$") -def ensure_valid_spec(spec, warn=False): +@overload +def ensure_valid_spec(spec: str, warn: bool = False) -> str: ... + + +@overload +def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec: ... + + +def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchSpec: if isinstance(spec, MatchSpec): - if (hasattr(spec, 'version') and spec.version and (not spec.get('build', '')) and - spec_ver_needing_star_re.match(str(spec.version))): - if str(spec.name) not in ('python', 'numpy') or str(spec.version) != 'x.x': - spec = MatchSpec("{} {}".format(str(spec.name), str(spec.version) + '.*')) + if ( + hasattr(spec, "version") + and spec.version + and (not spec.get("build", "")) + and spec_ver_needing_star_re.match(str(spec.version)) + ): + if str(spec.name) not in ("python", "numpy") or str(spec.version) != "x.x": + spec = MatchSpec( + "{} {}".format(str(spec.name), str(spec.version) + ".*") + ) else: match = spec_needing_star_re.match(spec) # ignore exact pins (would be a 3rd group) if match and not match.group(3): - if match.group(1) in ('python', 'numpy') and match.group(2) == 'x.x': + if match.group(1) in ("python", "numpy") and match.group(2) == "x.x": spec = spec_needing_star_re.sub(r"\1 \2", spec) else: if "*" not in spec: - if match.group(1) not in ('python', 'vc') and warn: + if match.group(1) not in ("python", "vc") and warn: log = get_logger(__name__) - log.warn("Adding .* to spec '{}' to ensure satisfiability. Please " - "consider putting {{{{ var_name }}}}.* or some relational " - "operator (>/=/<=) on this spec in meta.yaml, or if req is " - "also a build req, using {{{{ pin_compatible() }}}} jinja2 " - "function instead. See " - "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level" # NOQA - .format(spec)) + log.warn( + f"Adding .* to spec '{spec}' to ensure satisfiability. Please " + "consider putting {{{{ var_name }}}}.* or some relational " + "operator (>/=/<=) on this spec in meta.yaml, or if req is " + "also a build req, using {{{{ pin_compatible() }}}} jinja2 " + "function instead. 
See " + "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level" + ) spec = spec_needing_star_re.sub(r"\1 \2.*", spec) return spec def insert_variant_versions(requirements_dict, variant, env): - build_deps = (ensure_list(requirements_dict.get('build')) + - ensure_list(requirements_dict.get('host'))) + build_deps = ensure_list(requirements_dict.get("build")) + ensure_list( + requirements_dict.get("host") + ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r'^(%s)(?:\s*$)' % key.replace('_', '[-_]')) + regex = re.compile(r"^({})(?:\s*$)".format(key.replace("_", "[-_]"))) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): - if x and (env in ('build', 'host') or x.group(1) in build_deps): + if x and (env in ("build", "host") or x.group(1) in build_deps): del reqs[i] if not isinstance(val, str): val = val[0] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), val)))) + reqs.insert(i, ensure_valid_spec(" ".join((x.group(1), val)))) xx_re = re.compile(r"([0-9a-zA-Z\.\-\_]+)\s+x\.x") @@ -1820,7 +1968,10 @@ def insert_variant_versions(requirements_dict, variant, env): for i, x in enumerate(matches): if x: del reqs[i] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), variant.get(x.group(1)))))) + reqs.insert( + i, + ensure_valid_spec(" ".join((x.group(1), variant.get(x.group(1))))), + ) if reqs: requirements_dict[env] = reqs @@ -1829,19 +1980,15 @@ def match_peer_job(target_matchspec, other_m, this_m=None): """target_matchspec comes from the recipe. target_variant is the variant from the recipe whose deps we are matching. m is the peer job, which must satisfy conda and also have matching keys for any keys that are shared between target_variant and m.config.variant""" - match_dict = {'name': other_m.name(), - 'version': other_m.version(), - 'build': '', } - if conda_43: - match_dict = Dist(name=match_dict['name'], - dist_name='-'.join((match_dict['name'], - match_dict['version'], - match_dict['build'])), - version=match_dict['version'], - build_string=match_dict['build'], - build_number=other_m.build_number(), - channel=None) - matchspec_matches = target_matchspec.match(match_dict) + name, version, build = other_m.name(), other_m.version(), "" + matchspec_matches = target_matchspec.match( + PackageRecord( + name=name, + version=version, + build=build, + build_number=other_m.build_number(), + ) + ) variant_matches = True if this_m: @@ -1853,10 +2000,13 @@ def match_peer_job(target_matchspec, other_m, this_m=None): def expand_reqs(reqs_entry): - if not hasattr(reqs_entry, 'keys'): + if not hasattr(reqs_entry, "keys"): original = ensure_list(reqs_entry)[:] - reqs_entry = {'host': ensure_list(original), - 'run': ensure_list(original)} if original else {} + reqs_entry = ( + {"host": ensure_list(original), "run": ensure_list(original)} + if original + else {} + ) else: for sec in reqs_entry: reqs_entry[sec] = ensure_list(reqs_entry[sec]) @@ -1871,17 +2021,16 @@ def sha256_checksum(filename, buffersize=65536): if not isfile(filename): return None sha256 = hashlib.sha256() - with open(filename, 'rb') as f: - for block in iter(lambda: f.read(buffersize), b''): + with open(filename, "rb") as f: + for block in iter(lambda: f.read(buffersize), b""): sha256.update(block) return sha256.hexdigest() def write_bat_activation_text(file_handle, m): - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda_hook.bat"\n'.format( - 
conda_root=root_script_dir, - )) + from .os_utils.external import find_executable + + file_handle.write(f'call "{root_script_dir}\\..\\condabin\\conda_hook.bat"\n') if m.is_cross: # HACK: we need both build and host envs "active" - i.e. on PATH, # and with their activate.d scripts sourced. Conda only @@ -1899,77 +2048,62 @@ def write_bat_activation_text(file_handle, m): # exists to identify a valid conda environment # conda 4.6 changes this one final time, by adding a '--stack' flag to the 'activate' # command, and 'activate' does not stack environments by default without that flag - history_file = join(m.config.host_prefix, 'conda-meta', 'history') + history_file = join(m.config.host_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() + open(history_file, "a").close() - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda.bat" activate "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.host_prefix, - )) - else: - file_handle.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.host_prefix)) - # removing this placeholder should make conda double-activate with conda 4.3 - file_handle.write('set "PATH=%PATH:CONDA_PATH_PLACEHOLDER;=%"\n') - file_handle.write('set CONDA_MAX_SHLVL=2\n') + file_handle.write( + f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate "{m.config.host_prefix}"\n' + ) # Write build prefix activation AFTER host prefix, so that its executables come first - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda.bat" activate --stack "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.build_prefix, - )) - else: - file_handle.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.build_prefix)) - from conda_build.os_utils.external import find_executable - ccache = find_executable('ccache', m.config.build_prefix, False) + file_handle.write( + f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate --stack "{m.config.build_prefix}"\n' + ) + + ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: if isinstance(ccache, list): ccache = ccache[0] ccache_methods = {} - ccache_methods['env_vars'] = False - ccache_methods['symlinks'] = False - ccache_methods['native'] = False - if hasattr(m.config, 'ccache_method'): + ccache_methods["env_vars"] = False + ccache_methods["symlinks"] = False + ccache_methods["native"] = False + if hasattr(m.config, "ccache_method"): ccache_methods[m.config.ccache_method] = True for method, value in ccache_methods.items(): if value: - if method == 'env_vars': + if method == "env_vars": file_handle.write(f'set "CC={ccache} %CC%"\n') file_handle.write(f'set "CXX={ccache} %CXX%"\n') - elif method == 'symlinks': - dirname_ccache_ln_bin = join(m.config.build_prefix, 'ccache-ln-bin') - file_handle.write(f'mkdir {dirname_ccache_ln_bin}\n') - file_handle.write(f'pushd {dirname_ccache_ln_bin}\n') + elif method == "symlinks": + dirname_ccache_ln_bin = join(m.config.build_prefix, "ccache-ln-bin") + file_handle.write(f"mkdir {dirname_ccache_ln_bin}\n") + file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") # If you use mklink.exe instead of mklink here it breaks as it's a builtin. 
- for ext in ('.exe', ''): + for ext in (".exe", ""): # MSVC - file_handle.write(f'mklink cl{ext} {ccache}\n') - file_handle.write(f'mklink link{ext} {ccache}\n') + file_handle.write(f"mklink cl{ext} {ccache}\n") + file_handle.write(f"mklink link{ext} {ccache}\n") # GCC - file_handle.write(f'mklink gcc{ext} {ccache}\n') - file_handle.write(f'mklink g++{ext} {ccache}\n') - file_handle.write(f'mklink cc{ext} {ccache}\n') - file_handle.write(f'mklink c++{ext} {ccache}\n') - file_handle.write(f'mklink as{ext} {ccache}\n') - file_handle.write(f'mklink ar{ext} {ccache}\n') - file_handle.write(f'mklink nm{ext} {ccache}\n') - file_handle.write(f'mklink ranlib{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ar{ext} {ccache}\n') - file_handle.write(f'mklink gcc-nm{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ranlib{ext} {ccache}\n') - file_handle.write('popd\n') - file_handle.write('set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n'.format( - dirname_ccache_ln=dirname_ccache_ln_bin, - dirname_ccache=os.path.dirname(ccache))) - elif method == 'native': + file_handle.write(f"mklink gcc{ext} {ccache}\n") + file_handle.write(f"mklink g++{ext} {ccache}\n") + file_handle.write(f"mklink cc{ext} {ccache}\n") + file_handle.write(f"mklink c++{ext} {ccache}\n") + file_handle.write(f"mklink as{ext} {ccache}\n") + file_handle.write(f"mklink ar{ext} {ccache}\n") + file_handle.write(f"mklink nm{ext} {ccache}\n") + file_handle.write(f"mklink ranlib{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ar{ext} {ccache}\n") + file_handle.write(f"mklink gcc-nm{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ranlib{ext} {ccache}\n") + file_handle.write("popd\n") + file_handle.write( + f"set PATH={dirname_ccache_ln_bin};{os.path.dirname(ccache)};%PATH%\n" + ) + elif method == "native": pass else: print("ccache method {} not implemented") @@ -1980,15 +2114,15 @@ def write_bat_activation_text(file_handle, m): def download_channeldata(channel_url): global channeldata_cache - if channel_url.startswith('file://') or channel_url not in channeldata_cache: - urls = get_conda_channel(channel_url).urls() - urls = {url.rsplit('/', 1)[0] for url in urls} + if channel_url.startswith("file://") or channel_url not in channeldata_cache: + urls = Channel.from_value(channel_url).urls() + urls = {url.rsplit("/", 1)[0] for url in urls} data = {} for url in urls: with TemporaryDirectory() as td: tf = os.path.join(td, "channeldata.json") try: - download(url + '/channeldata.json', tf) + download(url + "/channeldata.json", tf) with open(tf) as f: new_channeldata = json.load(f) except (JSONDecodeError, CondaHTTPError): @@ -2000,20 +2134,6 @@ def download_channeldata(channel_url): return data -def linked_data_no_multichannels(prefix): - """ - Return a dictionary of the linked packages in prefix, with correct channels, hopefully. - cc @kalefranz. 
- """ - from conda.core.prefix_data import PrefixData - from conda.models.dist import Dist - - return { - Dist.from_string(prec.fn, channel_override=prec.channel.name): prec - for prec in PrefixData(prefix)._prefix_records.values() - } - - def shutil_move_more_retrying(src, dest, debug_name): log = get_logger(__name__) log.info(f"Renaming {debug_name} directory '{src}' to '{dest}'") @@ -2026,20 +2146,24 @@ def shutil_move_more_retrying(src, dest, debug_name): log.info(f"shutil.move({debug_name})={src}, dest={dest})") shutil.move(src, dest) if attempts_left != 5: - log.warning("shutil.move({}={}, dest={}) succeeded on attempt number {}".format(debug_name, src, dest, - 6 - attempts_left)) + log.warning( + f"shutil.move({debug_name}={src}, dest={dest}) succeeded on attempt number {6 - attempts_left}" + ) attempts_left = -1 except: attempts_left = attempts_left - 1 if attempts_left > 0: log.warning( - "Failed to rename {} directory, check with strace, struss or procmon. " - "Will sleep for 3 seconds and try again!".format(debug_name)) + f"Failed to rename {debug_name} directory, check with strace, struss or procmon. " + "Will sleep for 3 seconds and try again!" + ) import time + time.sleep(3) elif attempts_left != -1: log.error( - f"Failed to rename {debug_name} directory despite sleeping and retrying.") + f"Failed to rename {debug_name} directory despite sleeping and retrying." + ) def is_conda_pkg(pkg_path: str) -> bool: @@ -2048,8 +2172,10 @@ def is_conda_pkg(pkg_path: str) -> bool: """ path = Path(pkg_path) - return ( - path.is_file() and ( - any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) - ) + return path.is_file() and ( + any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) ) + + +def package_record_to_requirement(prec: PackageRecord) -> str: + return f"{prec.name} {prec.version} {prec.build}" diff --git a/conda_build/variants.py b/conda_build/variants.py index 2e7cd9b193..447025818c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -3,118 +3,146 @@ """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" +from __future__ import annotations + +import os.path +import re +import sys from collections import OrderedDict from copy import copy from functools import lru_cache from itertools import product -import os.path -from pkg_resources import parse_version -import re -import sys +from pathlib import Path +from typing import TYPE_CHECKING import yaml +from conda.base.context import context + +from .deprecations import deprecated +from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys +from .version import _parse as parse_version -from conda_build.conda_interface import subdir -from conda_build.conda_interface import cc_conda_build -from conda_build.utils import ensure_list, get_logger, islist, on_win, trim_empty_keys +if TYPE_CHECKING: + from typing import Any, Iterable DEFAULT_VARIANTS = { - 'python': f'{sys.version_info.major}.{sys.version_info.minor}', - 'numpy': '1.16', + "python": f"{sys.version_info.major}.{sys.version_info.minor}", + "numpy": { + # (python): numpy_version, # range of versions built for given python + (3, 8): "1.22", # 1.19-1.24 + (3, 9): "1.22", # 1.19-1.26 + (3, 10): "1.22", # 1.21-1.26 + (3, 11): "1.23", # 1.23-1.26 + (3, 12): "1.26", # 1.26- + }.get(sys.version_info[:2], "1.26"), # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. 
- 'perl': '5.26.2', - 'lua': '5', - 'r_base': '3.4' if on_win else '3.5', - 'cpu_optimization_target': 'nocona', - 'pin_run_as_build': OrderedDict(python=OrderedDict(min_pin='x.x', max_pin='x.x')), - 'ignore_version': [], - 'ignore_build_only_deps': ['python', 'numpy'], - 'extend_keys': ['pin_run_as_build', 'ignore_version', 'ignore_build_only_deps', 'extend_keys'], - 'cran_mirror': "https://cran.r-project.org", + "perl": "5.26.2", + "lua": "5", + "r_base": "3.4" if on_win else "3.5", + "cpu_optimization_target": "nocona", + "pin_run_as_build": { + "python": {"min_pin": "x.x", "max_pin": "x.x"}, + "r-base": {"min_pin": "x.x", "max_pin": "x.x"}, + }, + "ignore_version": [], + "ignore_build_only_deps": ["python", "numpy"], + "extend_keys": [ + "pin_run_as_build", + "ignore_version", + "ignore_build_only_deps", + "extend_keys", + ], + "cran_mirror": "https://cran.r-project.org", } -# set this outside the initialization because of the dash in the key -DEFAULT_VARIANTS['pin_run_as_build']['r-base'] = OrderedDict(min_pin='x.x', max_pin='x.x') - # map python version to default compiler on windows, to match upstream python # This mapping only sets the "native" compiler, and can be overridden by specifying a compiler # in the conda-build variant configuration DEFAULT_COMPILERS = { - 'win': { - 'c': { - '2.7': 'vs2008', - '3.3': 'vs2010', - '3.4': 'vs2010', - '3.5': 'vs2017', + "win": { + "c": { + "2.7": "vs2008", + "3.3": "vs2010", + "3.4": "vs2010", + "3.5": "vs2017", }, - 'cxx': { - '2.7': 'vs2008', - '3.3': 'vs2010', - '3.4': 'vs2010', - '3.5': 'vs2017', + "cxx": { + "2.7": "vs2008", + "3.3": "vs2010", + "3.4": "vs2010", + "3.5": "vs2017", }, - 'vc': { - '2.7': '9', - '3.3': '10', - '3.4': '10', - '3.5': '14', + "vc": { + "2.7": "9", + "3.3": "10", + "3.4": "10", + "3.5": "14", }, - 'fortran': 'gfortran', + "fortran": "gfortran", }, - 'linux': { - 'c': 'gcc', - 'cxx': 'gxx', - 'fortran': 'gfortran', + "linux": { + "c": "gcc", + "cxx": "gxx", + "fortran": "gfortran", }, - 'osx': { - 'c': 'clang', - 'cxx': 'clangxx', - 'fortran': 'gfortran', + "osx": { + "c": "clang", + "cxx": "clangxx", + "fortran": "gfortran", }, } -arch_name = subdir.rsplit('-', 1)[-1] +arch_name = context.subdir.rsplit("-", 1)[-1] -SUFFIX_MAP = {'PY': 'python', - 'NPY': 'numpy', - 'LUA': 'lua', - 'PERL': 'perl', - 'R': 'r_base'} +SUFFIX_MAP = { + "PY": "python", + "NPY": "numpy", + "LUA": "lua", + "PERL": "perl", + "R": "r_base", +} @lru_cache(maxsize=None) def _get_default_compilers(platform, py_ver): compilers = DEFAULT_COMPILERS[platform].copy() - if platform == 'win': - if parse_version(py_ver) >= parse_version('3.5'): - py_ver = '3.5' - elif parse_version(py_ver) <= parse_version('3.2'): - py_ver = '2.7' - compilers['c'] = compilers['c'][py_ver] - compilers['cxx'] = compilers['cxx'][py_ver] - compilers = {lang + '_compiler': pkg_name - for lang, pkg_name in compilers.items() if lang != 'vc'} + if platform == "win": + if parse_version(py_ver) >= parse_version("3.5"): + py_ver = "3.5" + elif parse_version(py_ver) <= parse_version("3.2"): + py_ver = "2.7" + compilers["c"] = compilers["c"][py_ver] + compilers["cxx"] = compilers["cxx"][py_ver] + compilers = { + lang + "_compiler": pkg_name + for lang, pkg_name in compilers.items() + if lang != "vc" + } # this one comes after, because it's not a _compiler key - if platform == 'win': - compilers['vc'] = DEFAULT_COMPILERS[platform]['vc'][py_ver] + if platform == "win": + compilers["vc"] = DEFAULT_COMPILERS[platform]["vc"][py_ver] return compilers def get_default_variant(config): 
base = DEFAULT_VARIANTS.copy() - base['target_platform'] = config.subdir - python = base['python'] if (not hasattr(config, 'variant') or - not config.variant.get('python')) else config.variant['python'] + base["target_platform"] = config.subdir + python = ( + base["python"] + if (not hasattr(config, "variant") or not config.variant.get("python")) + else config.variant["python"] + ) base.update(_get_default_compilers(config.platform, python)) return base def parse_config_file(path, config): - from conda_build.metadata import select_lines, ns_cfg + from .metadata import get_selectors, select_lines + with open(path) as f: contents = f.read() - contents = select_lines(contents, ns_cfg(config), variants_in_place=False) + contents = select_lines(contents, get_selectors(config), variants_in_place=False) content = yaml.load(contents, Loader=yaml.loader.BaseLoader) or {} trim_empty_keys(content) return content @@ -125,9 +153,7 @@ def validate_spec(src, spec): # check for invalid characters errors.extend( - f" {k} key contains an invalid character '-'" - for k in spec - if "-" in k + f" {k} key contains an invalid character '-'" for k in spec if "-" in k ) # check for properly formatted zip_key @@ -148,10 +174,11 @@ def validate_spec(src, spec): # check for duplicate keys unique = set() errors.extend( - " zip_key entry {} in group {} is a duplicate, keys can only occur " - "in one group".format(k, zg) + f" zip_key entry {k} in group {zg} is a duplicate, keys can only occur " + "in one group" # include error if key has already been seen, otherwise add to unique keys - if k in unique else unique.add(k) + if k in unique + else unique.add(k) for zg in zip_keys for k in zg ) @@ -162,13 +189,18 @@ def validate_spec(src, spec): for zg in zip_keys # include error if all zip fields in a zip_group are the same size, # ignore missing fields - if len({len(ensure_list(spec[k])) if k in spec else None for k in zg} - {None}) > 1 + if len( + {len(ensure_list(spec[k])) if k in spec else None for k in zg} - {None} + ) + > 1 ) # filter out None values that were potentially added above errors = list(filter(None, errors)) if errors: - raise ValueError("Variant configuration errors in {}:\n{}".format(src, "\n".join(errors))) + raise ValueError( + "Variant configuration errors in {}:\n{}".format(src, "\n".join(errors)) + ) def find_config_files(metadata_or_path, config): @@ -199,14 +231,14 @@ def find_config_files(metadata_or_path, config): if not files and not config.ignore_system_variants: # user config - if cc_conda_build.get('config_file'): - cfg = resolve(cc_conda_build['config_file']) + if config_file := context.conda_build.get("config_file"): + cfg = resolve(config_file) else: - cfg = resolve(os.path.join('~', "conda_build_config.yaml")) + cfg = resolve(os.path.join("~", "conda_build_config.yaml")) if os.path.isfile(cfg): files.append(cfg) - cfg = resolve('conda_build_config.yaml') + cfg = resolve("conda_build_config.yaml") if os.path.isfile(cfg): files.append(cfg) @@ -220,8 +252,9 @@ def find_config_files(metadata_or_path, config): return files -def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_keys=None, - log_output=True): +def _combine_spec_dictionaries( + specs, extend_keys=None, filter_keys=None, zip_keys=None, log_output=True +): # each spec is a dictionary. Each subsequent spec replaces the previous one. # Only the last one with the key stays. 
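Reading the comment above concretely: for an ordinary key the spec that comes later simply wins, while keys listed under extend_keys are merged across specs. A small invented example:

specs = {
    "global_pins": {"python": ["3.10"], "ignore_version": ["numpy"]},
    "recipe_config": {"python": ["3.11", "3.12"], "ignore_version": ["zlib"]},
}
# ordinary key ("python"): the later spec replaces the earlier -> ["3.11", "3.12"]
# extend_keys entry ("ignore_version"): lists are concatenated and de-duplicated,
# effectively {"numpy", "zlib"}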
values = {} @@ -237,8 +270,8 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke if not keys or k in keys: if k in extend_keys: # update dictionaries, extend lists - if hasattr(v, 'keys'): - if k in values and hasattr(values[k], 'keys'): + if hasattr(v, "keys"): + if k in values and hasattr(values[k], "keys"): values[k].update(v) else: values[k] = v.copy() @@ -247,17 +280,19 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke values[k].extend(ensure_list(v)) # uniquify values[k] = list(set(values[k])) - elif k == 'zip_keys': + elif k == "zip_keys": v = [subval for subval in v if subval] if not isinstance(v[0], list) and not isinstance(v[0], tuple): v = [v] # should always be a list of lists, but users may specify as just a list values[k] = values.get(k, []) values[k].extend(v) - values[k] = list(list(set_group) for set_group in {tuple(group) - for group in values[k]}) + values[k] = list( + list(set_group) + for set_group in {tuple(group) for group in values[k]} + ) else: - if hasattr(v, 'keys'): + if hasattr(v, "keys"): values[k] = v.copy() else: # default "group" is just this one key. We latch onto other groups if @@ -273,31 +308,45 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke # Otherwise, we filter later. if all(group_item in spec for group_item in keys_in_group): for group_item in keys_in_group: - if len(ensure_list(spec[group_item])) != len(ensure_list(v)): - raise ValueError("All entries associated by a zip_key " - "field must be the same length. In {}, {} and {} are " - "different ({} and {})".format(spec_source, k, group_item, - len(ensure_list(v)), - len(ensure_list(spec[group_item])))) + if len(ensure_list(spec[group_item])) != len( + ensure_list(v) + ): + raise ValueError( + f"All entries associated by a zip_key " + f"field must be the same length. In {spec_source}, {k} and {group_item} " + f"are different ({len(ensure_list(v))} and " + f"{len(ensure_list(spec[group_item]))})" + ) values[group_item] = ensure_list(spec[group_item]) elif k in values: for group_item in keys_in_group: - if group_item in spec and \ - len(ensure_list(spec[group_item])) != len(ensure_list(v)): + if group_item in spec and len( + ensure_list(spec[group_item]) + ) != len(ensure_list(v)): break - if group_item in values and \ - len(ensure_list(values[group_item])) != len(ensure_list(v)): + if group_item in values and len( + ensure_list(values[group_item]) + ) != len(ensure_list(v)): break else: values[k] = v.copy() - missing_subvalues = [subvalue for subvalue in ensure_list(v) if subvalue not in values[k]] - missing_group_items = [group_item for group_item in keys_in_group if group_item not in spec] + missing_subvalues = [ + subvalue + for subvalue in ensure_list(v) + if subvalue not in values[k] + ] + missing_group_items = [ + group_item + for group_item in keys_in_group + if group_item not in spec + ] if len(missing_subvalues): - raise ValueError("variant config in {} is ambiguous because it\n" - "does not fully implement all zipped keys (To be clear: missing {})\n" - "or specifies a subspace that is not fully implemented (To be clear:\n" - ".. we did not find {} from {} in {}:{}).". 
- format(spec_source, missing_group_items, missing_subvalues, spec, k, values[k])) + raise ValueError( + f"variant config in {spec_source} is ambiguous because it does not fully " + f"implement all zipped keys (missing {missing_group_items}) or specifies a " + f"subspace that is not fully implemented (we did not find {missing_subvalues} " + f"from {spec} in {k}:{values[k]})." + ) return values @@ -311,18 +360,25 @@ def combine_specs(specs, log_output=True): names used in Jinja2 templated recipes. Values can be either single values (strings or integers), or collections (lists, tuples, sets). """ - extend_keys = DEFAULT_VARIANTS['extend_keys'][:] - extend_keys.extend([key for spec in specs.values() if spec - for key in ensure_list(spec.get('extend_keys'))]) + extend_keys = DEFAULT_VARIANTS["extend_keys"][:] + extend_keys.extend( + [ + key + for spec in specs.values() + if spec + for key in ensure_list(spec.get("extend_keys")) + ] + ) # first pass gets zip_keys entries from each and merges them. We treat these specially # below, keeping the size of related fields identical, or else the zipping makes no sense - zip_keys = _combine_spec_dictionaries(specs, extend_keys=extend_keys, - filter_keys=['zip_keys'], - log_output=log_output).get('zip_keys', []) - values = _combine_spec_dictionaries(specs, extend_keys=extend_keys, zip_keys=zip_keys, - log_output=log_output) + zip_keys = _combine_spec_dictionaries( + specs, extend_keys=extend_keys, filter_keys=["zip_keys"], log_output=log_output + ).get("zip_keys", []) + values = _combine_spec_dictionaries( + specs, extend_keys=extend_keys, zip_keys=zip_keys, log_output=log_output + ) return values @@ -337,9 +393,9 @@ def set_language_env_vars(variant): if variant_name in variant: value = str(variant[variant_name]) # legacy compatibility: python should be just first - if env_var_name == 'PY': - value = ''.join(value.split('.')[:2]) - env['CONDA_' + env_var_name] = value + if env_var_name == "PY": + value = "".join(value.split(".")[:2]) + env["CONDA_" + env_var_name] = value return env @@ -354,12 +410,14 @@ def _get_zip_keys(spec): :rtype: set :raise ValueError: 'zip_keys' cannot be standardized """ - zip_keys = spec.get('zip_keys') + zip_keys = spec.get("zip_keys") if not zip_keys: return set() elif islist(zip_keys, uniform=lambda e: isinstance(e, str)): return {frozenset(zip_keys)} - elif islist(zip_keys, uniform=lambda e: islist(e, uniform=lambda e: isinstance(e, str))): + elif islist( + zip_keys, uniform=lambda e: islist(e, uniform=lambda e: isinstance(e, str)) + ): return {frozenset(zg) for zg in zip_keys} raise ValueError("'zip_keys' expect list of string or list of lists of string") @@ -376,10 +434,10 @@ def _get_extend_keys(spec, include_defaults=True): :return: Standardized 'extend_keys' value :rtype: set """ - extend_keys = {'zip_keys', 'extend_keys'} + extend_keys = {"zip_keys", "extend_keys"} if include_defaults: - extend_keys.update(DEFAULT_VARIANTS['extend_keys']) - return extend_keys.union(ensure_list(spec.get('extend_keys'))) + extend_keys.update(DEFAULT_VARIANTS["extend_keys"]) + return extend_keys.union(ensure_list(spec.get("extend_keys"))) def _get_passthru_keys(spec, zip_keys=None, extend_keys=None): @@ -400,7 +458,7 @@ def _get_passthru_keys(spec, zip_keys=None, extend_keys=None): zip_keys = _get_zip_keys(spec) if extend_keys is None: extend_keys = _get_extend_keys(spec) - passthru_keys = {'replacements', 'extend_keys', 'zip_keys'} + passthru_keys = {"replacements", "extend_keys", "zip_keys"} return 
passthru_keys.union(extend_keys).difference(*zip_keys).intersection(spec) @@ -431,7 +489,7 @@ def filter_by_key_value(variants, key, values, source_name): """variants is the exploded out list of dicts, with one value per key in each dict. key and values come from subsequent variants before they are exploded out.""" reduced_variants = [] - if hasattr(values, 'keys'): + if hasattr(values, "keys"): reduced_variants = variants else: # break this out into a full loop so that we can show filtering output @@ -440,10 +498,10 @@ def filter_by_key_value(variants, key, values, source_name): reduced_variants.append(variant) else: log = get_logger(__name__) - log.debug('Filtering variant with key {key} not matching target value(s) ' - '({tgt_vals}) from {source_name}, actual {actual_val}'.format( - key=key, tgt_vals=values, source_name=source_name, - actual_val=variant.get(key))) + log.debug( + f"Filtering variant with key {key} not matching target value(s) " + f"({values}) from {source_name}, actual {variant.get(key)}" + ) return reduced_variants @@ -505,7 +563,9 @@ def explode_variants(spec): (k,): [ensure_list(v, include_dict=False) for v in ensure_list(spec[k])] for k in explode_keys.difference(*zip_keys) } - explode.update({zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys}) + explode.update( + {zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys} + ) trim_empty_keys(explode) # Cartesian Product of dict of lists @@ -514,7 +574,9 @@ def explode_variants(spec): variants = [] for values in product(*explode.values()): variant = {k: copy(v) for k, v in passthru.items()} - variant.update({k: v for zg, zv in zip(explode, values) for k, v in zip(zg, zv)}) + variant.update( + {k: v for zg, zv in zip(explode, values) for k, v in zip(zg, zv)} + ) variants.append(variant) return variants @@ -534,22 +596,26 @@ def list_of_dicts_to_dict_of_lists(list_of_dicts): squished = OrderedDict() all_zip_keys = set() groups = None - zip_key_groups = (list_of_dicts[0]['zip_keys'] if 'zip_keys' in list_of_dicts[0] and - list_of_dicts[0]['zip_keys'] else []) + zip_key_groups = ( + list_of_dicts[0]["zip_keys"] + if "zip_keys" in list_of_dicts[0] and list_of_dicts[0]["zip_keys"] + else [] + ) if zip_key_groups: - if (isinstance(list_of_dicts[0]['zip_keys'][0], list) or - isinstance(list_of_dicts[0]['zip_keys'][0], tuple)): - groups = list_of_dicts[0]['zip_keys'] + if isinstance(list_of_dicts[0]["zip_keys"][0], list) or isinstance( + list_of_dicts[0]["zip_keys"][0], tuple + ): + groups = list_of_dicts[0]["zip_keys"] else: - groups = [list_of_dicts[0]['zip_keys']] + groups = [list_of_dicts[0]["zip_keys"]] for group in groups: for item in group: all_zip_keys.add(item) for variant in list_of_dicts: for k, v in variant.items(): - if k == 'zip_keys': + if k == "zip_keys": continue - if hasattr(v, 'keys'): + if hasattr(v, "keys"): existing_value = squished.get(k, OrderedDict()) existing_value.update(v) squished[k] = existing_value @@ -565,7 +631,7 @@ def list_of_dicts_to_dict_of_lists(list_of_dicts): values = list(zip(*set(zip(*(squished[key] for key in group))))) for idx, key in enumerate(group): squished[key] = values[idx] - squished['zip_keys'] = zip_key_groups + squished["zip_keys"] = zip_key_groups return squished @@ -573,10 +639,11 @@ def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None) # outputs a tuple of (combined_spec_dict_of_lists, used_spec_file_dict) # # The output of this function is order preserving, unlike get_package_variants - if 
hasattr(recipedir_or_metadata, 'config'): + if hasattr(recipedir_or_metadata, "config"): config = recipedir_or_metadata.config if not config: - from conda_build.config import Config + from .config import Config + config = Config() files = find_config_files(recipedir_or_metadata, config) @@ -586,10 +653,10 @@ def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None) specs[f] = parse_config_file(f, config) # this is the override of the variants from files and args with values from CLI or env vars - if hasattr(config, 'variant') and config.variant: - specs['config.variant'] = config.variant + if hasattr(config, "variant") and config.variant: + specs["config.variant"] = config.variant if variants: - specs['argument_variants'] = variants + specs["argument_variants"] = variants for f, spec in specs.items(): validate_spec(f, spec) @@ -605,35 +672,51 @@ def filter_combined_spec_to_used_keys(combined_spec, specs): # delete the default specs, so that they don't unnecessarily limit the matrix specs = specs.copy() - del specs['internal_defaults'] + del specs["internal_defaults"] # TODO: act here? combined_spec = explode_variants(combined_spec) + # seen_keys makes sure that a setting from a lower-priority spec doesn't clobber + # the same setting that has been redefined in a higher-priority spec. + seen_keys = set() + # The specs are checked from high to low priority order. for source, source_specs in reversed(specs.items()): for k, vs in source_specs.items(): - if k not in extend_keys: + if k not in extend_keys and k not in seen_keys: # when filtering ends up killing off all variants, we just ignore that. Generally, # this arises when a later variant config overrides, rather than selects a # subspace of earlier configs - combined_spec = (filter_by_key_value(combined_spec, k, vs, source_name=source) or - combined_spec) + combined_spec = ( + filter_by_key_value(combined_spec, k, vs, source_name=source) + or combined_spec + ) + seen_keys.add(k) return combined_spec def get_package_variants(recipedir_or_metadata, config=None, variants=None): - combined_spec, specs = get_package_combined_spec(recipedir_or_metadata, config=config, variants=variants) + combined_spec, specs = get_package_combined_spec( + recipedir_or_metadata, config=config, variants=variants + ) return filter_combined_spec_to_used_keys(combined_spec, specs=specs) -def get_vars(variants, loop_only=False): +@deprecated.argument("24.5", "24.7", "loop_only") +def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]: """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" - special_keys = {'pin_run_as_build', 'zip_keys', 'ignore_version'} - special_keys.update(set(ensure_list(variants[0].get('extend_keys')))) - loop_vars = [k for k in variants[0] if k not in special_keys and - (not loop_only or - any(variant[k] != variants[0][k] for variant in variants[1:]))] - return loop_vars + first, *others = variants + special_keys = { + "pin_run_as_build", + "zip_keys", + "ignore_version", + *ensure_list(first.get("extend_keys")), + } + return { + var + for var in set(first) - special_keys + if any(first[var] != other[var] for other in others) + } @lru_cache(maxsize=None) @@ -642,25 +725,33 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): recipe_lines = recipe_text.splitlines() for v in variant: all_res = [] - compiler_match = re.match(r'(.*?)_compiler(_version)?$', v) - if compiler_match and not selectors_only: - compiler_lang = 
compiler_match.group(1) - compiler_regex = ( - r"\{\s*compiler\([\'\"]%s[\"\'][^\{]*?\}" % re.escape(compiler_lang) + target_match = re.match(r"(.*?)_(compiler|stdlib)(_version)?$", v) + if target_match and not selectors_only: + target_lang = target_match.group(1) + target_kind = target_match.group(2) + target_lang_regex = re.escape(target_lang) + target_regex = ( + rf"\{{\s*{target_kind}\([\'\"]{target_lang_regex}[\"\'][^\{{]*?\}}" ) - all_res.append(compiler_regex) - variant_lines = [line for line in recipe_lines if v in line or compiler_lang in line] + all_res.append(target_regex) + variant_lines = [ + line for line in recipe_lines if v in line or target_lang in line + ] else: - variant_lines = [line for line in recipe_lines if v in line.replace('-', '_')] + variant_lines = [ + line for line in recipe_lines if v in line.replace("-", "_") + ] if not variant_lines: continue v_regex = re.escape(v) - v_req_regex = '[-_]'.join(map(re.escape, v.split('_'))) - variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex - selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" % v_regex - conditional_regex = r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" + v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) + variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}" + selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" + conditional_regex = ( + r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" + ) # plain req name, no version spec. Look for end of line after name, or comment or selector - requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex + requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)" if selectors_only: all_res.insert(0, selector_regex) else: @@ -669,29 +760,45 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): all_res = r"|".join(all_res) if any(re.search(all_res, line) for line in variant_lines): used_variables.add(v) - if v in ('c_compiler', 'cxx_compiler'): - if 'CONDA_BUILD_SYSROOT' in variant: - used_variables.add('CONDA_BUILD_SYSROOT') + if v in ("c_stdlib", "c_compiler", "cxx_compiler"): + if "CONDA_BUILD_SYSROOT" in variant: + used_variables.add("CONDA_BUILD_SYSROOT") return used_variables -def find_used_variables_in_shell_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"(^[^$]*?\$\{?\s*%s\s*[\s|\}])" % v - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_shell_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"(^[^$]*?\$\{{?\s*{re.escape(variant)}\s*[\s|\}}])", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } -def find_used_variables_in_batch_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"\%" + v + r"\%" - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_batch_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than 
re.search + and re.search( + rf"\%{re.escape(variant)}\%", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } diff --git a/conda_build/version.py b/conda_build/version.py new file mode 100644 index 0000000000..15d88478dc --- /dev/null +++ b/conda_build/version.py @@ -0,0 +1,159 @@ +# Copyright (C) Donald Stufft and individual contributors +# SPDX-License-Identifier: BSD-2-Clause +""" +This file was partially copied from the packaging.version module before the +LegacyVersion class was removed to continue to support version parsing in +a backward-compatible way where PEP 440 support can't be used. + +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import re +from typing import Iterator, List, Tuple, Union + +from packaging.version import InvalidVersion, Version, _BaseVersion + +LegacyCmpKey = Tuple[int, Tuple[str, ...]] + + +def _parse(version: str) -> Union["_LegacyVersion", "Version"]: + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`_LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. 
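# A minimal, self-contained sketch of the detection logic used by the
# rewritten find_used_variables_in_shell_script/_batch_script helpers above,
# mirrored on in-memory strings rather than files (the sample scripts and
# variant names below are purely illustrative):
from __future__ import annotations

import re
from typing import Iterable


def used_in_shell(variants: Iterable[str], text: str) -> set[str]:
    # a variant counts as "used" when the script references it as $NAME or ${NAME}
    return {
        v
        for v in variants
        if v in text  # cheap substring pre-filter, as in the helpers above
        and re.search(
            rf"(^[^$]*?\$\{{?\s*{re.escape(v)}\s*[\s|\}}])",
            text,
            flags=re.MULTILINE | re.DOTALL,
        )
    }


def used_in_batch(variants: Iterable[str], text: str) -> set[str]:
    # on Windows the reference style is %NAME%
    return {
        v
        for v in variants
        if v in text
        and re.search(rf"\%{re.escape(v)}\%", text, flags=re.MULTILINE | re.DOTALL)
    }


sh_text = "#!/bin/bash\n${PYTHON} setup.py install\necho $c_compiler\n"
bat_text = "%PYTHON% setup.py install\r\n"
print(used_in_shell(["PYTHON", "c_compiler", "numpy"], sh_text))  # PYTHON and c_compiler
print(used_in_batch(["PYTHON", "numpy"], bat_text))               # PYTHON only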
+ """ + try: + return Version(version) + except InvalidVersion: + return _LegacyVersion(version) + + +class _LegacyVersion(_BaseVersion): + def __init__(self, version: str) -> None: + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self) -> str: + return self._version + + def __repr__(self) -> str: + return f"<_LegacyVersion('{self}')>" + + @property + def public(self) -> str: + return self._version + + @property + def base_version(self) -> str: + return self._version + + @property + def epoch(self) -> int: + return -1 + + @property + def release(self) -> None: + return None + + @property + def pre(self) -> None: + return None + + @property + def post(self) -> None: + return None + + @property + def dev(self) -> None: + return None + + @property + def local(self) -> None: + return None + + @property + def is_prerelease(self) -> bool: + return False + + @property + def is_postrelease(self) -> bool: + return False + + @property + def is_devrelease(self) -> bool: + return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s: str) -> Iterator[str]: + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version: str) -> LegacyCmpKey: + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts: List[str] = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return epoch, tuple(parts) diff --git a/conda_build/windows.py b/conda_build/windows.py index 9bee722f18..00287c50bf 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -2,32 +2,39 @@ # SPDX-License-Identifier: BSD-3-Clause import os import pprint -from os.path import isdir, join, dirname, isfile +from os.path import dirname, isdir, isfile, join # importing setuptools patches distutils so that it knows how to find VC for python 2.7 import setuptools # noqa + # Leverage the hard work done by setuptools/distutils to find vcvarsall using # either the registry or the VS**COMNTOOLS environment variable try: - from distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall - from distutils.msvc9compiler import Reg, WINSDK_BASE + from setuptools._distutils.msvc9compiler import WINSDK_BASE, Reg + from setuptools._distutils.msvc9compiler import ( + find_vcvarsall as distutils_find_vcvarsall, + ) except: # Allow some imports to work for cross or CONDA_SUBDIR usage. 
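# A short usage sketch of the _parse() fallback added in conda_build/version.py
# above (assumes that module is importable and the "packaging" dependency is
# present): valid PEP 440 strings become packaging Version objects, anything
# else becomes a _LegacyVersion that still supports ordering and always sorts
# before PEP 440 versions thanks to its hard-coded epoch of -1.
from conda_build.version import _parse

print(type(_parse("1.2.post1")).__name__)  # Version
print(type(_parse("2011g")).__name__)      # _LegacyVersion (not valid PEP 440)
print(_parse("2011g") < _parse("0.1"))     # True: legacy versions order first
print([str(v) for v in sorted(map(_parse, ["1.0", "1.0rc1", "1.0.post1"]))])
# ['1.0rc1', '1.0', '1.0.post1']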
pass -from conda_build import environ -from conda_build.utils import (check_call_env, path_prepended, - copy_into, get_logger, write_bat_activation_text) -from conda_build.variants import set_language_env_vars, get_default_variant - +from . import environ +from .utils import ( + check_call_env, + copy_into, + get_logger, + path_prepended, + write_bat_activation_text, +) +from .variants import get_default_variant, set_language_env_vars VS_VERSION_STRING = { - '8.0': 'Visual Studio 8 2005', - '9.0': 'Visual Studio 9 2008', - '10.0': 'Visual Studio 10 2010', - '11.0': 'Visual Studio 11 2012', - '12.0': 'Visual Studio 12 2013', - '14.0': 'Visual Studio 14 2015' + "8.0": "Visual Studio 8 2005", + "9.0": "Visual Studio 9 2008", + "10.0": "Visual Studio 10 2010", + "11.0": "Visual Studio 11 2012", + "12.0": "Visual Studio 12 2013", + "14.0": "Visual Studio 14 2015", } @@ -40,23 +47,24 @@ def fix_staged_scripts(scripts_dir, config): return for fn in os.listdir(scripts_dir): # process all the extensionless files - if not isfile(join(scripts_dir, fn)) or '.' in fn: + if not isfile(join(scripts_dir, fn)) or "." in fn: continue # read as binary file to ensure we don't run into encoding errors, see #1632 - with open(join(scripts_dir, fn), 'rb') as f: + with open(join(scripts_dir, fn), "rb") as f: line = f.readline() # If it's a #!python script - if not (line.startswith(b'#!') and b'python' in line.lower()): + if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print('Adjusting unix-style #! script %s, ' - 'and adding a .bat file for it' % fn) + print(f"Adjusting unix-style #! script {fn}, and adding a .bat file for it") # copy it with a .py extension (skipping that first #! line) - with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo: + with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file - copy_into(join(dirname(__file__), 'cli-%s.exe' % config.host_arch), - join(scripts_dir, fn + '.exe')) + copy_into( + join(dirname(__file__), f"cli-{config.host_arch}.exe"), + join(scripts_dir, fn + ".exe"), + ) # remove the original script os.remove(join(scripts_dir, fn)) @@ -69,21 +77,24 @@ def build_vcvarsall_vs_path(version): Expected versions are of the form {9.0, 10.0, 12.0, 14.0} """ # Set up a load of paths that can be imported from the tests - if 'ProgramFiles(x86)' in os.environ: - PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)'] + if "ProgramFiles(x86)" in os.environ: + PROGRAM_FILES_PATH = os.environ["ProgramFiles(x86)"] else: - PROGRAM_FILES_PATH = os.environ['ProgramFiles'] + PROGRAM_FILES_PATH = os.environ["ProgramFiles"] - flatversion = str(version).replace('.', '') + flatversion = str(version).replace(".", "") vstools = f"VS{flatversion}COMNTOOLS" if vstools in os.environ: - return os.path.join(os.environ[vstools], '..\\..\\VC\\vcvarsall.bat') + return os.path.join(os.environ[vstools], "..\\..\\VC\\vcvarsall.bat") else: # prefer looking at env var; fall back to program files defaults - return os.path.join(PROGRAM_FILES_PATH, - f'Microsoft Visual Studio {version}', 'VC', - 'vcvarsall.bat') + return os.path.join( + PROGRAM_FILES_PATH, + f"Microsoft Visual Studio {version}", + "VC", + "vcvarsall.bat", + ) def msvc_env_cmd(bits, config, override=None): @@ -91,16 +102,23 @@ def msvc_env_cmd(bits, config, override=None): # there's clear user demand, it's not clear that we should invest the # effort into updating a known deprecated function for a new platform. 
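# An illustrative, platform-independent mirror of the lookup order in
# build_vcvarsall_vs_path() above (a sketch only; real runs depend on the
# machine's environment): the VS**COMNTOOLS variable wins, otherwise the
# conventional "Program Files (x86)" install location is assumed.
import os


def sketch_vcvarsall_path(version: str) -> str:
    program_files = os.environ.get(
        "ProgramFiles(x86)",
        os.environ.get("ProgramFiles", r"C:\Program Files (x86)"),  # placeholder default
    )
    vstools = f"VS{version.replace('.', '')}COMNTOOLS"
    if vstools in os.environ:
        return os.path.join(os.environ[vstools], "..\\..\\VC\\vcvarsall.bat")
    return os.path.join(
        program_files, f"Microsoft Visual Studio {version}", "VC", "vcvarsall.bat"
    )


# With no VS140COMNTOOLS set, sketch_vcvarsall_path("14.0") yields
# <Program Files (x86)>\Microsoft Visual Studio 14.0\VC\vcvarsall.bat;
# msvc_env_cmd() then wraps that path in a `call "<vcvarsall.bat>" x86|amd64`
# line alongside the `set` statements it accumulates.
print(sketch_vcvarsall_path("14.0"))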
log = get_logger(__name__) - log.warn("Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. " - "If this recipe does not use a compiler, this message is safe to ignore. " - "Otherwise, use {{compiler('')}} jinja2 in requirements/build.") + log.warn( + "Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. " + "If this recipe does not use a compiler, this message is safe to ignore. " + "Otherwise, use {{compiler('')}} jinja2 in requirements/build." + ) + if bits not in ["64", "32"]: + log.warn(f"The legacy MSVC compiler setup does not support {bits} builds. ") + return "" if override: - log.warn("msvc_compiler key in meta.yaml is deprecated. Use the new" - "variant-powered compiler configuration instead. Note that msvc_compiler" - "is incompatible with the new {{{{compiler('c')}}}} jinja scheme.") + log.warn( + "msvc_compiler key in meta.yaml is deprecated. Use the new" + "variant-powered compiler configuration instead. Note that msvc_compiler" + "is incompatible with the new {{{{compiler('c')}}}} jinja scheme." + ) # this has been an int at times. Make sure it's a string for consistency. bits = str(bits) - arch_selector = 'x86' if bits == '32' else 'amd64' + arch_selector = "x86" if bits == "32" else "amd64" msvc_env_lines = [] @@ -113,25 +131,28 @@ def msvc_env_cmd(bits, config, override=None): # For > 3.5 it literally just skips the validation logic. # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py # for more information. - msvc_env_lines.append('set DISTUTILS_USE_SDK=1') + msvc_env_lines.append("set DISTUTILS_USE_SDK=1") # This is also required to hit the 'don't validate' logic on < 3.5. # For > 3.5 this is ignored. - msvc_env_lines.append('set MSSdk=1') + msvc_env_lines.append("set MSSdk=1") if not version: - py_ver = config.variant.get('python', get_default_variant(config)['python']) + py_ver = config.variant.get("python", get_default_variant(config)["python"]) if int(py_ver[0]) >= 3: - if int(py_ver.split('.')[1]) < 5: - version = '10.0' - version = '14.0' + if int(py_ver.split(".")[1]) < 5: + version = "10.0" + version = "14.0" else: - version = '9.0' + version = "9.0" if float(version) >= 14.0: # For Python 3.5+, ensure that we link with the dynamic runtime. See # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info - msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{}.dll'.format( - version.replace('.', ''))) + msvc_env_lines.append( + "set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{}.dll".format( + version.replace(".", "") + ) + ) vcvarsall_vs_path = build_vcvarsall_vs_path(version) @@ -139,7 +160,7 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # Default argument `arch_selector` is defined above return f'call "{cmd}" {arch}' - vs_major = version.split('.')[0] + vs_major = version.split(".")[0] msvc_env_lines.append(f'set "VS_VERSION={version}"') msvc_env_lines.append(f'set "VS_MAJOR={vs_major}"') msvc_env_lines.append(f'set "VS_YEAR={VS_VERSION_STRING[version][-4:]}"') @@ -147,19 +168,23 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # No Win64 for VS 2019. 
msvc_env_lines.append(f'set "CMAKE_GENERATOR={VS_VERSION_STRING[version]}"') else: - msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] + - {'64': ' Win64', '32': ''}[bits])) + msvc_env_lines.append( + 'set "CMAKE_GENERATOR={}"'.format( + VS_VERSION_STRING[version] + {"64": " Win64", "32": ""}[bits] + ) + ) # tell msys2 to ignore path conversions for issue-causing windows-style flags in build # See https://github.com/conda-forge/icu-feedstock/pull/5 msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"') msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"') - if version == '10.0': + if version == "10.0": try: - WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'), - 'installationfolder') - WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd') + WIN_SDK_71_PATH = Reg.get_value( + os.path.join(WINSDK_BASE, "v7.1"), "installationfolder" + ) + WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, "Bin", "SetEnv.cmd") - win_sdk_arch = '/Release /x86' if bits == '32' else '/Release /x64' + win_sdk_arch = "/Release /x86" if bits == "32" else "/Release /x64" win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch) # There are two methods of building Python 3.3 and 3.4 extensions (both @@ -173,27 +198,29 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # up the environment does **not EXIT 1** and therefore we must fall # back to attempting to set up VS2010. # DelayedExpansion is required for the SetEnv.cmd - msvc_env_lines.append('Setlocal EnableDelayedExpansion') + msvc_env_lines.append("Setlocal EnableDelayedExpansion") msvc_env_lines.append(win_sdk_cmd) # If the WindowsSDKDir environment variable has not been successfully # set then try activating VS2010 - msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format( - WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path))) + msvc_env_lines.append( + f'if not "%WindowsSDKDir%" == "{WIN_SDK_71_PATH}" ( {build_vcvarsall_cmd(vcvarsall_vs_path)} )' + ) # sdk is not installed. Fall back to only trying VS 2010 except KeyError: msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) - elif version == '9.0': + elif version == "9.0": # Get the Visual Studio 2008 path (not the Visual C++ for Python path) # and get the 'vcvars64.bat' from inside the bin (in the directory above # that returned by distutils_find_vcvarsall) try: - VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)), - 'bin', 'vcvars64.bat') + VCVARS64_VS9_BAT_PATH = os.path.join( + os.path.dirname(distutils_find_vcvarsall(9)), "bin", "vcvars64.bat" + ) # there's an exception if VS or the VC compiler for python are not actually installed. except (KeyError, TypeError): VCVARS64_VS9_BAT_PATH = None - error1 = 'IF %ERRORLEVEL% NEQ 0 {}' + error1 = "IF %ERRORLEVEL% NEQ 0 {}" # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7 msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) @@ -201,55 +228,66 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # the amd64 build files, so we call the vcvars64.bat manually, # rather than using the vcvarsall.bat which would try and call the # missing bat file. 
- if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH: - msvc_env_lines.append(error1.format( - build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH))) + if arch_selector == "amd64" and VCVARS64_VS9_BAT_PATH: + msvc_env_lines.append( + error1.format(build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)) + ) # Otherwise, fall back to icrosoft Visual C++ Compiler for Python 2.7+ # by using the logic provided by setuptools - msvc_env_lines.append(error1.format( - build_vcvarsall_cmd(distutils_find_vcvarsall(9)))) + msvc_env_lines.append( + error1.format(build_vcvarsall_cmd(distutils_find_vcvarsall(9))) + ) else: # Visual Studio 14 or otherwise msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) - return '\n'.join(msvc_env_lines) + '\n' + return "\n".join(msvc_env_lines) + "\n" def write_build_scripts(m, env, bld_bat): - env_script = join(m.config.work_dir, 'build_env_setup.bat') + env_script = join(m.config.work_dir, "build_env_setup.bat") if m.noarch == "python": env["PYTHONDONTWRITEBYTECODE"] = True import codecs - with codecs.getwriter('utf-8')(open(env_script, 'wb')) as fo: + + with codecs.getwriter("utf-8")(open(env_script, "wb")) as fo: # more debuggable with echo on - fo.write('@echo on\n') + fo.write("@echo on\n") for key, value in env.items(): - if value != '' and value is not None: + if value != "" and value is not None: fo.write(f'set "{key}={value}"\n') if not m.uses_new_style_compiler_activation: - fo.write(msvc_env_cmd(bits=m.config.host_arch, config=m.config, - override=m.get_value('build/msvc_compiler', None))) + fo.write( + msvc_env_cmd( + bits=m.config.host_arch, + config=m.config, + override=m.get_value("build/msvc_compiler", None), + ) + ) # Reset echo on, because MSVC scripts might have turned it off - fo.write('@echo on\n') + fo.write("@echo on\n") fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"])) fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"])) - if m.config.activate and m.name() != 'conda': + if m.config.activate and m.name() != "conda": write_bat_activation_text(fo, m) # bld_bat may have been generated elsewhere with contents of build/script - work_script = join(m.config.work_dir, 'conda_build.bat') + work_script = join(m.config.work_dir, "conda_build.bat") if os.path.isfile(bld_bat): with open(bld_bat) as fi: data = fi.read() - with codecs.getwriter('utf-8')(open(work_script, 'wb')) as fo: + with codecs.getwriter("utf-8")(open(work_script, "wb")) as fo: fo.write('IF "%CONDA_BUILD%" == "" (\n') fo.write(f" call {env_script}\n") - fo.write(')\n') + fo.write(")\n") fo.write("REM ===== end generated header =====\n") fo.write(data) return work_script, env_script def build(m, bld_bat, stats, provision_only=False): + # TODO: Prepending the prefixes here should probably be guarded by + # if not m.activate_build_script: + # Leaving it as is, for now, since we need a quick, non-disruptive patch release. with path_prepended(m.config.host_prefix): with path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) @@ -261,7 +299,7 @@ def build(m, bld_bat, stats, provision_only=False): # Note that pip env "NO" variables are inverted logic. # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. # - env["PIP_NO_BUILD_ISOLATION"] = 'False' + env["PIP_NO_BUILD_ISOLATION"] = "False" # some other env vars to have pip ignore dependencies. # we supply them ourselves instead. 
# See note above about inverted logic on "NO" variables @@ -272,7 +310,7 @@ def build(m, bld_bat, stats, provision_only=False): # disabled as this results in .egg-info rather than # .dist-info directories being created, see gh-3094 # set PIP_CACHE_DIR to a path in the work dir that does not exist. - env['PIP_CACHE_DIR'] = m.config.pip_cache_dir + env["PIP_CACHE_DIR"] = m.config.pip_cache_dir # tell pip to not get anything from PyPI, please. We have everything we need # locally, and if we don't, it's a problem. @@ -281,23 +319,24 @@ def build(m, bld_bat, stats, provision_only=False): # set variables like CONDA_PY in the test environment env.update(set_language_env_vars(m.config.variant)) - for name in 'BIN', 'INC', 'LIB': - path = env['LIBRARY_' + name] + for name in "BIN", "INC", "LIB": + path = env["LIBRARY_" + name] if not isdir(path): os.makedirs(path) work_script, env_script = write_build_scripts(m, env, bld_bat) if not provision_only and os.path.isfile(work_script): - cmd = ['cmd.exe', '/d', '/c', os.path.basename(work_script)] + cmd = ["cmd.exe", "/d", "/c", os.path.basename(work_script)] # rewrite long paths in stdout back to their env variables if m.config.debug or m.config.no_rewrite_stdout_env: rewrite_env = None else: rewrite_env = { - k: env[k] - for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env + k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) - check_call_env(cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env) - fix_staged_scripts(join(m.config.host_prefix, 'Scripts'), config=m.config) + print(f"Rewriting env in output: {pprint.pformat(rewrite_env)}") + check_call_env( + cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env + ) + fix_staged_scripts(join(m.config.host_prefix, "Scripts"), config=m.config) diff --git a/docs/requirements.txt b/docs/requirements.txt index 735d2460cb..58f1311df7 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,20 +1,21 @@ -linkify-it-py==1.0.1 -myst-parser==0.15.2 -Pillow==9.0.1 -PyYAML==5.4.1 -requests==2.26.0 -ruamel.yaml==0.17.16 -Sphinx==4.2.0 -sphinx-argparse==0.3.1 +Pillow==10.0.1 +PyYAML==6.0.1 +Sphinx==7.3.7 +conda-sphinx-theme==0.2.1 +linkify-it-py==2.0.2 +myst-parser==2.0.0 +pylint==2.17.5 +requests==2.31.0 +ruamel.yaml==0.17.32 +sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 -sphinx-rtd-theme==1.0.0 -sphinx-sitemap==2.2.0 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-devhelp==1.0.2 -sphinxcontrib-htmlhelp==2.0.0 +sphinx-sitemap==2.5.1 +sphinx_design==0.5.0 +sphinxcontrib-applehelp==1.0.7 +sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-htmlhelp==2.0.4 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-plantuml==0.21 +sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 -sphinxcontrib-qthelp==1.0.3 -sphinxcontrib-serializinghtml==1.1.5 -pylint==2.11.1 +sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-serializinghtml==1.1.9 diff --git a/docs/scrape_help.py b/docs/scrape_help.py index ac7ffe0e45..66d5af1e57 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -1,21 +1,20 @@ #!/usr/bin/env python # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from subprocess import check_output, PIPE, Popen, STDOUT -from os.path import join, dirname, abspath, isdir -from os import makedirs, pathsep -from collections import OrderedDict -from shlex import quote -from concurrent.futures import ThreadPoolExecutor - -import sys import json import re +import sys +from 
collections import OrderedDict +from concurrent.futures import ThreadPoolExecutor +from os import makedirs, pathsep +from os.path import abspath, dirname, isdir, join +from shlex import quote +from subprocess import PIPE, STDOUT, Popen, check_output -manpath = join(dirname(__file__), 'build', 'man') +manpath = join(dirname(__file__), "build", "man") if not isdir(manpath): makedirs(manpath) -rstpath = join(dirname(__file__), 'source', 'commands') +rstpath = join(dirname(__file__), "source", "commands") if not isdir(rstpath): makedirs(rstpath) @@ -31,7 +30,7 @@ def run_command(*args, **kwargs): - include_stderr = kwargs.pop('include_stderr', False) + include_stderr = kwargs.pop("include_stderr", False) if include_stderr: stderr_pipe = STDOUT else: @@ -39,29 +38,34 @@ def run_command(*args, **kwargs): p = Popen(*args, stdout=PIPE, stderr=stderr_pipe, **kwargs) out, err = p.communicate() if err is None: - err = b'' - out, err = out.decode('utf-8'), err.decode('utf-8') + err = b"" + out, err = out.decode("utf-8"), err.decode("utf-8") if p.returncode != 0: - print("{!r} failed with error code {}".format(' '.join(map(quote, args[0])), p.returncode), file=sys.stderr) + print( + "{!r} failed with error code {}".format( + " ".join(map(quote, args[0])), p.returncode + ), + file=sys.stderr, + ) elif err: - print("{!r} gave stderr output: {}".format(' '.join(*args), err)) + print("{!r} gave stderr output: {}".format(" ".join(*args), err)) return out def str_check_output(*args, **kwargs): - return check_output(*args, **kwargs).decode('utf-8') + return check_output(*args, **kwargs).decode("utf-8") def conda_help(cache=[]): if cache: return cache[0] - cache.append(str_check_output(['conda', '--help'])) + cache.append(str_check_output(["conda", "--help"])) return cache[0] def conda_command_help(command): - return str_check_output(['conda'] + command.split() + ['--help']) + return str_check_output(["conda"] + command.split() + ["--help"]) def conda_commands(): @@ -71,14 +75,14 @@ def conda_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'command': + if line.strip() == "command": start = True continue if start: # The end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) return commands @@ -90,25 +94,25 @@ def external_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'other commands:': + if line.strip() == "other commands:": start = True continue if start: # The end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) # TODO: Parallelize this print("Getting list of external subcommands") - subcommands_re = re.compile(r'\s*\{(.*)\}\s*') + subcommands_re = re.compile(r"\s*\{(.*)\}\s*") # Check for subcommands (like conda skeleton pypi) command_help = {} def get_help(command): command_help[command] = conda_command_help(command) - print("Checked for subcommand help for %s" % command) + print(f"Checked for subcommand help for {command}") with ThreadPoolExecutor(len(commands)) as executor: # list() is needed for force exceptions to be raised @@ -124,8 +128,7 @@ def get_help(command): if start: m = subcommands_re.match(line) if m: - commands.extend([f'{command} {i}' for i in - m.group(1).split(',')]) + commands.extend([f"{command} {i}" for i in m.group(1).split(",")]) break return commands @@ -133,78 +136,86 @@ def get_help(command): def man_replacements(): # XXX: 
We should use conda-api for this, but it's currently annoying to set the # root prefix with. - info = json.loads(str_check_output(['conda', 'info', '--json'])) + info = json.loads(str_check_output(["conda", "info", "--json"])) # We need to use an ordered dict because the root prefix should be # replaced last, since it is typically a substring of the default prefix - r = OrderedDict([ - (info['default_prefix'], 'default prefix'), - (pathsep.join(info['envs_dirs']), 'envs dirs'), - # For whatever reason help2man won't italicize these on its own - # Note these require conda > 3.7.1 - (info['user_rc_path'], r'\fI\,user .condarc path\/\fP'), - (info['sys_rc_path'], r'\fI\,system .condarc path\/\fP'), - - (info['root_prefix'], r'root prefix'), - ]) + r = OrderedDict( + [ + (info["default_prefix"], "default prefix"), + (pathsep.join(info["envs_dirs"]), "envs dirs"), + # For whatever reason help2man won't italicize these on its own + # Note these require conda > 3.7.1 + (info["user_rc_path"], r"\fI\,user .condarc path\/\fP"), + (info["sys_rc_path"], r"\fI\,system .condarc path\/\fP"), + (info["root_prefix"], r"root prefix"), + ] + ) return r def generate_man(command): - conda_version = run_command(['conda', '--version'], include_stderr=True) + conda_version = run_command(["conda", "--version"], include_stderr=True) - manpage = '' + manpage = "" retries = 5 while not manpage and retries: - manpage = run_command([ - 'help2man', - '--name', 'conda %s' % command, - '--section', '1', - '--source', 'Anaconda, Inc.', - '--version-string', conda_version, - '--no-info', - 'conda %s' % command, - ]) + manpage = run_command( + [ + "help2man", + "--name", + f"conda {command}", + "--section", + "1", + "--source", + "Anaconda, Inc.", + "--version-string", + conda_version, + "--no-info", + f"conda {command}", + ] + ) retries -= 1 if not manpage: - sys.exit("Error: Could not get help for conda %s" % command) + sys.exit(f"Error: Could not get help for conda {command}") replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, 'conda-%s.1' % command.replace(' ', '-')), 'w') as f: + with open(join(manpath, "conda-{}.1".format(command.replace(" ", "-"))), "w") as f: f.write(manpage) - print("Generated manpage for conda %s" % command) + print(f"Generated manpage for conda {command}") def generate_html(command): - command_file = command.replace(' ', '-') + command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen(["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE) + man = Popen(["man", abspath(join(manpath, f"conda-{command_file}.1"))], stdout=PIPE) htmlpage = check_output( [ "man2html", "-bare", # Don't use HTML, HEAD, or BODY tags "title", - "conda-%s" % command_file, + f"conda-{command_file}", "-topm", "0", # No top margin "-botm", "0", # No bottom margin ], - stdin=man.stdout) + stdin=man.stdout, + ) - with open(join(manpath, 'conda-%s.html' % command_file), 'wb') as f: + with open(join(manpath, f"conda-{command_file}.html"), "wb") as f: f.write(htmlpage) - print("Generated html for conda %s" % command) + print(f"Generated html for conda {command}") def write_rst(command, sep=None): - command_file = command.replace(' ', '-') - with open(join(manpath, 'conda-%s.html' % command_file)) as f: + command_file = command.replace(" ", "-") + with open(join(manpath, f"conda-{command_file}.html")) as f: html = f.read() rp = rstpath @@ -212,13 +223,13 @@ def write_rst(command, 
sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, 'conda-%s.rst' % command_file), 'w') as f: + with open(join(rp, f"conda-{command_file}.rst"), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): - f.write(' ') + f.write(" ") f.write(line) - f.write('\n') - print("Generated rst for conda %s" % command) + f.write("\n") + print(f"Generated rst for conda {command}") def main(): @@ -227,21 +238,21 @@ def main(): # let's just hard-code this for now # build_commands = () build_commands = [ - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'inspect channels', - 'inspect linkages', - 'inspect objects', - 'metapackage', - 'render', - 'skeleton', - 'skeleton cpan', - 'skeleton cran', - 'skeleton luarocks', - 'skeleton pypi', + "build", + "convert", + "develop", + "index", + "inspect", + "inspect channels", + "inspect linkages", + "inspect objects", + "metapackage", + "render", + "skeleton", + "skeleton cpan", + "skeleton cran", + "skeleton luarocks", + "skeleton pypi", ] commands = sys.argv[1:] or core_commands + build_commands @@ -258,5 +269,5 @@ def gen_command(command): write_rst(command) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css deleted file mode 100644 index 95805e211b..0000000000 --- a/docs/source/_static/css/custom.css +++ /dev/null @@ -1,83 +0,0 @@ -@import url("theme.css"); - -.wy-nav-content { - padding: 1.618em 3.236em; - height: 100%; - max-width: 1500px; - /* max-width: 800px; */ - margin: auto; - background-color: #ffffff; -} - -.wy-side-nav-search { - /*background color of the top search bar*/ - background-color: #43B02A; -} - -.wy-nav-side { -/* This relates to the entire color of the sidebar */ - background-color:#EEEEEE; -} - -.wy-menu a:hover { - /*background color of text upon hovering*/ - background: #c9c9c9 -} - -.wy-menu-vertical li.on a:hover,.wy-menu-vertical li.current>a:hover { - /*background color of text upon hovering an open list*/ - background: #c9c9c9 -} - -.wy-menu-vertical { - /* text color of expanded menu items in the sidebar */ - color:#414042; -} - -.section h1 { - /*header 1 text color */ - color: #047704; - } - -.rst-content .toctree-wrapper p.caption, h2, h3, h4, h5, h6, legend { - /*text color of rst content and subheads*/ - color: #414042; -} - -.wy-menu-vertical a { - /* Text color of toc */ - color: #025C02; -} - -.wy-nav-content-wrap { - /* background color of wrap around main content*/ - background-color: white; -} - -.section-title { - /*text color of section titles*/ - color:#078E07; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] pre, .rst-content .linenodiv pre { - /*color of code blocks*/ - background-color: #EEEEEE -} - -h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend { - /*font formats*/ - font-family: "Proxima Nova","Helvetica","Arial",sans-serif; -} -.wy-menu-vertical li.toctree-l1.current>a { - /*text color of toctree*/ - color: #025C02; -} - -.toctree-l1:hover { - background-color: #EEEEEE; -} - -.wy-nav-top { - /*color of nav at top when the window is narrow*/ - background: #43B02A; -} diff --git a/docs/source/concepts/channels.rst b/docs/source/concepts/channels.rst index b43680b2b3..275cc19986 100644 --- a/docs/source/concepts/channels.rst +++ b/docs/source/concepts/channels.rst @@ -2,17 +2,7 @@ Conda channels ============== -The ``conda-build`` options ``-c CHANNEL`` or ``--channel CHANNEL`` 
configure additional channels to search for packages. - -These are URLs searched in the order they are given (including file:// for local directories). - -Then, the defaults or channels from ``.condarc`` are searched (unless ``--override-channels`` is given). - -You can use 'defaults' to get the default packages for conda, and 'system' to get the system packages, which also takes ``.condarc`` into account. - -You can also use any name and the ``.condarc`` ``channel_alias`` value will be prepended. The default ``channel_alias`` is http://conda.anaconda.org/. - -The option ``--override-channels`` tells to not search default or ``.condarc`` channels. Requires the ``--channel`` or ``-c`` option. +Conda-build supports standard `conda channel`_ behavior. Identical channel and package name problem @@ -49,11 +39,13 @@ will fail with the following error message (the path will be different): is empty. please request that the channel administrator create `noarch/repodata.json` and associated `noarch/repodata.json.bz2` files. -This happens because ``conda-build`` will consider the directory ``./example/`` in your project as a channel. This is by design due to conda's CI servers, where the build path can be long, complicated, and not predictable prior to build. +This happens because conda-build will consider the directory ``./example/`` in your project +as a channel. This is by design due to conda's CI servers, where the build path can be long, +complicated, and not predictable prior to build. There are several ways to resolve this issue. -#. Use the url of the desired channel: +#. Use the URL of the desired channel: .. code-block:: bash @@ -72,4 +64,7 @@ There are several ways to resolve this issue. conda-build ./conda/ -c example/label/main - which technically is the same as ``-c example``, since ``main`` is the default label, but now it won't by mistake find a channel ``example/label/main`` on the local filesystem. + which technically is the same as `-c example`, since main is the default label, + but now it won't mistakenly find a channel ``example/label/main`` on the local filesystem. + +.. _`conda channel`: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/channels.html diff --git a/docs/source/concepts/generating-index.rst b/docs/source/concepts/generating-index.rst index 7b6a7db51a..b512d9c7e5 100644 --- a/docs/source/concepts/generating-index.rst +++ b/docs/source/concepts/generating-index.rst @@ -29,7 +29,7 @@ Channel layout Parts of a channel ------------------ -* Channeldata.json contains metadata about the channel, including: +* ``channeldata.json`` contains metadata about the channel, including: - What subdirs the channel contains. - What packages exist in the channel and what subdirs they are in. @@ -37,13 +37,13 @@ Parts of a channel * Subdirs are associated with platforms. For example, the linux-64 subdir contains packages for linux-64 systems. -* Repodata.json contains an index of the packages in a subdir. Each subdir will - have it's own repodata. +* ``repodata.json`` contains an index of the packages in a subdir. Each subdir will + have its own repodata. * Channels have packages as tarballs under corresponding subdirs. -channeldata.json ----------------- +``channeldata.json`` +-------------------- .. code-block:: bash @@ -74,8 +74,8 @@ channeldata.json ] } -repodata.json -------------- +``repodata.json`` +----------------- .. 
code-block:: bash @@ -113,18 +113,18 @@ For each subdir: * For all packages that need to be added/updated: * Extract the package to access metadata, including full package name, - mtime, size, and index.json. + file modification time (``mtime``), size, and ``index.json``. * Aggregate package metadata to repodata collection. * Apply repodata hotfixes (patches). -* Compute and save the reduced `current_index.json` index. +* Compute and save the reduced ``current_index.json`` index. Example: Building a channel --------------------------- -To build a local channel and put a package in it, follow the directions below. +To build a local channel and put a package in it, follow the directions below: #. Make the channel directory. @@ -133,7 +133,7 @@ To build a local channel and put a package in it, follow the directions below. $ mkdir local-channel $ cd local-channel -#. Now, download your favorite package. We'll use SciPy in our example. The next steps depend on your platform. +#. Now, download your favorite package. We'll use SciPy in our example. The next steps depend on your platform: #. Windows @@ -144,23 +144,21 @@ To build a local channel and put a package in it, follow the directions below. #. Linux - #. Confirm that you have cURL; if not then install it. + #. Most Linux systems come with ``curl`` pre-installed. Let's install it if you don't already have it. - Most Linux systems come with cURL pre-installed. Let's install it if you don't already have it. - - #. Check if you have cURL + #. Check if you have ``curl``: .. code-block:: bash $ which curl - #. if ``curl`` is not found, then install it: + #. If ``curl`` is not found, then install it: .. code-block:: bash $ conda install curl - #. Create a local copy of this package you want to include in your channel. + #. Create a local copy of the package you want to include in your channel: .. code-block:: bash @@ -183,16 +181,16 @@ To build a local channel and put a package in it, follow the directions below. #. Other - To find the latest SciPy on other platform, go to the `Anaconda Packages file list for SciPy `_. + To find the latest SciPy on a platform not included in the list above, go to the `Anaconda Packages file list for SciPy`_. -#. Run a conda index. This will generate both channeldata.json for the channel and - repodata.json for the linux-64 and osx-64 subdirs, along with some other files. +#. Run a conda index. This will generate both ``channeldata.json`` for the channel and + ``repodata.json`` for the linux-64 and osx-64 subdirs, along with some other files: .. code-block:: bash $ conda index . -#. Check your work by searching the channel. +#. Check your work by searching the channel: .. code-block:: bash @@ -207,29 +205,29 @@ More details behind the scenes Caching package metadata ~~~~~~~~~~~~~~~~~~~~~~~~ -Caching utilizes the existing repodata.json file if it exists. Indexing checks +Caching utilizes the existing ``repodata.json`` file if it exists. Indexing checks which files to update based on which files are new, removed, or changed since -the last repodata.json was created. When a package is new or changed, its +the last ``repodata.json`` was created. When a package is new or changed, its metadata is extracted and cached in the subdir to which the package belongs. The -subfolder is the `.cache` folder. This folder has one file of interest: -`stat.json`, which contains results from the `stat` command for each file. This +subfolder is the ``.cache`` folder. 
This folder has one file of interest: +``stat.json``, which contains results from the ``stat`` command for each file. This is used for understanding when a file has changed and needs to be updated. In each of the other subfolders, the extracted metadata file for each package is -saved as the original package name, plus a `.json` extension. Having these +saved as the original package name, plus a ``.json`` extension. Having these already extracted can save a lot of time in fully re-creating the index, should that be necessary. -An aside: one design goal of the `.conda` package format was to make indexing as +An aside: one design goal of the ``.conda`` package format was to make indexing as fast as possible. To achieve this, the .conda format separates metadata from the -actual package contents. Where the old `.tar.bz2` container required extracting +actual package contents. Where the old ``.tar.bz2`` container required extracting the entire package to obtain the metadata, the new package format allows extraction of metadata without touching the package contents. This allows -indexing speed to be independent of the package size. Large `.tar.bz2` packages +indexing speed to be independent of the package size. Large ``.tar.bz2`` packages can take a very long time to extract and index. It is generally never necessary to manually alter the cache. To force an update/rescan of all cached packages, you can delete the .cache folder, or you -can delete just the `.cache/stat.json` file. Ideally, you could remove only one +can delete just the ``.cache/stat.json`` file. Ideally, you could remove only one package of interest from the cache, but that functionality does not currently exist. @@ -252,7 +250,7 @@ generated on the fly every time the index is generated. That means that any new packages that have been added since the patch python file was last committed will be picked up and will have hotfixes applied to them where appropriate. -Anaconda applies hotfixes by providing a python file to `conda index` that has +Anaconda applies hotfixes by providing a python file to ``conda index`` that has logic on how to alter metadata. Anaconda's repository of hotfixes is at https://github.com/AnacondaRecipes/repodata-hotfixes @@ -262,16 +260,16 @@ Repodata patches applied from a JSON file Unfortunately, you can't always run your python code directly - other people who host your patches may not allow you to run code. What you can do instead is package the patches as .json files. These will clobber the entries in the -repodata.json when they are applied. +``repodata.json`` when they are applied. This is the approach that conda-forge has to take, for example. Their patch creation code is here: https://github.com/conda-forge/conda-forge-repodata-patches-feedstock/tree/main/recipe -What that code does is to download the current repodata.json, then runs their +What that code does is to download the current ``repodata.json``, then runs their python logic to generate the patch JSON file. Those patches are placed into a location where Anaconda's mirroring tools will find them and apply them to -conda-forge's repodata.json at mirroring time. +conda-forge's ``repodata.json`` at mirroring time. The downside here is that this JSON file is only as new as the last time that the repodata-patches feedstock last generated a package. Any new packages that @@ -284,20 +282,23 @@ Trimming to "current" repodata The number of packages available is always growing. That means conda is always having to do more and more work. 
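The "clobbering" described above can be pictured as a plain dictionary update
over the per-file entries in ``repodata.json``. The snippet below is schematic
only; the real patch files and the tooling that applies them carry more
structure:

.. code-block:: python

    # schematic: a patched record simply replaces the original entry
    repodata = {
        "packages": {
            "mypkg-1.0-0.tar.bz2": {"depends": ["python >=3.8"], "subdir": "linux-64"},
        }
    }
    patch = {
        "packages": {
            "mypkg-1.0-0.tar.bz2": {"depends": ["python >=3.8,<3.12"], "subdir": "linux-64"},
        }
    }
    for fn, fixed in patch["packages"].items():
        repodata["packages"][fn] = fixed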
To slow down this growth, in conda 4.7, we -added the ability to have alternate repodata.json files that may represent a -subset of the normal repodata.json. One in particular is -`current_repodata.json`, which represents: +added the ability to have alternate ``repodata.json`` files that may represent a +subset of the normal ``repodata.json``. One in particular is +``current_repodata.json``, which represents: 1. the latest version of each package 2. any earlier versions of dependencies needed to make the latest versions satisfiable -current_repodata.json also keeps only one file type: `.conda` where it is -available, and `.tar.bz2` where only `.tar.bz2` is available. +``current_repodata.json`` also keeps only one file type: ``.conda`` where it is +available, and ``.tar.bz2`` where only ``.tar.bz2`` is available. For Anaconda's defaults "main" channel, the current_repodata.json file is -approximately 1/7 the size of repodata.json. This makes downloading the repodata +approximately 1/7 the size of ``repodata.json``. This makes downloading the repodata faster, and it also makes loading the repodata into its python representation faster. For those interested in how this is achieved, please refer to the code at https://github.com/conda/conda-build/blob/90a6de55d8b9e36fc4a8c471b566d356e07436c7/conda_build/index.py#L695-L737 + + +.. _`Anaconda Packages file list for SciPy`: https://anaconda.org/anaconda/scipy/files diff --git a/docs/source/concepts/index.rst b/docs/source/concepts/index.rst index 57d53a490b..f2934b5d21 100644 --- a/docs/source/concepts/index.rst +++ b/docs/source/concepts/index.rst @@ -23,8 +23,8 @@ What is a “package”? * You can use conda-build to build a conda package. -What about channels -------------------- +What about channels? +-------------------- * Channels contain packages. @@ -38,11 +38,3 @@ What about channels * conda is able to install from channels and uses the indexes in the channel to solve for requirements and dependencies. - -Building Anaconda installers ----------------------------- - -* Anaconda(/Miniconda) installers are built with a modified version of constructor. - -* The idea is to build an Anaconda metapackage and bundle it together with some - other packages to build an Anaconda installer. diff --git a/docs/source/concepts/package-naming-conv.rst b/docs/source/concepts/package-naming-conv.rst index 0c6e936f8f..5d3f20f538 100644 --- a/docs/source/concepts/package-naming-conv.rst +++ b/docs/source/concepts/package-naming-conv.rst @@ -3,67 +3,35 @@ Package naming conventions ========================== To facilitate communication and documentation, conda observes the -package naming conventions listed below. +package naming conventions listed below: -.. _package_name: -.. index:: - pair: terminology; package name - seealso: name; package name - -Package name -============ - -The name of a package, without any reference to a particular -version. Conda package names are normalized and they may contain -only lowercase alpha characters, numeric digits, underscores, -hyphens, or dots. In usage documentation, these are referred to -by ``package_name``. - -.. _package_version: -.. index:: - pair: terminology; package version - seealso: name; package version - -Package version -=============== - -A version number or string, often similar to ``X.Y`` or -``X.Y.Z``, but it may take other forms as well. - -.. _build_string: -.. 
index:: - pair: terminology; build string - seealso: name; build string - -Build string -============ - -An arbitrary string that identifies a particular build of a -package for conda. It may contain suggestive mnemonics, but -these are subject to change, and you should not rely on it or try -to parse it for any specific information. - -.. _canonical_name: -.. index:: - pair: terminology; canonical name - seealso: name; canonical name +.. glossary:: -Canonical name -============== + Package name + The name of a package, without any reference to a particular + version. Conda package names are normalized and they may contain + only lowercase alpha characters, numeric digits, underscores, + hyphens, or dots. In usage documentation, these are referred to + by ``package_name``. -The package name, version, and build string joined together by -hyphens---name-version-buildstring. In usage documentation, these -are referred to by ``canonical_name``. + Package version + A version number or string, often similar to ``X.Y`` or + ``X.Y.Z``, but it may take other forms as well. -.. _filename: -.. index:: - pair: terminology; filename + Build string + An arbitrary string that identifies a particular build of a + package for conda. It may contain suggestive mnemonics, but + these are subject to change, and you should not rely on it or try + to parse it for any specific information. -Filename -======== + Canonical name + The package name, version, and build string joined together by + hyphens: name-version-buildstring. In usage documentation, these + are referred to by ``canonical_name``. -Conda package filenames are canonical names, plus the suffix -``.tar.bz2`` or ``.conda``. + Filename + Conda package filenames are canonical names, plus the suffix + ``.tar.bz2`` or ``.conda``. The following figure compares a canonical name to a filename: @@ -76,8 +44,7 @@ The following figure compares a canonical name to a filename: Conda supports both ``.conda`` and ``.tar.bz2`` package extensions. The ``.conda`` format is generally smaller and more efficient than ``.tar.bz2`` packages. -Read our `blog post `_ -about it to learn more. +Read our `blog post`_ about it to learn more. The build string is created as the package is built. Things that contribute to it are the variants specified either by the command @@ -93,13 +60,16 @@ then the build string is the build number that is specified in the recipe. Package specification ===================== -A package name together with a package version---which may be -partial or absent---joined by an equal sign. +A package name together with a package version — which may be +partial or absent — joined by an equal sign. -EXAMPLES: +Examples: * ``python=2.7.3`` * ``python=2.7`` * ``python`` In usage documentation, these are referred to by ``package_spec``. + + +.. _`blog post`: https://www.anaconda.com/understanding-and-improving-condas-performance/ diff --git a/docs/source/concepts/recipe.rst b/docs/source/concepts/recipe.rst index 7410a0dc11..170f74f2ca 100644 --- a/docs/source/concepts/recipe.rst +++ b/docs/source/concepts/recipe.rst @@ -2,28 +2,24 @@ Conda-build recipes =================== -.. contents:: - :local: - :depth: 2 - -To enable building `conda packages `_, :ref:`install and update conda +To enable building `conda packages`_, :ref:`install and update conda and conda-build `. Building a conda package requires a recipe. 
A conda-build recipe is a flat directory that contains the following files: -* ``meta.yaml``---A file that contains all the metadata in the +* ``meta.yaml`` — A file that contains all the metadata in the recipe. Only ``package/name`` and ``package/version`` are required. -* ``build.sh``---The script that installs the files for the +* ``build.sh`` — The script that installs the files for the package on macOS and Linux. It is executed using the ``bash`` command. -* ``bld.bat``---The build script that installs the files for the +* ``bld.bat`` — The build script that installs the files for the package on Windows. It is executed using ``cmd``. -* ``run_test.[py,pl,sh,bat]``---An optional Python test file, a +* ``run_test.[py,pl,sh,bat]`` — An optional Python test file, a test script that runs automatically if it is part of the recipe. * Optional patches that are applied to the source. @@ -32,10 +28,19 @@ is a flat directory that contains the following files: be generated by the build scripts. Examples are icon files, readme files and build notes. +Review :doc:`../resources/define-metadata` to see a breakdown of the +components of a recipe, including: + + * Package name + * Package version + * Descriptive metadata + * Where to obtain source code + * How to test the package + .. tip:: When you use the :ref:`conda skeleton ` command, - the first 3 files---``meta.yaml``, ``build.sh``, and - ``bld.bat``---are automatically generated for you. + the first 3 files — ``meta.yaml``, ``build.sh``, and + ``bld.bat`` — are automatically generated for you. Conda-build process =================== @@ -60,25 +65,23 @@ Conda-build performs the following steps: source directory with environment variables set. The build script installs into the build environment. -#. Performs some necessary post-processing steps, such as shebang - and rpath. +#. Performs some necessary post-processing steps, such as adding a shebang + and ``rpath``. #. Creates a conda package containing all the files in the build environment that are new from step 5, along with the necessary conda package metadata. -#. Tests the new conda package if the recipe includes tests: +#. Tests the new conda package — if the recipe includes tests — by doing the following: - #. Deletes the build environment and source directory to ensure that the new conda package does not inadvertantly depend on artifacts not included in the package. + * Deletes the build environment and source directory to ensure that the new conda package does not inadvertantly depend on artifacts not included in the package. - #. Creates a test environment with the package and its - dependencies. + * Creates a test environment with the package and its dependencies. - #. Runs the test scripts. + * Runs the test scripts. -The `conda-recipes -`_ repo -contains example recipes for many conda packages. +The archived `conda-recipes`_ repo, `AnacondaRecipes`_ aggregate repo, +and `conda-forge`_ feedstocks repo contain example recipes for many conda packages. .. caution:: All recipe files, including ``meta.yaml`` and build @@ -87,8 +90,7 @@ contains example recipes for many conda packages. such as passwords into recipes where it could be made public. The ``conda skeleton`` command can help to make -skeleton recipes for common repositories, such as `PyPI -`_. +skeleton recipes for common repositories, such as PyPI_. Deep dive @@ -101,12 +103,12 @@ Templates --------- When you build a conda package, conda-build renders the package -by reading a template in the meta.yaml. 
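The pipeline described above is also exposed through conda-build's Python API,
so the command-line steps can be scripted. A brief sketch, where
``./my-recipe`` stands in for any recipe directory:

.. code-block:: python

    from conda_build import api

    # render first to see which builds the recipe and its variants expand to
    for metadata, _, _ in api.render("./my-recipe"):
        print(metadata.dist())

    # then build; the return value lists the package files that were created
    print(api.build("./my-recipe"))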
See :ref:`jinja-templates`. +by reading a template in the ``meta.yaml``. See :ref:`jinja-templates`. -Templates are filled in using your conda-build config, +Templates are filled in using your ``conda build config``, which shows the matrix of things to build against. The ``conda build config`` determines how many builds it has to do. -For example, defining a conda_build_config.yaml of the form +For example, defining a ``conda_build_config.yaml`` of the form and filling it defines a matrix of 4 packages to build:: foo: @@ -129,9 +131,9 @@ and install all of the build and run dependencies in that environment. Conda-build will indicate where you can successfully build the package. The prefix will take the form:: - /conda-bld//h_env_placeholder… + /conda-bld//h_env_placeholder… -`Conda-forge `_ downloads your package source and then builds the conda +Conda-build downloads your package source and then builds the conda package in the context of the build environment. For example, you may direct it to download from a Git repo or pull down a tarball from another source. See the :ref:`source-section` for more information. @@ -149,64 +151,61 @@ Building Once the content is downloaded, conda-build runs the build step. See the :ref:`meta-build` for more information. The build step runs a script. It can be one that you provided. -See the :ref:`build-script` section for more information. +See the :ref:`build-script` section for more information on this topic. If you do not define the script section, then you can create a -build.sh or a bld.bat file to be run. +``build.sh`` or a ``bld.bat`` file to be run. Prefix replacement ------------------ -When the build environment is created, it is in a placeholder prefix. -When the package is all bundled up, the prefix is set to a dummy prefix. -When conda is ready to install the package, it rewrites the dummy -prefix with the correct one. +The build environment is created in a placeholder prefix. +When the package is bundled, the prefix is set to a "dummy" prefix. +Once conda is ready to install the package, it rewrites the dummy +prefix with the final one. Testing ------- -Once a package is built, conda-build will test it. To do this, it +Once a package is built, conda-build has the ability to test it. To do this, it creates another environment and installs the conda package. The form of this prefix is:: - /conda-bld//_test_env_placeholder… + /conda-bld//_test_env_placeholder… -At this point, conda-build has all of the info from the meta.yaml about +At this point, conda-build has all of the information from ``meta.yaml`` about what its runtime dependencies are, so those dependencies are installed as well. This generates a test runner script with a reference to the -testing meta.yaml that is created. See the :ref:`meta-test` for +testing ``meta.yaml`` that is created. See the :ref:`meta-test` for more information. That file is run for testing. Output metadata --------------- After the package is built and tested, conda-build cleans up the -environments created prior and outputs the metadata. The recipe for +environments created during prior steps and outputs the metadata. The recipe for the package is also added in the output metadata. The metadata directory -is on the top level of the tarball in the ``info`` directory. +is at the top level of the package contents in the ``info`` directory. The metadata contains information about the dependencies of the package and a list of where all of the files in the package go when it is installed. 
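Because this metadata travels inside the package, it can be inspected with
nothing more than the standard library. An illustrative snippet, where
``mypkg-1.0-0.tar.bz2`` stands in for any ``.tar.bz2`` package produced by
conda-build:

.. code-block:: python

    import json
    import tarfile

    with tarfile.open("mypkg-1.0-0.tar.bz2", "r:bz2") as pkg:
        index = json.load(pkg.extractfile("info/index.json"))  # name, version, depends, ...
        files = pkg.extractfile("info/files").read().decode().splitlines()

    print(index["name"], index["version"], index["depends"])
    print(len(files), "files will be placed into the installation prefix")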
Conda reads that metadata when it needs to install. Running ``conda install`` causes conda to: - * reach out to the repo data containing the dependencies, - * guess the right dependencies, - * install a list of packages, - * unpack the tarball to look at the info, - * verify the file based on metadata in the package, and then - * go through each file in the package and put it in the right location. - - -More information -================ - -Review :doc:`../resources/define-metadata` to see a breakdown of the -components of a recipe, including: - - * Package name. - * Package version. - * Descriptive metadata. - * Where to obtain source code. - * How to test the package. +#. Reach out to the repodata containing the dependencies for the package(s) you are installing. +#. Determine the correct dependencies. +#. Install a list of additional packages determined by those dependencies. +#. For each dependency package being installed: + #. Unpack the tarball to look at the information contained within. + #. Verify the file based on metadata in the package. + #. Go through each file in the package and put it in the right location. + +For additional information on ``conda install``, please visit the conda documentation `deep dive`_ page on that topic. + +.. _`conda packages`: https://conda.io/projects/conda/en/latest/user-guide/concepts/packages.html +.. _`conda-recipes`: https://github.com/continuumio/conda-recipes +.. _`AnacondaRecipes`: https://github.com/AnacondaRecipes/aggregate +.. _`conda-forge`: https://github.com/conda-forge/feedstocks/tree/main/feedstocks +.. _PyPI: https://pypi.python.org/pypi +.. _`deep dive`: https://docs.conda.io/projects/conda/en/stable/dev-guide/deep-dives/install.html diff --git a/docs/source/conf.py b/docs/source/conf.py index c654dff3e8..8680d7451f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,21 +19,18 @@ sys.path.insert(0, os.path.abspath("../..")) +import conda_build # noqa: E402 -os.chdir("../..") -import versioneer # noqa: E402 - -version = versioneer.get_versions()["version"] - -os.chdir("docs") # -- Project information ----------------------------------------------------- project = "conda-build" copyright = "2018, Anaconda, Inc." author = "Anaconda, Inc." +# The short X.Y version. +version = conda_build.__version__ or "dev" # The full version, including alpha/beta/rc tags -release = version +release = conda_build.__version__ or "dev" # -- General configuration --------------------------------------------------- @@ -53,6 +50,7 @@ "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx_sitemap", + "sphinx_design", ] myst_heading_anchors = 3 @@ -91,7 +89,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -107,23 +105,62 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme = "conda_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -# html_theme_options = {} +html_theme_options = { + # The maximum depth of the table of contents tree. Set this to -1 to allow + # unlimited depth. 
+ "navigation_depth": -1, + "show_prev_next": False, + # Navbar icon links + "navbar_start": ["navbar-logo"], + "use_edit_page_button": True, + "goatcounter_url": "https://docs-conda-io.goatcounter.com/count", + "icon_links": [ + { + "name": "GitHub", + "url": "https://github.com/conda/conda-build", + "icon": "fa-brands fa-square-github", + "type": "fontawesome", + }, + { + "name": "Element", + "url": "https://matrix.to/#/#conda-build:matrix.org", + "icon": "_static/element_logo.svg", + "type": "local", + }, + { + "name": "Discourse", + "url": "https://conda.discourse.group/", + "icon": "fa-brands fa-discourse", + "type": "fontawesome", + }, + ], +} + +html_context = { + "github_user": "conda", + "github_repo": "conda-build", + "github_version": "main", + "doc_path": "docs/source", + "goatcounter_dashboard_url": "https://docs-conda-io.goatcounter.com", +} html_short_title = "conda-build" -html_show_sourcelink = False -html_favicon = "conda-logo.png" +# html_show_sourcelink = False html_extra_path = ["robots.txt"] # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +# html_static_path = ["_static"] + +# Custom CSS rules +# html_style = "css/custom.css" # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -209,11 +246,9 @@ # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"https://docs.python.org/": None} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # -- Options for todo extension ---------------------------------------------- # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True - -html_style = "css/custom.css" diff --git a/docs/source/index.rst b/docs/source/index.rst index 6f80b10bb0..12e4d9fdcc 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,10 +1,8 @@ -.. _index: - Conda-build documentation ========================= -Conda-build contains commands and tools to use conda to build your -own packages. +Conda-build contains commands and tools to build your own +conda packages. It also provides helpful tools to constrain or pin versions in recipes. Building a conda package requires :doc:`installing conda-build ` and @@ -14,12 +12,11 @@ from the conda recipe. You can build conda packages from a variety of source code projects, most notably Python. For help packing a Python project, -see the `packaging.python.org tutorial - `_. +see the `packaging.python.org tutorial`_. OPTIONAL: If you are planning to upload your packages to -Anaconda Cloud, you will need an -`Anaconda Cloud `_ account and client. +`anaconda.org`_, you will need to make an anaconda.org account and +`install the Anaconda client`_. .. toctree:: :maxdepth: 1 @@ -30,3 +27,8 @@ Anaconda Cloud, you will need an resources/index release-notes contributing-guide + + +.. _`packaging.python.org tutorial`: https://packaging.python.org/en/latest/tutorials/packaging-projects +.. _`anaconda.org`: https://anaconda.org +.. 
_`install the Anaconda client`: https://docs.anaconda.com/anaconda/install/ diff --git a/docs/source/install-conda-build.rst b/docs/source/install-conda-build.rst index e8cc6c9425..dd52e930ff 100644 --- a/docs/source/install-conda-build.rst +++ b/docs/source/install-conda-build.rst @@ -9,6 +9,42 @@ To enable building conda packages: * update conda and conda-build +.. _-conda-build-wow: + +Way of working +============== + +For proper functioning, it is strongly recommended to install conda-build in +the conda ``base`` environment. Not doing so may lead to problems. + +Explanation +----------- + +With earlier conda / conda-build versions, it was possible to build packages in +your own defined environment, e.g. ``my_build_env``. This was partly driven by +the general conda recommendation not to use the ``base`` env for normal work; +see `Conda Managing Environments`_ for instance. However, conda-build is better +viewed as part of the conda infrastructure, and not as a normal package. Hence, +installing it in the ``base`` env makes more sense. More information: +`Must conda-build be installed in the base envt?`_ + +Other considerations +-------------------- + +* An installation or update of conda-build (in fact, of any package) in the ``base`` + environment needs to be run from an account with the proper permissions + (i.e., the same permissions as were used to install conda and the base env in + the first place via the Miniconda or Anaconda installers). For example, on + Windows that might mean an account with administrator privileges. + +* `conda-verfiy`_ is a useful package that can also be added to the base + environment in order to remove some warnings generated when conda-build runs. + +* For critical CI/CD projects, you might want to pin to an explicit (but recent) + version of conda-build, i.e. only update to a newer version of conda-build + and conda once they have been first verified "offline". + + .. _install-conda-build: Installing conda-build @@ -18,6 +54,7 @@ To install conda-build, in your terminal window or an Anaconda Prompt, run: .. code-block:: bash + conda activate base conda install conda-build @@ -31,8 +68,14 @@ To update conda and conda-build, in your terminal window or an Anaconda Prompt, .. code-block:: bash + conda activate base conda update conda conda update conda-build For release notes, see the `conda-build GitHub page `_. + + +.. _`Conda Managing Environments`: https://conda.io/projects/conda/en/latest/user-guide/getting-started.html#managing-environments +.. _`conda-verfiy`: https://github.com/conda/conda-verify +.. _`Must conda-build be installed in the base envt?`: https://github.com/conda/conda-build/issues/4995 diff --git a/docs/source/resources/commands/conda-build.rst b/docs/source/resources/commands/conda-build.rst index 7c79ee7c7d..1a8f39dab7 100644 --- a/docs/source/resources/commands/conda-build.rst +++ b/docs/source/resources/commands/conda-build.rst @@ -145,6 +145,9 @@ conda-build Run the post-build logic. Implies --no-test and --noana- conda-upload. + -p, --test-run-post + Run the post-build logic during testing. + --skip-existing Skip recipes for which there already exists an existing build (locally or in the channels). @@ -176,20 +179,36 @@ conda-build Disable force upload to anaconda.org, preventing overwriting any existing packages + --zstd-compression-level {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22} + When building v2 packages, set the compression level used by + conda-package-handling. Defaults to 19. 
Note that using levels + above 19 is not advised due to high memory consumption. + --no-activate do not activate the build and test envs; just prepend to PATH --no-build-id do not generate unique build folder names. Use if having issues - with paths being too long. + with paths being too long. Deprecated, please use --build-id-pat + instead + + --build-id-pat BUILD_ID_PAT + specify a templated pattern to use as build folder names. Use if + having issues with paths being too long. --croot CROOT Build root folder. Equivalent to CONDA_BLD_PATH, but applies only to this call of conda-build. --verify + run verification on recipes or packages when building + + --no-verify do not run verification on recipes or packages when building + --strict-verify + Exit if any conda-verify check fail, instead of only printing them + --output-folder OUTPUT_FOLDER folder to dump output package to. Package are moved here if build or test succeeds. Destination folder must exist prior to @@ -236,6 +255,16 @@ conda-build built packages. This is currently the default behavior, but will change in conda-build 4.0. + --error-overdepending + Enable error when packages with names beginning lib or which have + run_exports are not auto-loaded by the OSes DSO loading mechanism + by any of the files in this package. + + --no-error-overdepending + Disable error when packages with names beginning lib or which have + run_exports are not auto-loaded by the OSes DSO loading mechanism + by any of the files in this package. + --long-test-prefix Use a long prefix for the test prefix, as well as the build pre- fix. Affects only Linux and Mac. Prefix length matches the diff --git a/docs/source/resources/compiler-tools.rst b/docs/source/resources/compiler-tools.rst index d206d1c947..d4832b5a0c 100644 --- a/docs/source/resources/compiler-tools.rst +++ b/docs/source/resources/compiler-tools.rst @@ -394,6 +394,71 @@ not available. You'd need to create a metapackage ``m2w64-gcc_win-64`` to point at the ``m2w64-gcc`` package, which does exist on the msys2 channel on `repo.anaconda.com `_. +Expressing the relation between compiler and its standard library +================================================================= + +For most languages, certainly for "c" and for "cxx", compiling any given +program *may* create a run-time dependence on symbols from the respective +standard library. For example, the standard library for C on linux is generally +``glibc``, and a core component of your operating system. Conda is not able to +change or supersede this library (it would be too risky to try to). A similar +situation exists on MacOS and on Windows. + +Compiler packages usually have two ways to deal with this dependence: + +* assume the package must be there (like ``glibc`` on linux). +* always add a run-time requirement on the respective stdlib (e.g. ``libcxx`` + on MacOS). + +However, even if we assume the package must be there, the information about the +``glibc`` version is still a highly relevant piece of information, which is +also why it is reflected in the ``__glibc`` +`virtual package `_. + +For example, newer packages may decide over time to increase the lowest version +of ``glibc`` that they support. We therefore need a way to express this +dependence in a way that conda will be able to understand, so that (in +conjunction with the ``__glibc`` virtual package) the environment resolver will +not consider those packages on machines whose ``glibc`` version is too old. 
+ +The way to do this is to use the Jinja2 function ``{{ stdlib('c') }}``, which +matches ``{{ compiler('c') }}`` in as many ways as possible. Let's start again +with the ``conda_build_config.yaml``:: + + c_stdlib: + - sysroot # [linux] + - macosx_deployment_target # [osx] + c_stdlib_version: + - 2.17 # [linux] + - 10.13 # [osx] + +In the recipe we would then use:: + + requirements: + build: + - {{ compiler('c') }} + - {{ stdlib('c') }} + +This would then express that the resulting package requires ``sysroot ==2.17`` +(corresponds to ``glibc``) on linux and ``macosx_deployment_target ==10.13`` on +MacOS in the build environment, respectively. How this translates into a +run-time dependence can be defined in the metadata of the respective conda +(meta-)package which represents the standard library (i.e. those defined under +``c_stdlib`` above). + +In this example, ``sysroot 2.17`` would generate a run-export on +``__glibc >=2.17`` and ``macosx_deployment_target 10.13`` would similarly +generate ``__osx >=10.13``. This way, we enable packages to define their own +expectations about the standard library in a unified way, and without +implicitly depending on some global assumption about what the lower version +on a given platform must be. + +In principle, this facility would make it possible to also express the +dependence on separate stdlib implementations (like ``musl`` instead of +``glibc``), or to remove the need to assume that a C++ compiler always needs to +add a run-export on the C++ stdlib -- it could then be left up to packages +themselves whether they need ``{{ stdlib('cxx') }}`` or not. + Anaconda compilers implicitly add RPATH pointing to the conda environment ========================================================================= diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 904688f305..83ba151382 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -4,11 +4,6 @@ Defining metadata (meta.yaml) ============================= -.. contents:: - :local: - :depth: 1 - - All the metadata in the conda-build recipe is specified in the ``meta.yaml`` file. See the example below: @@ -121,6 +116,18 @@ If an extracted archive contains only 1 folder at its top level, its contents will be moved 1 level up, so that the extracted package contents sit in the root of the work folder. +You can also specify multiple URLs for the same source archive. +They will be attempted in order, should one fail. + +.. code-block:: yaml + + source: + url: + - https://archive.linux.duke.edu/cran/src/contrib/ggblanket_6.0.0.tar.gz + - https://archive.linux.duke.edu/cran/src/contrib/Archive/ggblanket/ggblanket_6.0.0.tar.gz + sha256: cd2181fe3d3365eaf36ff8bbbc90ea9d76c56d40e63386b4eefa0e3120ec6665 + + Source from git --------------- @@ -130,7 +137,7 @@ The git_url can also be a relative path to the recipe directory. source: git_url: https://github.com/ilanschnell/bsdiff4.git - git_rev: 1.1.4 + git_rev: 1.1.4 # (Defaults to "HEAD") git_depth: 1 # (Defaults to -1/not shallow) The depth argument relates to the ability to perform a shallow clone. 
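Because ``git_url`` can be a relative path, a recipe can also build from a local
checkout, which is convenient while developing. A minimal sketch, assuming the recipe
lives in a ``conda.recipe`` subdirectory of the project being packaged:

.. code-block:: yaml

    source:
      git_url: ../        # relative path from the recipe directory to the local git checkout
      git_rev: HEAD       # the default; a tag, branch, or commit hash may be given instead

The same ``git_rev`` and ``git_depth`` options shown above apply to relative paths as well.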
@@ -162,9 +169,22 @@ Source from svn source: svn_url: https://github.com/ilanschnell/bsdiff - svn_rev: 1.1.4 + svn_rev: 1.1.4 # (defaults to head) svn_ignore_externals: True # (defaults to False) + svn_username: username # Optional, if set must also have svn_password + svn_password: password # Optional, if set must also have svn_username + +To access a restricted SVN repository, specify both ``svn_username`` and ``svn_password``. + +.. caution:: + Storing credentials in plaintext carries risks. Alternatively, consider + using environment variables: + + .. code-block:: yaml + source: + svn_username: {{ environ["SVN_USERNAME"] }} + svn_password: {{ environ["SVN_PASSWORD"] }} Source from a local path ------------------------- @@ -314,6 +334,22 @@ OR * package uses {{ compiler() }} jinja2 function +You can also influence which variables are considered for the hash with: + +.. code-block:: yaml + + build: + force_use_keys: + - package_1 + force_ignore_keys: + - package_2 + +This will ensure that the value of ``package_2`` will *not* be considered for the hash, +and ``package_1`` *will* be, regardless of what conda-build discovers is used by its inspection. + +This may be useful to further split complex multi-output builds, to ensure each package is built, +or to ensure the right package hash when using more complex templating or scripting. + Python entry points ------------------- @@ -712,7 +748,7 @@ implicitly added by host requirements (e.g. libpng exports libpng), and with - libpng Here, because no specific kind of ``run_exports`` is specified, libpng's ``run_exports`` -are considered "weak." This means they will only apply when libpng is in the +are considered "weak". This means they will only apply when libpng is in the host section, when they will add their export to the run section. If libpng were listed in the build section, the ``run_exports`` would not apply to the run section. @@ -724,6 +760,9 @@ listed in the build section, the ``run_exports`` would not apply to the run sect strong: - libgcc +There is also ``run_exports/weak`` which is equivalent to an unspecific kind of +``run_exports`` but useful if you want to define both strong and weak run exports. + Strong ``run_exports`` are used for things like runtimes, where the same runtime needs to be present in the host and the run environment, and exactly which runtime that should be is determined by what's present in the build section. @@ -965,9 +1004,10 @@ words, a Python package would list ``python`` here and an R package would list The PREFIX environment variable points to the host prefix. With respect to activation during builds, both the host and build environments are activated. -The build prefix is activated before the host prefix so that the host prefix -has priority over the build prefix. Executables that don't exist in the host -prefix should be found in the build prefix. +The build prefix is activated *after* the host prefix so that the build prefix, +which always contains native executables for the running platform, has priority +over the host prefix, which is not guaranteed to provide native executables (e.g. +when cross-compiling). As of conda-build 3.1.4, the build and host prefixes are always separate when both are defined, or when ``{{ compiler() }}`` Jinja2 functions are used. The @@ -1097,7 +1137,8 @@ Test files ---------- Test files that are copied from the recipe into the temporary -test directory and are needed during testing. +test directory and are needed during testing. 
If providing a path, +forward slashes must be used. .. code-block:: yaml @@ -1450,10 +1491,9 @@ explicitly in the script section: script: run_test.py -Test requirements for subpackages are not supported. Instead, -subpackage tests install their runtime requirements---but not the -run requirements for the top-level package---and the test-time -requirements of the top-level package. +Test requirements for subpackages can be specified using the optional +`test/requires` section of subpackage tests. Subpackage tests install +their runtime requirements during the test as well. EXAMPLE: In this example, the test for ``subpackage-name`` installs ``some-test-dep`` and ``subpackage-run-req``, but not @@ -1465,16 +1505,15 @@ installs ``some-test-dep`` and ``subpackage-run-req``, but not run: - some-top-level-run-req - test: - requires: - - some-test-dep - outputs: - name: subpackage-name requirements: - subpackage-run-req test: script: run_test.py + requires: + - some-test-dep + Output type @@ -1532,9 +1571,17 @@ information displays in the Anaconda.org channel. about: home: https://github.com/ilanschnell/bsdiff4 - license: BSD + license: BSD 3-Clause license_file: LICENSE - summary: binary diff and patch using the BSDIFF4-format + license_family: BSD + license_url: https://github.com/bacchusrx/bsdiff4/blob/master/LICENSE + summary: binary diff and patch using the BSDIFF4 format + description: | + This module provides an interface to the BSDIFF4 format, command line interfaces + (bsdiff4, bspatch4) and tests. + dev_url: https://github.com/ilanschnell/bsdiff4 + doc_url: https://bsdiff4.readthedocs.io + doc_source_url: https://github.com/ilanschnell/bsdiff4/blob/main/README.rst License file @@ -1721,7 +1768,7 @@ practice means changing the conda-build source code. See the `_. For more information, see the `Jinja2 template -documentation `_ +documentation `_ and :ref:`the list of available environment variables `. @@ -1734,11 +1781,11 @@ retrieve a fully rendered ``meta.yaml``, use the Loading data from other files ----------------------------- -There are several additional functions available to Jinja2 which can be used +There are several additional functions available to Jinja2, which can be used to load data from other files. These are ``load_setup_py_data``, ``load_file_regex``, ``load_file_data``, and ``load_str_data``. -* ``load_setup_py_data``: Loads data from a ``setup.py`` file. This can be useful to +* ``load_setup_py_data``: Load data from a ``setup.py`` file. This can be useful to obtain metadata such as the version from a project's ``setup.py`` file. For example:: {% set data = load_setup_py_data() %} @@ -1747,16 +1794,23 @@ to load data from other files. These are ``load_setup_py_data``, ``load_file_reg name: foo version: {{ version }} -* ``load_file_regex``: Searches a file for a regular expression and returns the - first match as a Python ``re.Match object``. For example:: +* ``load_file_regex``: Search a file for a regular expression returning the + first match as a Python `re.Match + `_ object. 
+ + For example, using ``load_file_regex(load_file, regex_pattern, from_recipe_dir=False) -> re.Match | None``:: + + {% set version_match = load_file_regex( + load_file="conda_package_streaming/__init__.py", + regex_pattern='^__version__ = "(.+)"') %} + {% set version = version_match[1] %} - {% set readme_heading = load_file_regex(load_file='README.rst', regex_pattern=r'^# (\S+)') %} package: - name: {{ readme_heading.string }} + version: {{ version }} -* ``load_file_data``: You can also parse JSON, TOML, or YAML files and load data - from them. For example you can use this to load poetry configurations from - ``pyproject.toml``. This is especially useful as ``setup.py`` is no longer the +* ``load_file_data``: Parse JSON, TOML, or YAML files and load data + from them. For example, you can use this to load poetry configurations from + ``pyproject.toml``. This is especially useful, as ``setup.py`` is no longer the only standard way to define project metadata (see `PEP 517 `_ and `PEP 518 `_):: @@ -1767,7 +1821,7 @@ to load data from other files. These are ``load_setup_py_data``, ``load_file_reg name: {{ poetry.get('name') }} version: {{ poetry.get('version') }} -* ``load_str_data``: Loads and parses data from a string. This is similar to +* ``load_str_data``: Load and parse data from a string. This is similar to ``load_file_data``, but it takes a string instead of a file as an argument. This may seem pointless at first, but you can use this to pass more complex data structures by environment variables. For example:: @@ -1901,10 +1955,10 @@ variables are booleans. * - osx - True if the platform is macOS. * - arm64 - - True if the platform is macOS and the Python architecture - is arm64. + - True if the platform is either macOS or Windows and the + Python architecture is arm64. * - unix - - True if the platform is either macOS or Linux. + - True if the platform is either macOS or Linux or emscripten. * - win - True if the platform is Windows. * - win32 @@ -1938,6 +1992,11 @@ The use of the Python version selectors, `py27`, `py34`, etc. is discouraged in favor of the more general comparison operators. Additional selectors in this series will not be added to conda-build. +Note that for each subdir with OS and architecture that `conda` supports, +two preprocessing selectors are created for the OS and the architecture separately +except when the architecture is not a valid python expression (`*-32` and `*-64` +in particular). + Because the selector is any valid Python expression, complicated logic is possible: diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index 601b79b6dd..90c09289aa 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -2,18 +2,26 @@ Conda package specification =========================== -.. contents:: - :local: - :depth: 1 - -A conda package is a bzipped tar archive---.tar.bz2---that -contains: +A conda package is an archive file that contains: * Metadata under the ``info/`` directory. * A collection of files that are installed directly into an install prefix. -The format is identical across platforms and operating systems. +There are currently two formats of archives that are supported: + +.. list-table:: + :widths: 15 70 + + * - **Type** + - **Description** + + * - .tar.bz2 + - The original format of conda packages. Is the default output of conda-build. + * - .conda + - 2nd Gen. This is a more compact and thus faster. 
Can be outputed from conda-build by setting output in ``.condarc`` file. + +The formats are identical across platforms and operating systems. During the install process, all files are extracted into the install prefix, with the exception of the ones in ``info/``. Installing a conda package into an environment is similar to @@ -47,10 +55,10 @@ file is stored in ``repodata.json``, which is the repository index file, hence the name ``index.json``. The JSON object is a dictionary containing the keys shown below. The filename of the conda package is composed of the first 3 values, as in: -``--.tar.bz2``. +``--.tar.bz2`` or ``--.conda``. .. list-table:: - :widths: 15 15 70 + :widths: 15 15 45 * - **Key** - **Type** @@ -64,8 +72,7 @@ conda package is composed of the first 3 values, as in: * - version - string - The package version. May not contain "-". Conda - acknowledges `PEP 440 - `_. + acknowledges `PEP 440 `_. * - build - string @@ -84,15 +91,13 @@ conda package is composed of the first 3 values, as in: * - build_number - integer - - A non-negative integer representing the build number of - the package. + - A non-negative integer representing the build number of the package. + + Unlike the build string, the ``build_number`` is inspected by conda. + + Conda uses it to sort packages that have otherwise identical names and versions to determine the latest one. - Unlike the build string, the ``build_number`` is inspected by - conda. Conda uses it to sort packages that have otherwise - identical names and versions to determine the latest one. - This is important because new builds that contain bug - fixes for the way a package is built may be added to a - repository. + This is important because new builds that contain bug fixes for the way a package is built may be added to a repository. * - depends - list of strings @@ -113,10 +118,12 @@ conda package is composed of the first 3 values, as in: EXAMPLE: ``osx`` - Conda currently does not use this key. Packages for a - specific architecture and platform are usually - distinguished by the repository subdirectory that contains - them---see :ref:`repo-si`. + Conda currently does not use this key. + + Packages for a specific architecture and platform are usually distinguished by the repository subdirectory that contains + them. + + See :ref:`repo-si`. info/files ---------- @@ -263,7 +270,7 @@ the command line with ``conda install``, such as ``conda install python=3.4``. Internally, conda translates the command line syntax to the spec defined in this section. -EXAMPLE: python=3.4 is translated to python 3.4*. +EXAMPLE: python=3.4 is translated to python 3.4.*. ``conda search 'python=3.1'`` does NOT bring up Python 3.10, only Python 3.1.*. Package dependencies are specified using a match specification. A match specification is a space-separated string of 1, 2, or 3 @@ -272,85 +279,67 @@ parts: * The first part is always the exact name of the package. * The second part refers to the version and may contain special - characters: + characters. See table below. - * \| means OR. +* The third part is always the exact build string. When there are + three parts, the second part must be the exact version. - EXAMPLE: ``1.0|1.2`` matches version 1.0 or 1.2. +.. list-table:: Version Special Characters + :widths: 10 40 40 + :header-rows: 1 - * \* matches 0 or more characters in the version string. In - terms of regular expressions, it is the same as ``r'.*'``. + * - Symbol + - Meaning + - Example - EXAMPLE: 1.0|1.4* matches 1.0, 1.4 and 1.4.1b2, but not 1.2. 
+ * - <, >, <=, >= + - Relational operators on versions, which are compared using `PEP-440 `_. + - ``<=1.0`` matches 0.9, 0.9.1, and 1.0, but not 1.0.1. - * <, >, <=, >=, ==, and != are relational operators on versions, - which are compared using - `PEP-440 `_. For example, - ``<=1.0`` matches ``0.9``, ``0.9.1``, and ``1.0``, but not ``1.0.1``. - ``==`` and ``!=`` are exact equality. + * - ==, and != + - Exact equality and not equalities. + - ``==0.5.1`` matches 0.5.1 and not anything else while ``!=0.5.1`` matches everything but. - Pre-release versioning is also supported such that ``>1.0b4`` will match - ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. + * - ~= + - Compatibility Release + - ``~=0.5.3`` is equivalent to ``>=0.5.3, <0.6.0a`` - EXAMPLE: <=1.0 matches 0.9, 0.9.1, and 1.0, but not 1.0.1. + * - \| + - OR + - ``1.0|1.2`` matches version 1.0 or 1.2. - * , means AND. + * - \* + - Matches 0 or more characters in the version string. In terms of regular expressions, it is the same as ``r'.*'``. + - ``1.0|1.4*`` matches 1.0, 1.4 and 1.4.1b2, but not 1.2. - EXAMPLE: >=2,<3 matches all packages in the 2 series. 2.0, - 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + * - , + - AND + - ``>=2,<3`` matches all packages in the 2 series. 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not. - * , has higher precedence than \|, so >=1,<2|>3 means greater - than or equal to 1 AND less than 2 or greater than 3, which - matches 1, 1.3 and 3.0, but not 2.2. +.. hint:: + ``,`` has higher precedence than \|, so >=1,<2|>3 means greater than or equal to 1 AND less than 2 or greater than 3, which matches 1, 1.3 and 3.0, but not 2.2. - Conda parses the version by splitting it into parts separated - by \|. If the part begins with <, >, =, or !, it is parsed as a - relational operator. Otherwise, it is parsed as a version, - possibly containing the "*" operator. +.. note:: + For package match specifications, pre-release versioning is also supported such that ``>1.0b4`` will match ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. -* The third part is always the exact build string. When there are - 3 parts, the second part must be the exact version. +Conda parses the version by splitting it into parts separated +by \|. If the part begins with <, >, =, or !, it is parsed as a +relational operator. Otherwise, it is parsed as a version, +possibly containing the "*" operator. Remember that the version specification cannot contain spaces, as spaces are used to delimit the package, version, and build string in the whole match specification. ``python >= 2.7`` is an -invalid match specification. Furthermore, ``python>=2.7`` is +invalid match specification. However, ``"python >= 2.7"`` (with double or single quotes) is matched as any version of a package named ``python>=2.7``. -When using the command line, put double quotes around any package -version specification that contains the space character or any of -the following characters: <, >, \*, or \|. - -EXAMPLE:: +Examples of Package Specs +------------------------- - conda install numpy=1.11 - conda install numpy==1.11 - conda install "numpy>1.11" - conda install "numpy=1.11.1|1.11.3" - conda install "numpy>=1.8,<2" - - -Examples --------- - -The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or -1.11.3. - -The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but -not 2.0. - -The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, -1.11.2, 1.11.18, and so on. 
- -The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, -and so on. - -The build string constraint "numpy=1.11.2=*nomkl*" matches the -NumPy 1.11.2 packages without MKL but not the normal MKL NumPy +The build string constraint "numpy=1.11.2=*nomkl*" matches the NumPy 1.11.2 packages without MKL, but not the normal MKL NumPy 1.11.2 packages. -The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches -NumPy 1.11.1 or 1.11.3 built for Python 3.6 but not any versions +The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches NumPy 1.11.1 or 1.11.3 built for Python 3.6, but not any versions of NumPy built for Python 3.5 or Python 2.7. The following are all valid match specifications for @@ -366,3 +355,32 @@ numpy-1.8.1-py27_0: * numpy >=1.8,<2|1.9 * numpy 1.8.1 py27_0 * numpy=1.8.1=py27_0 + +Command Line Match Spec Examples +-------------------------------- + +When using the command line, put double or single quotes around any package +version specification that contains the space character or any of +the following characters: <, >, \*, or \|. + +.. list-table:: Examples + :widths: 30 60 + :header-rows: 1 + + * - Example + - Meaning + + * - ``conda install numpy=1.11`` + - The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, 1.11.2, 1.11.18, and so on. + + * - ``conda install numpy==1.11`` + - The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, and so on. + + * - ``conda install "numpy=1.11.1|1.11.3"`` + - The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or 1.11.3. + + * - ``conda install "numpy>1.11"`` + - Any numpy version 1.12.0a or greater. + + * - ``conda install "numpy>=1.8,<2"`` + - The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but not 2.0. diff --git a/docs/source/resources/tutorial-template.rst b/docs/source/resources/tutorial-template.rst index e8b63add27..69d9784538 100644 --- a/docs/source/resources/tutorial-template.rst +++ b/docs/source/resources/tutorial-template.rst @@ -2,10 +2,6 @@ Tutorial template ================= -.. contents:: - :local: - :depth: 1 - .. _documentation@anaconda.com: documentation@anaconda.com *This document describes the steps for creating* diff --git a/docs/source/resources/variants.rst b/docs/source/resources/variants.rst index d46bdf13e2..90953126ee 100644 --- a/docs/source/resources/variants.rst +++ b/docs/source/resources/variants.rst @@ -49,7 +49,7 @@ meta.yaml contents like: requirements: build: - - python {{ python }} + - python run: - python @@ -89,7 +89,7 @@ map for the content below. requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -154,9 +154,9 @@ map for the content below. requirements: build: - - numpy {{ numpy }} + - numpy run: - - numpy {{ numpy }} + - numpy For legacy compatibility, Python is pinned implicitly without specifying ``{{ python }}`` in your recipe. This is generally intractable to extend to @@ -261,12 +261,12 @@ First, the ``meta.yaml`` file: - name: py-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - python {{ python }} + - python - name: r-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - r-base {{ r_base }} + - r-base Next, the ``conda_build_config.yaml`` file, specifying our build matrix: @@ -323,7 +323,7 @@ your Jinja2 templates. There are two ways that you can feed this information into the API: 1. Pass the ``variants`` keyword argument to API functions. 
Currently, the - ``build``, ``render``, ``get_output_file_path``, and ``check`` functions + ``build``, ``render``, ``get_output_file_paths``, and ``check`` functions accept this argument. ``variants`` should be a dictionary where each value is a list of versions to iterate over. These are aggregated as detailed in the `Aggregation of multiple variants`_ section below. @@ -343,9 +343,9 @@ Again, with ``meta.yaml`` contents like: requirements: build: - - python {{ python }} + - python run: - - python {{ python }} + - python You could supply a variant to build this recipe like so: @@ -569,7 +569,7 @@ requirements, and a variant that includes 2 NumPy versions: requirements: build: - - numpy {{ numpy }} + - numpy run: - numpy @@ -844,7 +844,7 @@ An example variant/recipe is shown here: requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -890,7 +890,7 @@ function. requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', max_pin='x.x') }} @@ -911,7 +911,7 @@ Each can be passed independently of the other. An example of specifying both: requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', min_pin='x.x', max_pin='x.x') }} @@ -933,7 +933,7 @@ You can also pass the minimum or maximum version directly. These arguments super requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', lower_bound='1.10', upper_bound='3.0') }} diff --git a/docs/source/user-guide/environment-variables.rst b/docs/source/user-guide/environment-variables.rst index 98f99fe15d..f0d002cd27 100644 --- a/docs/source/user-guide/environment-variables.rst +++ b/docs/source/user-guide/environment-variables.rst @@ -4,10 +4,6 @@ Environment variables ===================== -.. contents:: - :local: - :depth: 1 - .. _build-state: Dynamic behavior based on state of build process @@ -53,6 +49,8 @@ inherited from the shell environment in which you invoke environment variable and defaults to the architecture the interpreter running conda was compiled with. + * - BUILD_PREFIX + - Build prefix where command line tools are installed. * - CMAKE_GENERATOR - The CMake generator string for the current build environment. On Linux systems, this is always @@ -92,7 +90,7 @@ inherited from the shell environment in which you invoke - Inherited from your shell environment and augmented with ``$PREFIX/bin``. * - PREFIX - - Build prefix to which the build script should install. + - Host prefix to which the build script should install. * - PKG_BUILDNUM - Build number of the package being built. * - PKG_NAME @@ -110,11 +108,11 @@ inherited from the shell environment in which you invoke is installed only in the host prefix when it is listed as a host requirement. * - PY3K - - ``1`` when Python 3 is installed in the build prefix, + - ``1`` when Python 3 is installed in the host prefix, otherwise ``0``. * - R - - Path to the R executable in the build prefix. R is only - installed in the build prefix when it is listed as a build + - Path to the R executable in the host prefix. R is only + installed in the host prefix when it is listed as a build requirement. * - RECIPE_DIR - Directory of the recipe. @@ -132,7 +130,7 @@ inherited from the shell environment in which you invoke Unix-style packages on Windows, which are usually statically linked to executables, are built in a special ``Library`` -directory under the build prefix. The environment variables +directory under the host prefix. 
The environment variables listed in the following table are defined only on Windows. .. list-table:: @@ -142,15 +140,15 @@ listed in the following table are defined only on Windows. - Same as PREFIX, but as a Unix-style path, such as ``/cygdrive/c/path/to/prefix``. * - LIBRARY_BIN - - ``\Library\bin``. + - ``%PREFIX%\Library\bin``. * - LIBRARY_INC - - ``\Library\include``. + - ``%PREFIX%\Library\include``. * - LIBRARY_LIB - - ``\Library\lib``. + - ``%PREFIX%\Library\lib``. * - LIBRARY_PREFIX - - ``\Library``. + - ``%PREFIX%\Library``. * - SCRIPTS - - ``\Scripts``. + - ``%PREFIX%\Scripts``. * - VS_MAJOR - The major version number of the Visual Studio version activated within the build, such as ``9``. @@ -196,7 +194,7 @@ defined only on Linux. :widths: 20 80 * - LD_RUN_PATH - - ``/lib``. + - ``$PREFIX/lib``. .. _git-env: diff --git a/docs/source/user-guide/getting-started.rst b/docs/source/user-guide/getting-started.rst index 5642b0aa90..64c82e292e 100644 --- a/docs/source/user-guide/getting-started.rst +++ b/docs/source/user-guide/getting-started.rst @@ -14,12 +14,19 @@ collection helpful. Prerequisites ============= -Before starting the tutorials, you will need to -install `Miniconda or Anaconda `_, -conda-build, and Git. +Before starting the tutorials, you need to install: -After you've installed Miniconda or Anaconda, you can use conda -to install conda-build and Git. +- `Miniconda `_ or `Anaconda `_ +- conda-build +- Git + +The most straightforward way to do this is to install Miniconda or +Anaconda, which contain conda, and then use conda to install conda-build +and Git. Make sure you install these packages into your base environment.:: + + conda install -n base conda-build git + +For more information on installing and updating conda-build, see :doc:`Installing and updating conda-build <../install-conda-build>`. .. _submissions: diff --git a/docs/source/user-guide/index.rst b/docs/source/user-guide/index.rst index ff5787b5a7..09a374aca6 100644 --- a/docs/source/user-guide/index.rst +++ b/docs/source/user-guide/index.rst @@ -17,26 +17,30 @@ variables and wheel files. wheel-files -**Tutorials** +Tutorials +......... The :doc:`tutorials <../user-guide/tutorials/index>` will guide you through -how to build conda packages---whether you're creating a +how to build conda packages — whether you're creating a package with compilers, using conda skeleton, creating from scratch, or building R packages using skeleton CRAN. -**Recipes** +Recipes +....... Conda-build uses :doc:`recipes <../user-guide/recipes/index>` to create conda packages. We have guides on debugging conda recipes, sample recipes for you to use, and information on how to build a package without a recipe. -**Environment variables** +Environment variables +..................... Use our :doc:`environment variables ` guide to understand which environment variables are available, set, and inherited, and how they affect different processes. -**Wheel files** +Wheel files +........... The user guide includes information about :doc:`wheel files ` and how to build conda diff --git a/docs/source/user-guide/recipes/build-without-recipe.rst b/docs/source/user-guide/recipes/build-without-recipe.rst deleted file mode 100644 index a3bfaeebf7..0000000000 --- a/docs/source/user-guide/recipes/build-without-recipe.rst +++ /dev/null @@ -1,162 +0,0 @@ -================================================= -Building a package without a recipe (bdist_conda) -================================================= - -.. 
contents:: - :local: - :depth: 2 - - -You can use conda-build to build packages for Python to install -rather than conda by using ``setup.py bdist_conda``. This is a -quick way to build packages without using a recipe, but it has -limitations. The script is limited to the Python version used in -the build and it is not as reproducible as using a recipe. We -recommend using a recipe with conda-build. - -.. note:: - If you use Setuptools, you must first import Setuptools and - then import ``conda_build.bdist_conda``, because Setuptools - monkey patches ``distutils.dist.Distribution``. - -EXAMPLE: A minimal ``setup.py`` file using the setup options -``name`` and ``version``: - -.. code:: - - from setuptools import setup - import conda_build.bdist_conda - - setup( - name="foo", - version="1.0", - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - ) - - -Setup options -============= - -You can pass the following options to ``setup()``. You must -include ``distclass=conda_build.bdist_conda.CondaDistribution``. - -Build number ------------- - -The number of the build. Can be overridden on the command line -with the ``--buildnum`` flag. Defaults to ``0``. - -.. code:: - - conda_buildnum=1 - - -Build string ------------- - -The build string. Default is generated automatically from the -Python version, NumPy version---if relevant---and the build -number, such as ``py34_0``. - -.. code:: - - conda_buildstr=py34_0 - - -Import tests ------------- - -Whether to automatically run import tests. The default is -``True``, which runs import tests for all the modules in -``packages``. Also allowed are ``False``, which runs no tests, or -a list of module names to be tested on import. - -.. code:: - - conda_import_tests=False - - -Command line tests ------------------- - -Command line tests to run. Default is ``True``, which runs -``command --help`` for each command in the console_scripts and -gui_scripts entry_points. Also allowed are ``False``, which does -not run any command tests, or a list of command tests to run. - -.. code:: - - conda_command_tests=False - - -Binary files relocatable ------------------------- - -Whether binary files should be made relocatable, using -install_name_tool on macOS or patchelf on Linux. The default is -``True``. - -.. code:: - - conda_binary_relocation=False - -For more information, see :ref:`Making packages relocatable `. - - -Preserve egg directory ----------------------- - -Whether to preserve the egg directory as installed by Setuptools. -The default is ``True`` if the package depends on Setuptools or -has Setuptools entry_points other than console_scripts and -gui_scripts. - -.. code:: - - conda_preserve_egg_dir=False - - -Command line options -==================== - -Build number ------------- - -Set the build number. Defaults to the conda_buildnum passed -to ``setup()`` or ``0``. Overrides any conda_buildnum passed to -``setup()``. - -.. code:: - - --buildnum=1 - - -Notes -===== - -* You must install ``bdist_conda`` into a root conda environment, - as it imports ``conda`` and ``conda_build``. It is included as - part of the ``conda-build`` package. - -* All metadata is gathered from the standard metadata from the - ``setup()`` function. Metadata that are not directly supported - by ``setup()`` can be added using one of the options specified - above. - -* By default, import tests are run for each subpackage specified - by packages, and command line tests ``command --help`` are run - for each ``setuptools entry_points`` command. 
This is done to - ensure that the package is built correctly. You can disable or - change these using the ``conda_import_tests`` and - ``conda_command_tests`` options specified above. - -* The Python version used in the build must be the same as where - conda is installed, as ``bdist_conda`` uses ``conda-build``. - -* ``bdist_conda`` uses the metadata provided to the ``setup()`` - function. - -* If you want to pass any ``bdist_conda`` specific options to - ``setup()``, in ``setup()`` you must set - ``distclass=conda_build.bdist_conda.CondaDistribution``. diff --git a/docs/source/user-guide/recipes/index.rst b/docs/source/user-guide/recipes/index.rst index d482f27415..876b500396 100644 --- a/docs/source/user-guide/recipes/index.rst +++ b/docs/source/user-guide/recipes/index.rst @@ -8,6 +8,5 @@ conda-build recipes. .. toctree:: :maxdepth: 1 - build-without-recipe sample-recipes debugging diff --git a/docs/source/user-guide/recipes/sample-recipes.rst b/docs/source/user-guide/recipes/sample-recipes.rst index b5b079fec9..c4d2e40649 100644 --- a/docs/source/user-guide/recipes/sample-recipes.rst +++ b/docs/source/user-guide/recipes/sample-recipes.rst @@ -7,21 +7,21 @@ that are not Python related. The first 2 sample recipes, ``boost`` and ``libtiff``, are examples of non-Python libraries, meaning they do not require Python to run or build. -* `boost `_ is an example +* `boost `_ is an example of a popular programming library and illustrates the use of selectors in a recipe. -* `libtiff `_ is +* `libtiff `_ is another example of a compiled library, which shows how conda can apply patches to source directories before building the package. -* `msgpack `_, - `blosc `_, and - `cytoolz `_ +* `msgpack `_, + `blosc `_, and + `cytoolz `_ are examples of Python libraries with extensions. -* `toolz `_, - `sympy `_, - `six `_, and - `gensim `_ are +* `toolz `_, + `sympy `_, + `six `_, and + `gensim `_ are examples of Python-only libraries. ``gensim`` works on Python 2, and all of the others work on both Python 2 and Python 3. diff --git a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst index 5953107423..4d60acc3c9 100644 --- a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst +++ b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst @@ -2,10 +2,10 @@ Building conda packages with conda skeleton =========================================== - -.. contents:: - :local: - :depth: 1 +.. tip:: + We recommend `Grayskull `_, a newer alternative to conda-skeleton, + to generate conda recipes for PyPI packages. Please check out their project page on GitHub + for more information. Overview ======== diff --git a/docs/source/user-guide/tutorials/build-pkgs.rst b/docs/source/user-guide/tutorials/build-pkgs.rst index d10a5411bd..d3d97423d1 100644 --- a/docs/source/user-guide/tutorials/build-pkgs.rst +++ b/docs/source/user-guide/tutorials/build-pkgs.rst @@ -2,11 +2,6 @@ Building conda packages from scratch ==================================== - -.. contents:: - :local: - :depth: 1 - Overview ======== diff --git a/docs/source/user-guide/tutorials/build-r-pkgs.rst b/docs/source/user-guide/tutorials/build-r-pkgs.rst index 7649b1dae2..3837fc736d 100644 --- a/docs/source/user-guide/tutorials/build-r-pkgs.rst +++ b/docs/source/user-guide/tutorials/build-r-pkgs.rst @@ -2,10 +2,6 @@ Building R packages with skeleton CRAN ====================================== -.. 
contents:: - :local: - :depth: 1 - Overview ======== diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index 006b7dac97..0ec1b71708 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -2,10 +2,6 @@ Building conda packages ======================= -.. contents:: - :local: - :depth: 1 - Overview ======== @@ -15,19 +11,16 @@ SEP and GDAL. Additional Windows-specific instructions are provided in the :ref:`toolkit` section. The final built packages from this tutorial are available on -`Anaconda Cloud `_: +`anaconda.org`_ (formerly known as Anaconda Cloud): -* `SEP `_. +* `SEP`_ -* `GDAL `_. +* `GDAL`_ This tutorial also describes writing recipes. You can see the -final `SEP recipe -`_ -and the `GDAL recipe -`_ -on GitHub in the `conda-build documentation repository -`_. +final `SEP recipe`_ +and the `GDAL recipe`_ +on GitHub in the `conda-build documentation repository`_. Who is this for? ================ @@ -45,9 +38,9 @@ Before you start Before you start, make sure you have installed: - * `Conda `_. - * :ref:`Conda-build `. - * Any compilers you want. + * `Conda`_ + * :ref:`Conda-build ` + * Any compilers you want .. _toolkit: @@ -82,24 +75,19 @@ There are free alternatives available for each version of the VS compilers: * Instead of VS 2008, it is often possible to substitute the - `free Microsoft Visual C++ Compiler for Python 2.7 - `_. + `free Microsoft Visual C++ Compiler for Python 2.7`_. * Instead of VS 2010, it is often possible to substitute the - `free Microsoft Windows SDK for Windows 7 and .NET Framework 4 - `_. + `free Microsoft Windows SDK for Windows 7 and .NET Framework 4`_. -* Make sure that you also install `VS 2010 Service Pack 1 (SP1) - `_. +* Make sure that you also install `VS 2010 Service Pack 1 (SP1)`_. * Due to a bug in the VS 2010 SP1 installer, the compiler tools may be removed during installation of VS 2010 SP1. They can be - restored as described in `Microsoft Visual C++ 2010 Service - Pack 1 Compiler Update for the Windows SDK 7.1 - `_. + restored as described in `Microsoft Visual C++ 2010 Service Pack + 1 Compiler Update for the Windows SDK 7.1`_. -* Visual Studio 2015 has a full-featured, free `Community edition - `_ +* Visual Studio 2015 has a full-featured, free `Community edition`_ for academic research, open source projects, and certain other use cases. @@ -122,22 +110,22 @@ built on Windows 10. Other tools ------------ -Some environments initially lack tools such as patch or Git +Some environments initially lack tools such as ``patch`` or Git that may be needed for some build workflows. -On Windows, these can be installed with conda: +On Windows, these can be installed with conda using the following command: -``conda install git m2-patch`` +``$ conda install git m2-patch`` -On macOS and Linux, replace ``m2-patch`` with patch. +On macOS and Linux, replace ``m2-patch`` with ``patch``. Developing a build strategy ============================ Conda recipes are typically built with a trial-and-error method. -Often the first attempt to build a package fails with compiler +Sometimes, the first attempt to build a package will fail with compiler or linker errors, often caused by missing dependencies. The person writing the recipe then examines these errors and modifies the recipe to include the missing dependencies, usually as part of the @@ -157,18 +145,18 @@ Python 3. 
Installing only one makes it easier to keep track of the builds, but it is possible to have both installed on the same system at the same time. If you have both installed, use the -``where`` command on Windows, or ``which`` command on Linux to -see which version comes first on PATH since this is the one you will be using:: +``where`` command on Windows, or the ``which`` command on macOS or +Linux, to see which version comes first on ``PATH``, since this is +the one you will be using:: - where python + $ where python To build a package for a Python version other than the one in your Miniconda installation, use the ``--python`` option in the -``conda-build`` command. - -EXAMPLE: To build a Python 3.5 package with Miniconda2:: +``conda-build`` command. For example, to build a Python 3.5 package +with Miniconda2:: - conda-build recipeDirectory --python=3.5 + $ conda-build recipeDirectory --python=3.5 .. note:: Replace ``recipeDirectory`` with the name and path of your @@ -180,29 +168,29 @@ Automated testing After the build, if the recipe directory contains a test file. This test file is named ``run_test.bat`` on Windows, ``run_test.sh`` on macOS or Linux, or ``run_test.py`` on any platform. The file runs to test the package -and any errors are reported. After seeing "check the output," you can -also test if this package was built by using the command:: +and any errors that surface are reported. After seeing "check the output," +you can also test if this package was built by using the command:: $ conda build --test .tar.bz2 .. note:: - Use the :ref:`Test section of the meta.yaml file - ` to move data files from the recipe directory to the - test directory when the test is run. + Use the :ref:`"test" section ` of the ``meta.yaml`` file + to move data files from the recipe directory to the test directory when + the test is run. Building a SEP package with conda and Python 2 or 3 ===================================================== -The `SEP documentation `_ states +The `SEP documentation`_ states that SEP runs on Python 2 and 3, and it depends only on NumPy. -Searching for SEP and PyPI shows that there is already `a PyPI -package for SEP `_. +Searching for SEP on PyPI shows that there is already `a PyPI +package for SEP`_. Because a PyPI package for SEP already exists, the ``conda skeleton`` command can make a skeleton or outline of a -conda recipe based on the PyPI package. Then the recipe outline -can be completed manually and conda can build a conda package +conda recipe based on the PyPI package. The recipe outline +can then be completed manually and conda can build a conda package from the completed recipe. @@ -212,15 +200,15 @@ Install Visual Studio If you have not already done so, install the appropriate version of Visual Studio: -* For Python 3---Visual Studio 2017: +* For Python 3 — Visual Studio 2017: - #. Choose Custom install. + #. Choose the "Custom install" option. #. Under Programming Languages, choose to install Visual C++. -* For Python 2---Visual Studio 2008: +* For Python 2 — Visual Studio 2008: - #. Choose Custom install. + #. Choose the "Custom install" option. #. Choose to install X64 Compilers and Tools. Install Service Pack 1. @@ -230,16 +218,16 @@ Make a conda skeleton recipe #. Run the skeleton command:: - conda skeleton pypi sep + $ conda skeleton pypi sep - The ``skeleton`` command installs into a newly created + The ``skeleton`` command installs into a newly-created directory called ``sep``. #. 
Go to the ``sep`` directory to view the files:: - cd sep + $ cd sep - One skeleton file has been created: ``meta.yaml`` + You will see that one ``skeleton`` file has been created: ``meta.yaml`` Edit the skeleton files @@ -275,10 +263,11 @@ EXAMPLE: - python - numpy x.x -Notice that there are two types of requirements, host and run. -Host represents packages that need to be specific to the target +Notice that there are two types of requirements, ``host`` and ``run`` +(``build`` is another valid parameter, but is not shown in this example). +``host`` represents packages that need to be specific to the target platform when the target platform is not necessarily the same as -the native build platform. Run represents the dependencies that +the native build platform. ``run`` represents the dependencies that should be installed when the package is installed. .. note:: @@ -292,7 +281,7 @@ should be installed when the package is installed. at build time. -OPTIONAL: Add a test for the built package +Optional: Add a test for the built package ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Adding this optional test will test the package at the end of the @@ -300,9 +289,7 @@ build by making sure that the Python statement ``import sep`` runs successfully: #. Add ``- sep``, checking to be sure that the indentation is - consistent with the rest of the file. - - EXAMPLE: + consistent with the rest of the file: .. code-block:: yaml @@ -317,7 +304,7 @@ Build the package Build the package using the recipe you just created:: - conda build sep + $ conda build sep Check the output @@ -354,16 +341,13 @@ Check the output Building a GDAL package with conda and Python 2 or 3 ==================================================== -This procedure describes how to build a package with Python 2 or Python 3. -Follow the instructions for your preferred version. - To begin, install Anaconda or Miniconda and conda-build. If you are using a -Windows machine, also use conda to install Git and the m2-patch. +Windows machine, also use conda to install Git and ``m2-patch``. .. code-block:: bash - conda install git - conda install m2-patch + $ conda install git + $ conda install m2-patch Because GDAL includes C and C++, building it on Windows requires Visual Studio. This procedure describes how to build a package with Python 2 or @@ -375,42 +359,46 @@ To build a GDAL package: #. Install Visual Studio: - * For Python 3, install `Visual Studio 2017 `_. - Choose Custom install. Under Programming Languages, select workloads that - come from Visual Studio so you choose the Desktop Development with C++ and - Universal Platform C. + * For Python 3, install `Visual Studio 2017`_: + + * Choose "Custom install". + * Under "Programming Languages", select workloads that come from Visual + Studio so that you can choose the Desktop Development with C++ and + Universal Platform C. - * For Python 2, install `Visual Studio 2008 `_. - Choose Custom install. Choose to install X64 Compilers and Tools. - Install Visual Studio 2008 Service Pack 1. + * For Python 2, install `Visual Studio 2008`_: + + * Choose "Custom install". + * Choose to install X64 Compilers and Tools. + * Install Visual Studio 2008 Service Pack 1. #. Install Git. Because the GDAL package sources are retrieved from GitHub for the build, you must install Git:: - conda install git m2-patch conda-build + $ conda install git m2-patch conda-build -#. Get gdal-feedstock. For the purpose of this tutorial, we will be using a recipe from Anaconda:: +#. Get ``gdal-feedstock``. 
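Whichever recipe you are working with (the ``sep`` skeleton above, or the
``gdal-feedstock`` cloned in the next step), it can be useful to render it first and
confirm how conda-build resolves the ``host`` and ``run`` requirements discussed
earlier. A minimal sketch using the Python API, with the recipe directory as a
placeholder::

    from conda_build import api

    # Render without building to inspect the fully evaluated metadata.
    # api.render() returns (MetaData, need_download, need_reparse) tuples.
    for metadata, _, _ in api.render("path/to/recipe"):
        print(metadata.name(), metadata.version())
        print("host:", metadata.get_value("requirements/host"))
        print("run: ", metadata.get_value("requirements/run"))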
For the purpose of this tutorial, we will be using a recipe from Anaconda:: - git clone https://github.com/AnacondaRecipes/gdal-feedstock.git + $ git clone https://github.com/AnacondaRecipes/gdal-feedstock.git -#. Use conda-build to build the gdal-feedstock:: +#. Use conda-build to build the ``gdal-feedstock``:: - conda build gdal-feedstock + $ conda build gdal-feedstock -#. Check the output to make sure the build completed +#. Check the output to make sure that the build completed successfully. The output also contains the location of the final package file and a command to upload the package to Cloud. For this package in particular, there should be two - packages outputted: libgdal and GDAL. + packages outputted: ``libgdal`` and ``GDAL``. #. In case of any linker or compiler errors, modify the recipe and run it again. -Let’s take a better look at what’s happening inside the gdal-feedstock. -In particular, what is happening in the ``meta.yaml``. +Let's take a closer look at what's happening inside the ``gdal-feedstock``, +specifically in the ``meta.yaml`` file. -The first interesting bit happens under ``source`` in the patches +The first interesting bit happens under ``source`` in the ``patches`` section: :: @@ -422,22 +410,23 @@ section: # disable 12 bit jpeg on Windows as we aren't using internal jpeg - 0003-disable_jpeg12.patch -This section says that when this package is being built on a Windows -platform, apply the following patch files. Notice that the patch files -are in the `patches` directory of the recipe. These patches will only +This section is basically saying "when this package is being built on a Windows +platform, apply the following patch files". Notice that the patch files +are in the ``patches`` directory of the recipe. These patches will only be applied to Windows since the ``# [win]`` selector is applied to each of the patch entries. For more about selectors, see :ref:`preprocess-selectors`. -In the requirements section, notice how there are both a build and -host set of requirements. For this recipe, all the compilers required to -build the package are listed in the build requirements. +In the ``requirements`` section, notice how there are both a ``build`` and +``host`` set of requirements (``run`` is another valid parameter, but is +not shown in this example). For this recipe, all the compilers required to +build the package are listed in the ``build`` requirements. Normally, this section will list out packages required to build the package. GDAL requires CMake on Windows, as well as C compilers. Notice that the C compilers are pulled into the recipe using the syntax ``{{ compiler('c') }}``. Since conda-build 3, conda-build defines a jinja2 function ``compiler()`` to specify compiler packages dynamically. So, using -the ``compiler(‘c’)`` function in a conda recipe will pull in the correct +the ``compiler('c')`` function in a conda recipe will pull in the correct compiler for any build platform. For more information about compilers with conda-build see :ref:`compiler-tools`. @@ -447,9 +436,9 @@ see :ref:`using-your-customized-compiler-package-with-conda-build-3`. Notice that this package has an ``outputs`` section. This section is a list of packages to output as a result of building -this package. In this case, the packages libgdal and GDAL will be built. +this package. In this case, the packages ``libgdal`` and ``GDAL`` will be built. Similar to a normal recipe, the outputs can have build scripts, -tests scripts and requirements specified. 
+tests scripts, and requirements specified. For more information on how outputs work, see the :ref:`package-outputs`. Now, let's try to build GDAL against some build matrix. @@ -465,12 +454,30 @@ Add the following to your ``conda_build_config.yaml``: Now you can build GDAL using conda-build with the command:: - conda build gdal-feedstock + $ conda build gdal-feedstock Or explicitly set the location of the conda-build variant matrix:: - conda build gdal-feedstock --variant-config-file conda_build_config.yaml + $ conda build gdal-feedstock --variant-config-file conda_build_config.yaml If you want to know more about build variants and ``conda_build_config.yaml``, including how to specify a config file and what can go into it, take a look at :ref:`conda-build-variant-config-files`. + + +.. _`anaconda.org`: https://anaconda.org +.. _SEP: https://anaconda.org/wwarner/sep/files +.. _GDAL: https://anaconda.org/conda-forge/gdal/files +.. _`SEP recipe`: https://github.com/conda-forge/sep-feedstock +.. _`GDAL recipe`: https://github.com/conda-forge/gdal-feedstock +.. _`conda-build documentation repository`: https://github.com/conda/conda-build/tree/main/docs +.. _Conda: https://conda.io/projects/conda/en/latest/user-guide/install/index.html +.. _`free Microsoft Visual C++ Compiler for Python 2.7`: https://www.microsoft.com/en-us/download/details.aspx?id=44266 +.. _`free Microsoft Windows SDK for Windows 7 and .NET Framework 4`: https://www.microsoft.com/en-us/download/details.aspx?id=8279 +.. _`VS 2010 Service Pack 1 (SP1)`: https://www.microsoft.com/en-us/download/details.aspx?id=34677 +.. _`Microsoft Visual C++ 2010 Service Pack 1 Compiler Update for the Windows SDK 7.1`: https://www.microsoft.com/en-us/download/details.aspx?id=4422 +.. _`Community edition`: https://www.visualstudio.com/en-us/products/visual-studio-community-vs.aspx +.. _`SEP documentation`: https://sep.readthedocs.io +.. _`a PyPI package for SEP`: https://pypi.python.org/pypi/sep +.. _`Visual Studio 2017`: https://docs.microsoft.com/en-us/visualstudio/install/install-visual-studio?view=vs-2017 +.. _`Visual Studio 2008`: http://download.microsoft.com/download/E/8/E/E8EEB394-7F42-4963-A2D8-29559B738298/VS2008ExpressWithSP1ENUX1504728.iso diff --git a/docs/source/user-guide/wheel-files.rst b/docs/source/user-guide/wheel-files.rst index 2226f67426..36551f3192 100644 --- a/docs/source/user-guide/wheel-files.rst +++ b/docs/source/user-guide/wheel-files.rst @@ -42,8 +42,8 @@ file does not download or list the .whl file. It is important to ``pip install`` only the one desired package. Whenever possible, install dependencies with conda and not pip. -We strongly recommend using the ``--no-deps`` option in the ``pip install`` -command. +You must use the ``--no-deps`` option in the ``pip install`` command in order +to avoid bundling dependencies into your conda-package. If you run ``pip install`` without the ``--no-deps`` option, pip will often install dependencies in your conda recipe and those dependencies will become diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface new file mode 100644 index 0000000000..d7737f9368 --- /dev/null +++ b/news/5222-deprecating-conda_interface @@ -0,0 +1,32 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) +* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. 
(#5222) +* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) +* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222) +* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) +* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) +* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) + +### Docs + +* + +### Other + +* diff --git a/news/5233-enable-codspeed b/news/5233-enable-codspeed new file mode 100644 index 0000000000..efb32df4d1 --- /dev/null +++ b/news/5233-enable-codspeed @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Enable CodSpeed benchmarks for select tests. (#5233) diff --git a/news/5237-select_lines-caching b/news/5237-select_lines-caching new file mode 100644 index 0000000000..434a832350 --- /dev/null +++ b/news/5237-select_lines-caching @@ -0,0 +1,19 @@ +### Enhancements + +* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5238-open_recipe b/news/5238-open_recipe new file mode 100644 index 0000000000..9d5d42c4c5 --- /dev/null +++ b/news/5238-open_recipe @@ -0,0 +1,19 @@ +### Enhancements + +* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5251-deprecating-conda_interface b/news/5251-deprecating-conda_interface new file mode 100644 index 0000000000..9f5e48d6cd --- /dev/null +++ b/news/5251-deprecating-conda_interface @@ -0,0 +1,34 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251) +* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) +* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) +* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) +* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. 
(#5251) +* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) +* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) +* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) +* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) +* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251) +* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) +* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. (#5251) + +### Docs + +* + +### Other + +* diff --git a/news/5252-sign-stubs b/news/5252-sign-stubs new file mode 100644 index 0000000000..3f8bec0b49 --- /dev/null +++ b/news/5252-sign-stubs @@ -0,0 +1,19 @@ +### Enhancements + +* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. (#5252) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5271-context b/news/5271-context new file mode 100644 index 0000000000..b4143e00f4 --- /dev/null +++ b/news/5271-context @@ -0,0 +1,19 @@ +### Enhancements + +* Require `conda >=23.7.0`. (#5271) + +### Bug fixes + +* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271) + +### Deprecations + +* Deprecate `conda_build.config.Config.override_channels`. Use `conda.base.context.context.override_channels` instead. (#5271) + +### Docs + +* + +### Other + +* diff --git a/news/5276-deprecating-conda_interface b/news/5276-deprecating-conda_interface new file mode 100644 index 0000000000..701b9a53f1 --- /dev/null +++ b/news/5276-deprecating-conda_interface @@ -0,0 +1,56 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) +* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) +* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) +* Deprecate `conda_build.conda_interface.Completer`. Unused. 
(#5276) +* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) +* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276) +* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) +* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) +* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) +* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276) +* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) +* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) +* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) +* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276) +* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) +* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) +* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) +* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) +* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) +* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) +* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276) +* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276) +* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276) +* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276) +* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276) +* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276) +* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276) +* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276) +* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. 
(#5276) + +### Docs + +* + +### Other + +* diff --git a/news/5280-deprecate-get_vars-loop_only b/news/5280-deprecate-get_vars-loop_only new file mode 100644 index 0000000000..e18d5cfe8c --- /dev/null +++ b/news/5280-deprecate-get_vars-loop_only @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.variants.get_vars(loop_only)`. Unused. (#5280) + +### Docs + +* + +### Other + +* diff --git a/news/5284-deprecate-HashableDict b/news/5284-deprecate-HashableDict new file mode 100644 index 0000000000..c411443395 --- /dev/null +++ b/news/5284-deprecate-HashableDict @@ -0,0 +1,21 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284) + +### Docs + +* + +### Other + +* diff --git a/news/5299-remove-deprecations b/news/5299-remove-deprecations new file mode 100644 index 0000000000..c78531ea4d --- /dev/null +++ b/news/5299-remove-deprecations @@ -0,0 +1,39 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Postpone `conda_build.index.channel_data` deprecation. (#5299) +* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299) +* Remove `conda_build.bdist_conda`. (#5299) +* Remove `conda_build.build.have_prefix_files`. (#5299) +* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299) +* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299) +* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299) +* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299) +* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299) +* Remove `conda_build.environ._load_all_json`. (#5299) +* Remove `conda_build.environ._load_json`. (#5299) +* Remove `conda_build.environ.cached_actions`. (#5299) +* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299) +* Remove `conda_build.environ.InvalidEnvironment`. (#5299) +* Remove `conda_build.environ.LINK_ACTION`. (#5299) +* Remove `conda_build.environ.PREFIX_ACTION`. (#5299) +* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299) +* Remove `conda_build.index.DummyExecutor`. (#5299) +* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299) +* Remove `conda_build.index.LOCKFILE_NAME`. (#5299) +* Remove `conda_build.index.MAX_THREADS_DEFAULT`. 
(#5299) + +### Docs + +* + +### Other + +* diff --git a/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md b/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md deleted file mode 100644 index 1bae50d432..0000000000 --- a/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md +++ /dev/null @@ -1,3 +0,0 @@ -### Bug fixes - -* fix: `conda-build` CLI overrode `condarc`'s `zstd_compression_level` with the default value diff --git a/pyproject.toml b/pyproject.toml index 36619ccf54..a8b907644a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,163 @@ -[tool.black] -target-version = ['py36', 'py37', 'py38'] +[build-system] +build-backend = "hatchling.build" +requires = [ + "hatchling >=1.12.2", + "hatch-vcs >=0.2.0", +] + +[project] +authors = [{name = "Anaconda, Inc.", email = "conda@continuum.io"}] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] +dependencies = [ + "beautifulsoup4", + "chardet", + "conda >=23.7.0", + "conda-index >=0.4.0", + "conda-package-handling >=1.3", + "filelock", + "frozendict >=2.4.2", + "jinja2", + "jsonschema >=4.19", + "libarchive-c", + "menuinst >=2", + "packaging", + "pkginfo", + "psutil", + "pytz", + "pyyaml", + "requests", + "tomli ; python_version<'3.11'", + "tqdm", +] +description = "tools for building conda packages" +dynamic = ["version"] +license = {file = "LICENSE"} +name = "conda-build" +readme = "README.md" +requires-python = ">=3.8" + +[project.entry-points.conda] +conda-build = "conda_build.plugin" + +[project.scripts] +conda-build = "conda_build.cli.main_build:execute" +conda-convert = "conda_build.cli.main_convert:execute" +conda-debug = "conda_build.cli.main_debug:execute" +conda-develop = "conda_build.cli.main_develop:execute" +conda-inspect = "conda_build.cli.main_inspect:execute" +conda-metapackage = "conda_build.cli.main_metapackage:execute" +conda-render = "conda_build.cli.main_render:execute" +conda-skeleton = "conda_build.cli.main_skeleton:execute" + +[project.urls] +changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +documentation = "https://docs.conda.io/projects/conda-build/en/stable/" +repository = "https://github.com/conda/conda-build" + +[tool.coverage.report] +exclude_lines = [ + "if TYPE_CHECKING:", # ignoring type checking imports +] +omit = ["conda_build/skeletons/_example_skeleton.py"] +show_missing = true +skip_covered = true +sort = "Miss" + +[tool.coverage.run] +# store relative paths in coverage information +relative_files = true + +[tool.hatch.build] +include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] + +[tool.hatch.build.hooks.vcs] +version-file = "conda_build/_version.py" + +[tool.hatch.version] +source = "vcs" + +[tool.hatch.version.raw-options] +local_scheme = "dirty-tag" + +[tool.pytest.ini_options] +addopts = [ + "--color=yes", + # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) + "--cov-append", + "--cov-branch", + "--cov-report=term", # print summary table to screen + 
"--cov-report=xml", # for codecov/codecov-action upload + "--durations=16", # show 16 slowest tests + "--junitxml=junit.xml", + # "--splitting-algorithm=least_duration", # not available yet + # "--store-durations", # not available yet + "--strict-markers", + "--tb=native", + "--xdoctest-modules", + "--xdoctest-style=google", + "-vv", +] +doctest_optionflags = [ + "NORMALIZE_WHITESPACE", + "IGNORE_EXCEPTION_DETAIL", + "ALLOW_UNICODE", + "ELLIPSIS", +] +filterwarnings = [ + # elevate conda's deprecated warning to an error + "error::PendingDeprecationWarning:conda", + "error::DeprecationWarning:conda", + # elevate conda-build's deprecated warning to an error + "error::PendingDeprecationWarning:conda_build", + "error::DeprecationWarning:conda_build", + # ignore numpy.distutils error + 'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data', +] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "benchmark: execute the benchmark tests", +] +minversion = 3.0 +norecursedirs = ["tests/test-recipes/*"] +testpaths = ["tests"] + +[tool.ruff] +target-version = "py38" + +[tool.ruff.lint] +flake8-type-checking = {exempt-modules = [], strict = true} +ignore = [ + "E402", # module level import not at top of file + "E722", # do not use bare 'except' + "E731", # do not assign a lambda expression, use a def +] +pycodestyle = {max-line-length = 120} +# see https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "FA", # flake8-future-annotations + "I", # isort + "ISC", # flake8-implicit-str-concat + "T10", # flake8-debugger + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle warnings +] diff --git a/recipe/bld.bat b/recipe/bld.bat deleted file mode 100644 index 2eb627d075..0000000000 --- a/recipe/bld.bat +++ /dev/null @@ -1,4 +0,0 @@ -python setup.py install --single-version-externally-managed --record=record.txt -IF %ERRORLEVEL% NEQ 0 exit 1 - -del %SCRIPTS%\conda-init diff --git a/recipe/build.sh b/recipe/build.sh deleted file mode 100644 index cbff269d18..0000000000 --- a/recipe/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -python setup.py install --single-version-externally-managed --record=record.txt diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml new file mode 100644 index 0000000000..3959a519bd --- /dev/null +++ b/recipe/conda_build_config.yaml @@ -0,0 +1,6 @@ +python: + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - '3.12' diff --git a/recipe/meta.yaml b/recipe/meta.yaml index afc694561e..33f8fe9125 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -3,110 +3,97 @@ package: version: {{ GIT_DESCRIBE_TAG }}.{{ GIT_BUILD_STR }} source: + # git_url only captures committed code git_url: ../ build: - number: 0 + script: {{ PYTHON }} -m pip install . 
--no-deps --no-build-isolation -vv entry_points: - - conda-build = conda_build.cli.main_build:main - - conda-convert = conda_build.cli.main_convert:main - - conda-debug = conda_build.cli.main_debug:main - - conda-develop = conda_build.cli.main_develop:main - - conda-index = conda_build.cli.main_index:main - - conda-inspect = conda_build.cli.main_inspect:main - - conda-metapackage = conda_build.cli.main_metapackage:main - - conda-render = conda_build.cli.main_render:main - - conda-skeleton = conda_build.cli.main_skeleton:main + - conda-build = conda_build.cli.main_build:execute + - conda-convert = conda_build.cli.main_convert:execute + - conda-debug = conda_build.cli.main_debug:execute + - conda-develop = conda_build.cli.main_develop:execute + - conda-inspect = conda_build.cli.main_inspect:execute + - conda-metapackage = conda_build.cli.main_metapackage:execute + - conda-render = conda_build.cli.main_render:execute + - conda-skeleton = conda_build.cli.main_skeleton:execute requirements: build: - - git + - git # for source/git_url above + host: - python - - setuptools + - pip + - hatchling >=1.12.2 + - hatch-vcs >=0.2.0 + - wheel run: - beautifulsoup4 - chardet - - conda >=4.13 - # - conda-verify >=3.0.2 # optional as of CB 3.12.0 - - contextlib2 # [py<34] - - enum34 # [py<34] - - pathlib2 # [py<3] + - conda >=23.7.0 + - conda-index >=0.4.0 + - conda-package-handling >=1.3 - filelock - - futures # [py<3] + - frozendict >=2.4.2 - jinja2 - - patchelf # [linux] - - patch >=2.6 # [not win] - - m2-patch >=2.6 # [win] + - jsonschema >=4.19 + - m2-patch >=2.6 # [win] + - menuinst >=2 + - packaging + - patch >=2.6 # [not win] + - patchelf # [linux] - pkginfo - psutil - - py-lief # [not win] + - py-lief - python + - python-libarchive-c + - pytz - pyyaml - requests - - scandir # [py<34] - - setuptools - - six - - glob2 >=0.6 - - pytz - - toml + - tomli # [py<311] - tqdm - - conda-package-handling >=1.3 - - python-libarchive-c + run_constrained: + - conda-verify >=3.1.0 test: - files: - - test_bdist_conda_setup.py + imports: + # high-level import + - conda_build + # new/updated submodules (can be dropped after 1-2 releases) + - conda_build.index requires: - - pytest - - pytest-cov - - pytest-mock - # Optional: you can use pytest-xdist to run the tests in parallel - # - pytest-env # [win] - # - pytest-xdist - # - conda-verify >=3.0.3 # todo once it is released + - setuptools + - pip commands: - - type -P conda-build # [unix] - - where conda-build # [win] - - conda build -h - - type -P conda-convert # [unix] - - where conda-convert # [win] - - conda convert -h - - type -P conda-develop # [unix] - - where conda-develop # [win] - - conda develop -h - - type -P conda-index # [unix] - - where conda-index # [win] - - conda index -h - - type -P conda-inspect # [unix] - - where conda-inspect # [win] - - conda inspect -h - - conda inspect linkages -h \| grep "--name ENVIRONMENT" # [unix] - - conda inspect objects -h \| grep "--name ENVIRONMENT" # [osx] - - type -P conda-metapackage # [unix] - - where conda-metapackage # [win] - - conda metapackage -h - - type -P conda-render # [unix] - - where conda-render # [win] - - conda render -h - - type -P conda-skeleton # [unix] - - where conda-skeleton # [win] - - conda skeleton -h - - where conda-debug # [win] - - conda debug -h - # test that conda sees entry points appropriately in help + - python -m pip check + # subcommands - conda --help - - # Check for bdist_conda - - python test_bdist_conda_setup.py bdist_conda --help - imports: - - conda_build - source_files: - - 
tests - -outputs: - - type: wheel - - name: {{ PKG_NAME }} + - conda build --help + - conda convert --help + - conda develop --help + - conda inspect --help + - conda inspect linkages --help # [unix] + - conda inspect objects --help # [osx] + - conda metapackage --help + - conda render --help + - conda skeleton --help + - conda debug --help + # entrypoints + - conda-build --help + - conda-convert --help + - conda-develop --help + - conda-inspect --help + - conda-inspect linkages --help # [unix] + - conda-inspect objects --help # [osx] + - conda-metapackage --help + - conda-render --help + - conda-skeleton --help + - conda-debug --help about: - home: https://github.com/conda/conda-build - license: BSD 3-clause + home: https://conda.org + license: BSD-3-Clause + license_file: LICENSE + summary: Canary release of conda-build + doc_url: https://conda.io/projects/conda-build/en/latest/ + dev_url: https://github.com/conda/conda-build diff --git a/recipe/run_test.py b/recipe/run_test.py deleted file mode 100644 index 42a35beb73..0000000000 --- a/recipe/run_test.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build - -print('conda_build.__version__: %s' % conda_build.__version__) diff --git a/recipe/test_bdist_conda_setup.py b/recipe/test_bdist_conda_setup.py deleted file mode 100644 index 3f26e18b06..0000000000 --- a/recipe/test_bdist_conda_setup.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup -import conda_build.bdist_conda - -setup( - name="package", - version="1.0.0", - distclass=conda_build.bdist_conda.CondaDistribution, -) diff --git a/rever.xsh b/rever.xsh index a0d145cebd..577ecfa980 100644 --- a/rever.xsh +++ b/rever.xsh @@ -1,7 +1,7 @@ $ACTIVITIES = ["authors", "changelog"] # Basic settings -$PROJECT = $GITHUB_REPO = $(basename $(git remote get-url origin)).split('.')[0] +$PROJECT = $GITHUB_REPO = $(basename $(git remote get-url origin)).split('.')[0].strip() $GITHUB_ORG = "conda" # Authors settings @@ -26,3 +26,9 @@ $CHANGELOG_CATEGORIES = [ $CHANGELOG_CATEGORY_TITLE_FORMAT = "### {category}\n\n" $CHANGELOG_AUTHORS_TITLE = "Contributors" $CHANGELOG_AUTHORS_FORMAT = "* @{github}\n" + +try: + # allow repository to customize synchronized-from-infa rever config + from rever_overrides import * +except ImportError: + pass diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 3f6eb0840f..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,47 +0,0 @@ -[flake8] -# leave 180 line length for historical/incremental pre-commit purposes -# rely on black/darker to enforce 88 standard -max-line-length = 180 -# E122: Continuation line missing indentation or outdented -# E123: Closing bracket does not match indentation of opening bracket's line -# E126: Continuation line over-indented for hanging indent -# E127: Continuation line over-indented for visual indent -# E128: Continuation line under-indented for visual indent -# E203: Whitespace before ':' [required by black/darker] -# E722: Do not use bare except, specify exception instead -# E731: Do not assign a lambda expression, use a def -# W503: Line break occurred before a binary operator -# W504: Line break occurred after a binary operator -# W605: Invalid escape sequence 'x' -ignore = E122,E123,E126,E127,E128,E203,E731,E722,W503,W504,W605 -exclude = build,conda_build/_version.py,tests,recipe,.git,versioneer.py,conda,relative,benchmarks,.asv,docs,rever - -[tool:pytest] 
-norecursedirs= tests/test-recipes .* *.egg* build dist recipe -addopts = - --junitxml=junit.xml - --ignore setup.py - --ignore run_test.py - --cov-report term-missing - --tb native - --strict - --strict-markers - --durations=0 -log_level = DEBUG -env = - PYTHONHASHSEED=0 -markers = - serial: execute test serially (to avoid race conditions) - slow: execute the slow tests if active - sanity: execute the sanity tests - no_default_testing_config: used internally to disable monkeypatching for testing_config - -[versioneer] -VCS = git -versionfile_source = conda_build/_version.py -versionfile_build = conda_build/_version.py -tag_prefix = -parentdir_prefix = conda-build- - -[bdist_wheel] -universal=1 diff --git a/setup.py b/setup.py deleted file mode 100755 index 2a52da6420..0000000000 --- a/setup.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from pathlib import Path -from setuptools import setup - -import versioneer - -# Don't proceed with 'unknown' in version -version_dict = versioneer.get_versions() -if version_dict["error"]: - raise RuntimeError(version_dict["error"]) - -deps = [ - "conda", - "requests", - "filelock", - "pyyaml", - "jinja2", - "pkginfo", - "beautifulsoup4", - "chardet", - "pytz", - "toml", - "tqdm", - "psutil", - "six", - "libarchive-c", - "setuptools", - # "conda-package-handling", # remove comment once released on PyPI - "glob2", -] - -# We cannot build lief for Python 2.7 on Windows (unless we use mingw-w64 for it, which -# would be a non-trivial amount of work). -# .. lief is missing the egg-info directory so we cannot do this .. besides it is not on -# pypi. -# if sys.platform != 'win-32' or sys.version_info >= (3, 0): -# deps.extend(['lief']) - -setup( - name="conda-build", - version=version_dict["version"], - cmdclass=versioneer.get_cmdclass(), - author="Continuum Analytics, Inc.", - author_email="conda@continuum.io", - url="https://github.com/conda/conda-build", - license="BSD-3-Clause", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - ], - python_requires=">=3.6", - description="tools for building conda packages", - long_description=Path("README.md").read_text(), - packages=[ - "conda_build", - "conda_build.cli", - "conda_build.skeletons", - "conda_build.os_utils", - ], - entry_points={ - "console_scripts": [ - "conda-build = conda_build.cli.main_build:main", - "conda-convert = conda_build.cli.main_convert:main", - "conda-develop = conda_build.cli.main_develop:main", - "conda-index = conda_build.cli.main_index:main", - "conda-inspect = conda_build.cli.main_inspect:main", - "conda-metapackage = conda_build.cli.main_metapackage:main", - "conda-render = conda_build.cli.main_render:main", - "conda-skeleton = conda_build.cli.main_skeleton:main", - "conda-debug = conda_build.cli.main_debug:main", - ], - "distutils.commands": [ - "bdist_conda = conda_build.bdist_conda:bdist_conda", - ], - }, - install_requires=deps, - package_data={"conda_build": ["templates/*", "cli-*.exe"]}, - zip_safe=False, -) diff --git a/tests/archives/flask-0.11.1-py_0.tar.bz2 b/tests/archives/flask-0.11.1-py_0.tar.bz2 index 1e6d8779a7..0f1994b8b3 100644 Binary files a/tests/archives/flask-0.11.1-py_0.tar.bz2 and 
b/tests/archives/flask-0.11.1-py_0.tar.bz2 differ diff --git a/tests/archives/fly-2.5.2-0.tar.bz2 b/tests/archives/fly-2.5.2-0.tar.bz2 index b302f1f5b0..fc7ed60e2c 100644 Binary files a/tests/archives/fly-2.5.2-0.tar.bz2 and b/tests/archives/fly-2.5.2-0.tar.bz2 differ diff --git a/tests/archives/nano-2.4.1-0.tar.bz2 b/tests/archives/nano-2.4.1-0.tar.bz2 index d441ecd1cf..dcc0c21a77 100644 Binary files a/tests/archives/nano-2.4.1-0.tar.bz2 and b/tests/archives/nano-2.4.1-0.tar.bz2 differ diff --git a/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 b/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 index 99f04561bd..c60e421824 100644 Binary files a/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 and b/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 differ diff --git a/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2 b/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2 index 11eac8a959..e01aaaa397 100644 Binary files a/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2 and b/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2 differ diff --git a/tests/bdist-recipe/bin/test-script-setup.py b/tests/bdist-recipe/bin/test-script-setup.py deleted file mode 100644 index f125947783..0000000000 --- a/tests/bdist-recipe/bin/test-script-setup.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build_test -conda_build_test - -print("Test script setup.py") - -if __name__ == "__main__": - from conda_build_test import manual_entry - manual_entry.main() diff --git a/tests/bdist-recipe/conda_build_test/__init__.py b/tests/bdist-recipe/conda_build_test/__init__.py deleted file mode 100644 index 3574c4128a..0000000000 --- a/tests/bdist-recipe/conda_build_test/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -conda build test package -""" -print("conda_build_test has been imported") diff --git a/tests/bdist-recipe/conda_build_test/empty.py b/tests/bdist-recipe/conda_build_test/empty.py deleted file mode 100644 index 3f48e8b789..0000000000 --- a/tests/bdist-recipe/conda_build_test/empty.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause diff --git a/tests/bdist-recipe/conda_build_test/manual_entry.py b/tests/bdist-recipe/conda_build_test/manual_entry.py deleted file mode 100644 index fb15342cd3..0000000000 --- a/tests/bdist-recipe/conda_build_test/manual_entry.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -def main(): - import argparse - - # Just picks them up from `sys.argv`. - parser = argparse.ArgumentParser( - description="Basic parser." 
- ) - parser.parse_args() - - print("Manual entry point") diff --git a/tests/bdist-recipe/setup.py b/tests/bdist-recipe/setup.py deleted file mode 100644 index d9aac68996..0000000000 --- a/tests/bdist-recipe/setup.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup -import conda_build.bdist_conda - -setup( - name="conda-build-test-project", - version='1.0', - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - conda_features=[], - author="Continuum Analytics, Inc.", - url="https://github.com/conda/conda-build", - license="BSD", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - ], - description="test package for testing conda-build", - packages=['conda_build_test'], - scripts=[ - 'bin/test-script-setup.py', - ], -) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py new file mode 100644 index 0000000000..9f4ce1cbb0 --- /dev/null +++ b/tests/cli/test_main_build.py @@ -0,0 +1,563 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os +import re +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +from conda.exceptions import PackagesNotFoundError + +from conda_build import api +from conda_build.cli import main_build, main_render +from conda_build.config import ( + Config, + zstd_compression_level_default, +) +from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.os_utils.external import find_executable +from conda_build.utils import get_build_folders, on_win, package_has_file + +from ..utils import metadata_dir +from ..utils import reset_config as _reset_config + +if TYPE_CHECKING: + from pytest import FixtureRequest, MonkeyPatch + from pytest_mock import MockerFixture + + from conda_build.metadata import MetaData + + +@pytest.mark.sanity +def test_build_empty_sections(conda_build_test_recipe_envvar: str): + args = [ + "--no-anaconda-upload", + os.path.join(metadata_dir, "empty_sections"), + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + + +@pytest.mark.serial +def test_build_add_channel(): + """This recipe requires the conda_build_test_requirement package, which is + only on the conda_build_test channel. This verifies that the -c argument + works.""" + + args = [ + "-c", + "conda_build_test", + "--no-activate", + "--no-anaconda-upload", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + main_build.execute(args) + + +def test_build_without_channel_fails(testing_workdir): + # remove the conda forge channel from the arguments and make sure that we fail. If we don't, + # we probably have channels in condarc, and this is not a good test. 
+ args = [ + "--no-anaconda-upload", + "--no-activate", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + with pytest.raises(DependencyNeedsBuildingError): + main_build.execute(args) + + +def test_no_filename_hash(testing_workdir, testing_metadata, capfd): + api.output_yaml(testing_metadata, "meta.yaml") + args = ["--output", testing_workdir, "--old-build-string"] + main_render.execute(args) + output, error = capfd.readouterr() + assert not re.search("h[0-9a-f]{%d}" % testing_metadata.config.hash_length, output) + + args = [ + "--no-anaconda-upload", + "--no-activate", + testing_workdir, + "--old-build-string", + ] + main_build.execute(args) + output, error = capfd.readouterr() + assert not re.search( + "test_no_filename_hash.*h[0-9a-f]{%d}" % testing_metadata.config.hash_length, + output, + ) + assert not re.search( + "test_no_filename_hash.*h[0-9a-f]{%d}" % testing_metadata.config.hash_length, + error, + ) + + +def test_build_output_build_path( + testing_workdir, testing_metadata, testing_config, capfd +): + api.output_yaml(testing_metadata, "meta.yaml") + testing_config.verbose = False + testing_config.debug = False + args = ["--output", testing_workdir] + main_build.execute(args) + test_path = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "test_build_output_build_path-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + assert test_path == output.rstrip(), error + assert error == "" + + +def test_build_output_build_path_multiple_recipes( + testing_workdir, testing_metadata, testing_config, capfd +): + api.output_yaml(testing_metadata, "meta.yaml") + testing_config.verbose = False + skip_recipe = os.path.join(metadata_dir, "build_skip") + args = ["--output", testing_workdir, skip_recipe] + + main_build.execute(args) + + test_path = lambda pkg: os.path.join( + testing_config.croot, testing_config.host_subdir, pkg + ) + test_paths = [ + test_path("test_build_output_build_path_multiple_recipes-1.0-1.tar.bz2"), + ] + + output, error = capfd.readouterr() + # assert error == "" + assert output.rstrip().splitlines() == test_paths, error + + +def test_slash_in_recipe_arg_keeps_build_id( + testing_workdir: str, testing_config: Config +): + args = [ + os.path.join(metadata_dir, "has_prefix_files"), + "--croot", + testing_config.croot, + "--no-anaconda-upload", + ] + main_build.execute(args) + + output = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-has-prefix-files-1.0-0.tar.bz2", + ) + data = package_has_file(output, "binary-has-prefix", refresh_mode="forced") + assert data + if hasattr(data, "decode"): + data = data.decode("UTF-8") + assert "conda-build-test-has-prefix-files_1" in data + + +@pytest.mark.sanity +@pytest.mark.skipif(on_win, reason="prefix is always short on win.") +def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): + recipe_path = os.path.join(metadata_dir, "_test_long_test_prefix") + args = [recipe_path, "--no-anaconda-upload"] + main_build.execute(args) + + args.append("--no-long-test-prefix") + with pytest.raises(SystemExit): + main_build.execute(args) + + +def test_build_no_build_id(testing_workdir: str, testing_config: Config): + args = [ + os.path.join(metadata_dir, "has_prefix_files"), + "--no-build-id", + "--croot", + testing_config.croot, + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + + output = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-has-prefix-files-1.0-0.tar.bz2", + ) + 
data = package_has_file(output, "binary-has-prefix", refresh_mode="forced") + assert data + if hasattr(data, "decode"): + data = data.decode("UTF-8") + assert "has_prefix_files_1" not in data + + +def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_config): + """Test that building two recipes in one CLI call separates the build environment for each""" + os.makedirs("recipe1") + os.makedirs("recipe2") + api.output_yaml(testing_metadata, "recipe1/meta.yaml") + with open("recipe1/run_test.py", "w") as f: + f.write( + "import os; assert 'test_build_multiple_recipes' in os.getenv('PREFIX')" + ) + testing_metadata.meta["package"]["name"] = "package2" + api.output_yaml(testing_metadata, "recipe2/meta.yaml") + with open("recipe2/run_test.py", "w") as f: + f.write("import os; assert 'package2' in os.getenv('PREFIX')") + args = ["--no-anaconda-upload", "recipe1", "recipe2"] + main_build.execute(args) + + +def test_build_output_folder(testing_workdir: str, testing_metadata: MetaData): + api.output_yaml(testing_metadata, "meta.yaml") + + out = Path(testing_workdir, "out") + out.mkdir(parents=True) + + args = [ + testing_workdir, + "--no-build-id", + "--croot", + testing_workdir, + "--no-activate", + "--no-anaconda-upload", + "--output-folder", + str(out), + ] + main_build.execute(args) + + assert ( + out / testing_metadata.config.host_subdir / testing_metadata.pkg_fn() + ).is_file() + + +def test_build_source(testing_workdir: str): + args = [ + os.path.join(metadata_dir, "_pyyaml_find_header"), + "--source", + "--no-build-id", + "--croot", + testing_workdir, + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + assert Path(testing_workdir, "work", "setup.py").is_file() + + +@pytest.mark.serial +def test_purge(testing_workdir, testing_metadata): + """ + purge clears out build folders - things like some_pkg_12048309850135 + + It does not clear out build packages from folders like osx-64 or linux-64. 
+ """ + api.output_yaml(testing_metadata, "meta.yaml") + outputs = api.build(testing_workdir, notest=True) + args = ["purge"] + main_build.execute(args) + dirs = get_build_folders(testing_metadata.config.croot) + assert not dirs + # make sure artifacts are kept - only temporary folders get nuked + assert all(os.path.isfile(fn) for fn in outputs) + + +@pytest.mark.serial +def test_purge_all( + testing_workdir: str, testing_metadata: MetaData, tmp_path: Path +) -> None: + """ + purge-all clears out build folders as well as build packages in the osx-64 folders and such + """ + api.output_yaml(testing_metadata, "meta.yaml") + testing_metadata.config.croot = str(tmp_path) + outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) + args = ["purge-all", f"--croot={tmp_path}"] + main_build.execute(args) + assert not get_build_folders(testing_metadata.config.croot) + assert not any(os.path.isfile(fn) for fn in outputs) + + +@pytest.mark.serial +def test_no_force_upload( + mocker: MockerFixture, + monkeypatch: MonkeyPatch, + testing_workdir: str | os.PathLike | Path, + testing_metadata: MetaData, + request: FixtureRequest, +): + # this is nearly identical to tests/test_api_build.py::test_no_force_upload + # only difference is this tests `conda_build.cli.main_build.execute` + request.addfinalizer(_reset_config) + call = mocker.patch("subprocess.call") + anaconda = find_executable("anaconda") + + # render recipe + api.output_yaml(testing_metadata, "meta.yaml") + pkg = api.get_output_file_paths(testing_metadata) + + # mock Config.set_keys to always set anaconda_upload to True + # conda's Context + conda_build's MetaData & Config objects interact in such an + # awful way that mocking these configurations is ugly and confusing, all of it + # needs major refactoring + set_keys = Config.set_keys # store original method + monkeypatch.setattr( + Config, + "set_keys", + lambda self, **kwargs: set_keys(self, **{**kwargs, "anaconda_upload": True}), + ) + + # check for normal upload + main_build.execute(["--no-force-upload", testing_workdir]) + call.assert_called_once_with([anaconda, "upload", *pkg]) + call.reset_mock() + + # check for force upload + main_build.execute([testing_workdir]) + call.assert_called_once_with([anaconda, "upload", "--force", *pkg]) + + +@pytest.mark.slow +def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): + monkeypatch.setenv("CONDA_PY", "36") + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + api.output_yaml(testing_metadata, "meta.yaml") + outputs = api.build(testing_workdir, notest=True) + assert any("py36" in output for output in outputs) + + +def test_build_skip_existing( + testing_workdir, + capfd, + mocker, + conda_build_test_recipe_envvar: str, +): + # build the recipe first + empty_sections = os.path.join(metadata_dir, "empty_sections") + args = ["--no-anaconda-upload", empty_sections] + main_build.execute(args) + args.insert(0, "--skip-existing") + import conda_build.source + + provide = mocker.patch.object(conda_build.source, "provide") + main_build.execute(args) + provide.assert_not_called() + output, error = capfd.readouterr() + assert "are already built" in output or "are already built" in error + + +def test_build_skip_existing_croot( + testing_workdir, + capfd, + conda_build_test_recipe_envvar: str, +): + # build the recipe first + empty_sections = os.path.join(metadata_dir, "empty_sections") + args = ["--no-anaconda-upload", "--croot", testing_workdir, empty_sections] + 
main_build.execute(args) + args.insert(0, "--skip-existing") + main_build.execute(args) + output, error = capfd.readouterr() + assert "are already built" in output + + +@pytest.mark.sanity +def test_package_test(testing_workdir, testing_metadata): + """Test calling conda build -t - rather than """ + api.output_yaml(testing_metadata, "recipe/meta.yaml") + output = api.build(testing_workdir, config=testing_metadata.config, notest=True)[0] + args = ["-t", output] + main_build.execute(args) + + +def test_activate_scripts_not_included(testing_workdir): + recipe = os.path.join(metadata_dir, "_activate_scripts_not_included") + args = ["--no-anaconda-upload", "--croot", testing_workdir, recipe] + main_build.execute(args) + out = api.get_output_file_paths(recipe, croot=testing_workdir)[0] + for f in ( + "bin/activate", + "bin/deactivate", + "bin/conda", + "Scripts/activate.bat", + "Scripts/deactivate.bat", + "Scripts/conda.bat", + "Scripts/activate.exe", + "Scripts/deactivate.exe", + "Scripts/conda.exe", + "Scripts/activate", + "Scripts/deactivate", + "Scripts/conda", + ): + assert not package_has_file(out, f) + + +def test_relative_path_croot( + conda_build_test_recipe_envvar: str, testing_config: Config +): + # this tries to build a package while specifying the croot with a relative path: + # conda-build --no-test --croot ./relative/path + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot = Path(".", "relative", "path") + + args = ["--no-anaconda-upload", f"--croot={croot}", str(empty_sections)] + main_build.execute(args) + + assert len(list(croot.glob("**/*.tar.bz2"))) == 1 + assert ( + croot / testing_config.subdir / "empty_with_build_script-0.0-0.tar.bz2" + ).is_file() + + +def test_relative_path_test_artifact( + conda_build_test_recipe_envvar: str, testing_config: Config +): + # this test builds a package into (cwd)/relative/path and then calls: + # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot_rel = Path(".", "relative", "path") + croot_abs = croot_rel.resolve() + + # build the package + args = [ + "--no-anaconda-upload", + "--no-test", + f"--croot={croot_abs}", + str(empty_sections), + ] + main_build.execute(args) + + assert len(list(croot_abs.glob("**/*.tar.bz2"))) == 1 + + # run the test stage with relative path + args = [ + "--no-anaconda-upload", + "--test", + os.path.join( + croot_rel, + testing_config.subdir, + "empty_with_build_script-0.0-0.tar.bz2", + ), + ] + main_build.execute(args) + + +def test_relative_path_test_recipe(conda_build_test_recipe_envvar: str): + # this test builds a package into (cwd)/relative/path and then calls: + # conda-build --test --croot ./relative/path/ /abs/path/to/recipe + + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot_rel = Path(".", "relative", "path") + croot_abs = croot_rel.resolve() + + # build the package + args = [ + "--no-anaconda-upload", + "--no-test", + f"--croot={croot_abs}", + str(empty_sections), + ] + main_build.execute(args) + + assert len(list(croot_abs.glob("**/*.tar.bz2"))) == 1 + + # run the test stage with relative croot + args = [ + "--no-anaconda-upload", + "--test", + f"--croot={croot_rel}", + str(empty_sections), + ] + main_build.execute(args) + + +def test_test_extra_dep(testing_metadata): + testing_metadata.meta["test"]["imports"] = ["imagesize"] + api.output_yaml(testing_metadata, "meta.yaml") + output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] + + # tests version 
constraints. CLI would quote this - "imagesize <1.0" + args = [output, "-t", "--extra-deps", "imagesize <1.0"] + # extra_deps will add it in + main_build.execute(args) + + # missing imagesize dep will fail tests + with pytest.raises(SystemExit): + args = [output, "-t"] + # without --extra-deps the imagesize import test fails + main_build.execute(args) + + +@pytest.mark.parametrize( + "additional_args, is_long_test_prefix", + [([], True), (["--long-test-prefix"], True), (["--no-long-test-prefix"], False)], +) +def test_long_test_prefix(additional_args, is_long_test_prefix): + args = ["non_existing_recipe"] + additional_args + parser, args = main_build.parse_args(args) + config = Config(**args.__dict__) + assert config.long_test_prefix is is_long_test_prefix + + +@pytest.mark.serial +@pytest.mark.parametrize( + "zstd_level_condarc, zstd_level_cli", + [ + (None, None), + (1, None), + (1, 2), + ], +) +def test_zstd_compression_level( + testing_workdir, request, zstd_level_condarc, zstd_level_cli +): + assert zstd_compression_level_default not in {zstd_level_condarc, zstd_level_cli} + if zstd_level_condarc: + with open(os.path.join(testing_workdir, ".condarc"), "w") as f: + print( + "conda_build:", + f" zstd_compression_level: {zstd_level_condarc}", + sep="\n", + file=f, + ) + request.addfinalizer(_reset_config) + _reset_config([os.path.join(testing_workdir, ".condarc")]) + args = ["non_existing_recipe"] + if zstd_level_cli: + args.append(f"--zstd-compression-level={zstd_level_cli}") + parser, args = main_build.parse_args(args) + config = Config(**args.__dict__) + if zstd_level_cli: + assert config.zstd_compression_level == zstd_level_cli + elif zstd_level_condarc: + assert config.zstd_compression_level == zstd_level_condarc + else: + assert config.zstd_compression_level == zstd_compression_level_default + + +def test_user_warning(tmpdir, recwarn): + dir_recipe_path = tmpdir.mkdir("recipe-path") + recipe = dir_recipe_path.join("meta.yaml") + recipe.write("") + + main_build.parse_args([str(recipe)]) + assert ( + f"RECIPE_PATH received is a file ({recipe}).\n" + "It should be a path to a folder.\n" + "Forcing conda-build to use the recipe file." 
+ ) == str(recwarn.pop(UserWarning).message) + + main_build.parse_args([str(dir_recipe_path)]) + assert not recwarn.list + + +def test_build_with_empty_channel_fails(empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_build.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + ) diff --git a/tests/cli/test_main_convert.py b/tests/cli/test_main_convert.py new file mode 100644 index 0000000000..9ff65849d9 --- /dev/null +++ b/tests/cli/test_main_convert.py @@ -0,0 +1,37 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os + +import pytest +from conda.gateways.connection.download import download + +from conda_build.cli import main_convert +from conda_build.tarcheck import TarCheck +from conda_build.utils import on_win + + +@pytest.mark.xfail( + on_win, + reason="This is a flaky test that doesn't seem to be working well on Windows.", +) +def test_convert(testing_workdir, testing_config): + # download a sample py2.7 package + f = "https://repo.anaconda.com/pkgs/free/win-64/affine-2.0.0-py27_0.tar.bz2" + pkg_name = "affine-2.0.0-py27_0.tar.bz2" + download(f, pkg_name) + # convert it to all platforms + args = ["-o", "converted", "--platform", "all", pkg_name] + main_convert.execute(args) + platforms = ["osx-64", "win-32", "linux-64", "linux-32"] + for platform in platforms: + dirname = os.path.join("converted", platform) + if platform != "win-64": + assert os.path.isdir(dirname) + assert pkg_name in os.listdir(dirname) + testing_config.host_subdir = platform + with TarCheck( + os.path.join(dirname, pkg_name), config=testing_config + ) as tar: + tar.correct_subdir() + else: + assert not os.path.isdir(dirname) diff --git a/tests/cli/test_main_debug.py b/tests/cli/test_main_debug.py index 991000279b..ae4f22441d 100644 --- a/tests/cli/test_main_debug.py +++ b/tests/cli/test_main_debug.py @@ -1,63 +1,53 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import io -import os.path import sys +from pathlib import Path from unittest import mock import pytest -from pytest import CaptureFixture +from pytest import CaptureFixture, MonkeyPatch -from conda_build.cli import main_debug as debug, validators as valid +from conda_build.cli import main_debug as debug +from conda_build.cli import validators as valid -@pytest.fixture(scope='module') -def main_debug_help() -> str: - """Read what the current help message should be and return it as a fixture""" - sys.argv = ['conda-debug'] - parser = debug.get_parser() +def test_main_debug_help_message(capsys: CaptureFixture, monkeypatch: MonkeyPatch): + monkeypatch.setattr(sys, "argv", ["conda-debug", "-h"]) + help_blurb = debug.get_parser().format_help() - with io.StringIO() as fp: - parser.print_usage(file=fp) - fp.seek(0) - yield fp.read() - - sys.argv = [] - - -def test_main_debug_help_message(capsys: CaptureFixture, main_debug_help: str): with pytest.raises(SystemExit): - debug.main() + debug.execute() captured = capsys.readouterr() - assert main_debug_help in captured.err + assert help_blurb in captured.out -def test_main_debug_file_does_not_exist(capsys: CaptureFixture): - sys.argv = ['conda-debug', 'file-does-not-exist'] +def test_main_debug_file_does_not_exist( + capsys: CaptureFixture, monkeypatch: MonkeyPatch +): + monkeypatch.setattr(sys, "argv", ["conda-debug", "file-does-not-exist"]) with pytest.raises(SystemExit): - debug.main() + debug.execute() captured = 
capsys.readouterr() assert valid.CONDA_PKG_OR_RECIPE_ERROR_MESSAGE in captured.err -def test_main_debug_happy_path(tmpdir, capsys: CaptureFixture): +def test_main_debug_happy_path( + tmp_path: Path, capsys: CaptureFixture, monkeypatch: MonkeyPatch +): """ Happy path through the main_debug.main function. """ - with mock.patch("conda_build.api.debug") as mock_debug: - fake_pkg_file = os.path.join(tmpdir, "fake-conda-pkg.conda") - fp = open(fake_pkg_file, "w") - fp.write("text") - fp.close() - sys.argv = ['conda-debug', fake_pkg_file] + fake = tmp_path / "fake-conda-pkg.conda" + fake.touch() + monkeypatch.setattr(sys, "argv", ["conda-debug", str(fake)]) - debug.main() + with mock.patch("conda_build.api.debug") as mock_debug: + debug.execute() captured = capsys.readouterr() - - assert captured.err == '' + assert captured.err == "" assert len(mock_debug.mock_calls) == 2 diff --git a/tests/cli/test_main_develop.py b/tests/cli/test_main_develop.py new file mode 100644 index 0000000000..c0c3cdca3d --- /dev/null +++ b/tests/cli/test_main_develop.py @@ -0,0 +1,30 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import sys + +from conda.gateways.connection.download import download + +from conda_build.cli import main_develop +from conda_build.utils import get_site_packages, tar_xf + + +def test_develop(testing_env): + f = "https://pypi.io/packages/source/c/conda_version_test/conda_version_test-0.1.0-1.tar.gz" + download(f, "conda_version_test.tar.gz") + tar_xf("conda_version_test.tar.gz", testing_env) + extract_folder = "conda_version_test-0.1.0-1" + cwd = os.getcwd() + args = ["-p", testing_env, extract_folder] + main_develop.execute(args) + py_ver = ".".join((str(sys.version_info.major), str(sys.version_info.minor))) + with open( + os.path.join(get_site_packages(testing_env, py_ver), "conda.pth") + ) as f_pth: + assert cwd in f_pth.read() + args = ["--uninstall", "-p", testing_env, extract_folder] + main_develop.execute(args) + with open( + os.path.join(get_site_packages(testing_env, py_ver), "conda.pth") + ) as f_pth: + assert cwd not in f_pth.read() diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py new file mode 100644 index 0000000000..b8931b5220 --- /dev/null +++ b/tests/cli/test_main_inspect.py @@ -0,0 +1,84 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import re +import sys + +import pytest +import yaml + +from conda_build import api +from conda_build.cli import main_inspect +from conda_build.utils import on_win + +from ..utils import metadata_dir + + +def test_inspect_installable(testing_workdir): + args = ["channels", "--test-installable", "conda-team"] + main_inspect.execute(args) + + +def test_inspect_linkages(testing_workdir, capfd): + # get a package that has known object output + args = ["linkages", "python"] + if on_win: + with pytest.raises(SystemExit) as exc: + main_inspect.execute(args) + assert "conda inspect linkages is only implemented in Linux and OS X" in exc + else: + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "libncursesw" in output + + +def test_inspect_objects(testing_workdir, capfd): + # get a package that has known object output + args = ["objects", "python"] + if sys.platform != "darwin": + with pytest.raises(SystemExit) as exc: + main_inspect.execute(args) + assert "conda inspect objects is only implemented in OS X" in exc + else: + main_inspect.execute(args) + output, error = capfd.readouterr() + assert 
re.search("rpath:.*@loader_path", output) + + +@pytest.mark.skipif(on_win, reason="Windows prefix length doesn't matter (yet?)") +def test_inspect_prefix_length(testing_workdir, capfd): + from conda_build import api + + # build our own known-length package here + test_base = os.path.expanduser("~/cbtmp") + config = api.Config(croot=test_base, anaconda_upload=False, verbose=True) + recipe_path = os.path.join(metadata_dir, "has_prefix_files") + config.prefix_length = 80 + outputs = api.build(recipe_path, config=config, notest=True) + + args = ["prefix-lengths"] + outputs + with pytest.raises(SystemExit): + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "Packages with binary prefixes shorter than" in output + assert all(fn in output for fn in outputs) + + config.prefix_length = 255 + # reset the build id so that a new one is computed + config._build_id = "" + api.build(recipe_path, config=config, notest=True) + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "No packages found with binary prefixes shorter" in output + + +def test_inspect_hash_input(testing_metadata, testing_workdir, capfd): + testing_metadata.meta["requirements"]["build"] = ["zlib"] + api.output_yaml(testing_metadata, "meta.yaml") + output = api.build(testing_workdir, notest=True)[0] + with open(os.path.join(testing_workdir, "conda_build_config.yaml"), "w") as f: + yaml.dump({"zlib": ["1.2.11"]}, f) + args = ["hash-inputs", output] + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "zlib" in output diff --git a/tests/cli/test_main_metapackage.py b/tests/cli/test_main_metapackage.py new file mode 100644 index 0000000000..44ec145264 --- /dev/null +++ b/tests/cli/test_main_metapackage.py @@ -0,0 +1,97 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import json +import os +from glob import glob + +from conda_build.cli import main_metapackage +from conda_build.utils import package_has_file + + +def test_metapackage(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = ["metapackage_test", "1.0", "-d", "bzip2", "--no-anaconda-upload"] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + testing_config.croot, + testing_config.host_subdir, + "metapackage_test-1.0-0.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + + +def test_metapackage_build_number(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = [ + "metapackage_test_build_number", + "1.0", + "-d", + "bzip2", + "--build-number", + "1", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + testing_config.croot, + testing_config.host_subdir, + "metapackage_test_build_number-1.0-1.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + + +def test_metapackage_build_string(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = [ + "metapackage_test_build_string", + "1.0", + "-d", + "bzip2", + "--build-string", + "frank", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + testing_config.croot, + testing_config.host_subdir, + "metapackage_test_build_string-1.0-frank*.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + + +def test_metapackage_metadata(testing_config, testing_workdir): + args = [ 
+ "metapackage_testing_metadata", + "1.0", + "-d", + "bzip2", + "--home", + "http://abc.com", + "--summary", + "wee", + "--license", + "BSD", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + + test_path = glob( + os.path.join( + testing_config.croot, + testing_config.host_subdir, + "metapackage_testing_metadata-1.0-0.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + info = json.loads(package_has_file(test_path, "info/index.json")) + assert info["license"] == "BSD" + info = json.loads(package_has_file(test_path, "info/about.json")) + assert info["home"] == "http://abc.com" + assert info["summary"] == "wee" diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py new file mode 100644 index 0000000000..ef5fdf077d --- /dev/null +++ b/tests/cli/test_main_render.py @@ -0,0 +1,162 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING + +import pytest +import yaml +from conda.exceptions import PackagesNotFoundError + +from conda_build import api +from conda_build.cli import main_render + +from ..utils import metadata_dir + +if TYPE_CHECKING: + from pathlib import Path + + +def test_render_add_channel(tmp_path: Path) -> None: + """This recipe requires the conda_build_test_requirement package, which is + only on the conda_build_test channel. This verifies that the -c argument + works for rendering.""" + rendered_filename = os.path.join(tmp_path, "out.yaml") + args = [ + "-c", + "conda_build_test", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + rendered_filename, + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta["requirements"]["build"] + if "conda_build_test_requirement" in pkg + ][0] + required_package_details = required_package_string.split(" ") + assert len(required_package_details) > 1, ( + "Expected version number on successful " + f"rendering, but got only {required_package_details}" + ) + assert ( + required_package_details[1] == "1.0" + ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" + + +def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_render.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + f"--file={tmp_path / 'out.yaml'}", + ] + ) + + +def test_render_output_build_path( + testing_workdir, testing_config, testing_metadata, capfd, caplog +): + api.output_yaml(testing_metadata, "meta.yaml") + args = ["--output", testing_workdir] + main_render.execute(args) + test_path = os.path.join( + testing_config.croot, + testing_metadata.config.host_subdir, + "test_render_output_build_path-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + assert output.rstrip() == test_path, error + assert error == "" + + +def test_render_output_build_path_and_file( + testing_workdir, testing_config, testing_metadata, capfd, caplog +): + api.output_yaml(testing_metadata, "meta.yaml") + rendered_filename = "out.yaml" + args = ["--output", "--file", rendered_filename, testing_workdir] + main_render.execute(args) + test_path = os.path.join( + testing_config.croot, + testing_metadata.config.host_subdir, + 
"test_render_output_build_path_and_file-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + assert output.rstrip() == test_path, error + assert error == "" + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + assert rendered_meta["package"]["name"] == "test_render_output_build_path_and_file" + + +def test_render_output_build_path_set_python(testing_workdir, testing_metadata, capfd): + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + api.output_yaml(testing_metadata, "meta.yaml") + # build the other major thing, whatever it is + if sys.version_info.major == 3: + version = "2.7" + else: + version = "3.5" + + api.output_yaml(testing_metadata, "meta.yaml") + metadata = api.render(testing_workdir, python=version)[0][0] + + args = ["--output", testing_workdir, "--python", version] + main_render.execute(args) + + _hash = metadata.hash_dependencies() + test_path = ( + "test_render_output_build_path_set_python-1.0-py{}{}{}_1.tar.bz2".format( + version.split(".")[0], version.split(".")[1], _hash + ) + ) + output, error = capfd.readouterr() + assert os.path.basename(output.rstrip()) == test_path, error + + +@pytest.mark.slow +def test_render_with_python_arg_reduces_subspace(capfd): + recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") + # build the package + args = [recipe, "--python=2.7", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 2 + + args = [recipe, "--python=3.9", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 1 + + # should raise an error, because python 3.6 is not in the matrix, so we don't know which vc + # to associate with + args = [recipe, "--python=3.6", "--output"] + with pytest.raises(ValueError): + main_render.execute(args) + + +def test_render_with_python_arg_CLI_reduces_subspace(capfd): + recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") + # build the package + args = [recipe, "--variants", "{python: [2.7, 3.9]}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 3 + + args = [recipe, "--variants", "{python: 2.7}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 2 + + args = [recipe, "--variants", "{python: 3.9}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 1 diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py new file mode 100644 index 0000000000..c2dd0a65b5 --- /dev/null +++ b/tests/cli/test_main_skeleton.py @@ -0,0 +1,59 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os + +import pytest + +from conda_build import api +from conda_build.cli import main_build, main_skeleton + + +@pytest.mark.sanity +def test_skeleton_pypi(testing_workdir, testing_config): + args = ["pypi", "peppercorn"] + main_skeleton.execute(args) + assert os.path.isdir("peppercorn") + + # ensure that recipe generated is buildable + main_build.execute(("peppercorn",)) + + +@pytest.mark.sanity +def test_skeleton_pypi_compatible_versions(testing_workdir, testing_config): + args = ["pypi", "openshift"] + main_skeleton.execute(args) + assert os.path.isdir("openshift") + + +@pytest.mark.slow +def test_skeleton_pypi_arguments_work(testing_workdir): + """ + These checks whether skeleton executes without error when 
these + options are specified on the command line AND whether the underlying + functionality works as a regression test for: + + https://github.com/conda/conda-build/pull/1384 + """ + args = ["pypi", "fasttext", "--version=0.9.2", "--pin-numpy"] + main_skeleton.execute(args) + assert os.path.isdir("fasttext") + + # Deliberately bypass metadata reading in conda build to get as + # close to the "ground truth" as possible. + with open(os.path.join("fasttext", "meta.yaml")) as f: + assert f.read().count("numpy x.x") == 2 + + args = [ + "pypi", + "photutils", + "--version=1.10.0", + "--setup-options=--offline", + "--extra-specs=extension-helpers", + ] + main_skeleton.execute(args) + assert os.path.isdir("photutils") + # Check that the setup option occurs in bld.bat and build.sh. + + metadata = api.render("photutils")[0][0] + assert "--offline" in metadata.meta["build"]["script"] + assert metadata.version() == "1.10.0" diff --git a/tests/cli/test_validators.py b/tests/cli/test_validators.py index 7de6c87d56..0809058a5b 100644 --- a/tests/cli/test_validators.py +++ b/tests/cli/test_validators.py @@ -1,8 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os from argparse import ArgumentError -from typing import Union import pytest @@ -10,18 +11,28 @@ @pytest.mark.parametrize( - 'file_or_folder,expected,is_dir,create', + "file_or_folder,expected,is_dir,create", [ # Happy path cases - ('aws-c-common-0.4.57-hb1e8313_1.tar.bz2', 'aws-c-common-0.4.57-hb1e8313_1.tar.bz2', False, True), - ('aws-c-common-0.4.57-hb1e8313_1.conda', 'aws-c-common-0.4.57-hb1e8313_1.conda', False, True), - ('somedir', 'somedir', True, True), + ( + "aws-c-common-0.4.57-hb1e8313_1.tar.bz2", + "aws-c-common-0.4.57-hb1e8313_1.tar.bz2", + False, + True, + ), + ( + "aws-c-common-0.4.57-hb1e8313_1.conda", + "aws-c-common-0.4.57-hb1e8313_1.conda", + False, + True, + ), + ("somedir", "somedir", True, True), # Error case (i.e. 
the file or directory does not exist - ('aws-c-common-0.4.57-hb1e8313_1.conda', False, False, False), + ("aws-c-common-0.4.57-hb1e8313_1.conda", False, False, False), ], ) def test_validate_is_conda_pkg_or_recipe_dir( - file_or_folder: str, expected: Union[str, bool], is_dir: bool, create: bool, tmpdir + file_or_folder: str, expected: str | bool, is_dir: bool, create: bool, tmpdir ): if create: file_or_folder = os.path.join(tmpdir, file_or_folder) @@ -34,7 +45,10 @@ def test_validate_is_conda_pkg_or_recipe_dir( try: received = valid.validate_is_conda_pkg_or_recipe_dir(file_or_folder) - except (ArgumentError, SystemExit): # if we get these errors, we know it's not valid + except ( + ArgumentError, + SystemExit, + ): # if we get these errors, we know it's not valid received = False assert received == expected diff --git a/tests/conftest.py b/tests/conftest.py index 981a439e3a..465cab6fcc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,15 +1,23 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os +import subprocess import sys +import tempfile from collections import defaultdict +from pathlib import Path +from typing import Iterator import pytest +from conda.common.compat import on_mac, on_win +from conda_index.api import update_index +from pytest import MonkeyPatch +import conda_build import conda_build.config from conda_build.config import ( Config, - get_or_merge_config, + _get_or_merge_config, _src_cache_root_default, conda_pkg_format_default, enable_static_default, @@ -19,75 +27,58 @@ filename_hashing_default, ignore_verify_codes_default, no_rewrite_stdout_env_default, - noarch_python_build_age_default, ) from conda_build.metadata import MetaData from conda_build.utils import check_call_env, copy_into, prepend_bin_path from conda_build.variants import get_default_variant +@pytest.hookimpl +def pytest_report_header(config: pytest.Config): + # ensuring the expected development conda is being run + expected = Path(__file__).parent.parent / "conda_build" / "__init__.py" + assert expected.samefile(conda_build.__file__) + return f"conda_build.__file__: {conda_build.__file__}" + + @pytest.fixture(scope="function") -def testing_workdir(tmpdir, request): +def testing_workdir(monkeypatch: MonkeyPatch, tmp_path: Path) -> Iterator[str]: """Create a workdir in a safe temporary folder; cd into dir above before test, cd out after :param tmpdir: py.test fixture, will be injected :param request: py.test fixture-related, will be injected (see pytest docs) """ + saved_path = Path.cwd() + monkeypatch.chdir(tmp_path) - saved_path = os.getcwd() - - tmpdir.chdir() # temporary folder for profiling output, if any - tmpdir.mkdir("prof") + prof = tmp_path / "prof" + prof.mkdir(parents=True) - def return_to_saved_path(): - if os.path.isdir(os.path.join(saved_path, "prof")): - profdir = tmpdir.join("prof") - files = profdir.listdir("*.prof") if profdir.isdir() else [] + yield str(tmp_path) - for f in files: - copy_into(str(f), os.path.join(saved_path, "prof", f.basename)) - os.chdir(saved_path) - - request.addfinalizer(return_to_saved_path) - - return str(tmpdir) + # if the original CWD has a prof folder, copy any new prof files into it + if (saved_path / "prof").is_dir() and prof.is_dir(): + for file in prof.glob("*.prof"): + copy_into(str(file), str(saved_path / "prof" / file.name)) @pytest.fixture(scope="function") -def testing_homedir(tmpdir, request): - """Create a homedir in the users home directory; cd into dir above before test, cd out after - - :param tmpdir: 
py.test fixture, will be injected - :param request: py.test fixture-related, will be injected (see pytest docs) - """ - - saved_path = os.getcwd() - d1 = os.path.basename(tmpdir) - d2 = os.path.basename(os.path.dirname(tmpdir)) - d3 = os.path.basename(os.path.dirname(os.path.dirname(tmpdir))) - new_dir = os.path.join(os.path.expanduser("~"), d1, d2, d3, "pytest.conda-build") - # While pytest will make sure a folder in unique - if os.path.exists(new_dir): - import shutil - - try: - shutil.rmtree(new_dir) - except: - pass +def testing_homedir() -> Iterator[Path]: + """Create a temporary testing directory in the users home directory; cd into dir before test, cd out after.""" + saved = Path.cwd() try: - os.makedirs(new_dir) - except: - print(f"Failed to create {new_dir}") - return None - os.chdir(new_dir) - - def return_to_saved_path(): - os.chdir(saved_path) + with tempfile.TemporaryDirectory(dir=Path.home(), prefix=".pytest_") as home: + os.chdir(home) - request.addfinalizer(return_to_saved_path) + yield home - return str(new_dir) + os.chdir(saved) + except OSError: + pytest.xfail( + f"failed to create temporary directory () in {'%HOME%' if on_win else '${HOME}'} " + "(tmpfs inappropriate for xattrs)" + ) @pytest.fixture(scope="function") @@ -95,13 +86,12 @@ def testing_config(testing_workdir): def boolify(v): return v == "true" - result = Config( + testing_config_kwargs = dict( croot=testing_workdir, anaconda_upload=False, verbose=True, activate=False, debug=False, - variant=None, test_run_post=False, # These bits ensure that default values are used instead of any # present in ~/.condarc @@ -109,17 +99,17 @@ def boolify(v): _src_cache_root=_src_cache_root_default, error_overlinking=boolify(error_overlinking_default), error_overdepending=boolify(error_overdepending_default), - noarch_python_build_age=noarch_python_build_age_default, enable_static=boolify(enable_static_default), no_rewrite_stdout_env=boolify(no_rewrite_stdout_env_default), ignore_verify_codes=ignore_verify_codes_default, exit_on_verify_error=exit_on_verify_error_default, conda_pkg_format=conda_pkg_format_default, ) + result = Config(variant=None, **testing_config_kwargs) + result._testing_config_kwargs = testing_config_kwargs assert result.no_rewrite_stdout_env is False assert result._src_cache_root is None assert result.src_cache_root == testing_workdir - assert result.noarch_python_build_age == 0 return result @@ -135,11 +125,21 @@ def default_testing_config(testing_config, monkeypatch, request): return def get_or_merge_testing_config(config, variant=None, **kwargs): - return get_or_merge_config(config or testing_config, variant, **kwargs) + if not config: + # If no existing config, override kwargs that are None with testing config defaults. + # (E.g., "croot" is None if called via "(..., *args.__dict__)" in cli.main_build.) + kwargs.update( + { + key: value + for key, value in testing_config._testing_config_kwargs.items() + if kwargs.get(key) is None + } + ) + return _get_or_merge_config(config, variant, **kwargs) monkeypatch.setattr( conda_build.config, - "get_or_merge_config", + "_get_or_merge_config", get_or_merge_testing_config, ) @@ -186,22 +186,74 @@ def testing_env(testing_workdir, request, monkeypatch): return env_path -# these are functions so that they get regenerated each time we use them. -# They could be fixtures, I guess. 
-@pytest.fixture(scope="function") -def numpy_version_ignored(): - return { - "python": ["2.7.*", "3.5.*"], - "numpy": ["1.10.*", "1.11.*"], - "ignore_version": ["numpy"], - } +@pytest.fixture( + scope="function", + params=[ + pytest.param({}, id="default MACOSX_DEPLOYMENT_TARGET"), + pytest.param( + {"MACOSX_DEPLOYMENT_TARGET": ["10.9"]}, + id="override MACOSX_DEPLOYMENT_TARGET", + ), + ] + if on_mac + else [ + pytest.param({}, id="no MACOSX_DEPLOYMENT_TARGET"), + ], +) +def variants_conda_build_sysroot(monkeypatch, request): + if not on_mac: + return {} + monkeypatch.setenv( + "CONDA_BUILD_SYSROOT", + subprocess.run( + ["xcrun", "--sdk", "macosx", "--show-sdk-path"], + check=True, + capture_output=True, + text=True, + ).stdout.strip(), + ) + monkeypatch.setenv( + "MACOSX_DEPLOYMENT_TARGET", + subprocess.run( + ["xcrun", "--sdk", "macosx", "--show-sdk-version"], + check=True, + capture_output=True, + text=True, + ).stdout.strip(), + ) + return request.param -@pytest.fixture(scope="function") -def single_version(): - return {"python": "2.7.*", "numpy": "1.11.*"} +@pytest.fixture(scope="session") +def conda_build_test_recipe_path(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Clone conda_build_test_recipe. -@pytest.fixture(scope="function") -def no_numpy_version(): - return {"python": ["2.7.*", "3.5.*"]} + This exposes the special dummy package "source code" used to test various git/svn/local recipe configurations. + """ + # clone conda_build_test_recipe locally + repo = tmp_path_factory.mktemp("conda_build_test_recipe", numbered=False) + subprocess.run( + ["git", "clone", "https://github.com/conda/conda_build_test_recipe", str(repo)], + check=True, + ) + return repo + + +@pytest.fixture +def conda_build_test_recipe_envvar( + conda_build_test_recipe_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> str: + """Exposes the cloned conda_build_test_recipe as an environment variable.""" + name = "CONDA_BUILD_TEST_RECIPE_PATH" + monkeypatch.setenv(name, str(conda_build_test_recipe_path)) + return name + + +@pytest.fixture(scope="session") +def empty_channel(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Create a temporary, empty conda channel.""" + channel = tmp_path_factory.mktemp("empty_channel", numbered=False) + update_index(channel) + return channel diff --git a/tests/data/ldd/clear.elf b/tests/data/ldd/clear.elf new file mode 100755 index 0000000000..52013aa3ee Binary files /dev/null and b/tests/data/ldd/clear.elf differ diff --git a/tests/data/ldd/clear.exe b/tests/data/ldd/clear.exe new file mode 100644 index 0000000000..bd7543feba Binary files /dev/null and b/tests/data/ldd/clear.exe differ diff --git a/tests/data/ldd/clear.macho b/tests/data/ldd/clear.macho new file mode 100755 index 0000000000..8de24d5608 Binary files /dev/null and b/tests/data/ldd/clear.macho differ diff --git a/tests/data/ldd/jansi.dll b/tests/data/ldd/jansi.dll new file mode 100755 index 0000000000..81433eef25 Binary files /dev/null and b/tests/data/ldd/jansi.dll differ diff --git a/tests/data/ldd/uuid.pyd b/tests/data/ldd/uuid.pyd new file mode 100644 index 0000000000..f99ad10b9b Binary files /dev/null and b/tests/data/ldd/uuid.pyd differ diff --git a/tests/os_utils/test_codefile.py b/tests/os_utils/test_codefile.py new file mode 100644 index 0000000000..3cdcc0854d --- /dev/null +++ b/tests/os_utils/test_codefile.py @@ -0,0 +1,43 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from pathlib import Path +from typing import 
TYPE_CHECKING + +import pytest + +from conda_build.os_utils.liefldd import codefile_class as liefldd_codefile_class +from conda_build.os_utils.pyldd import DLLfile, EXEfile, elffile, machofile +from conda_build.os_utils.pyldd import codefile_class as pyldd_codefile_class + +if TYPE_CHECKING: + from typing import Callable + +LDD = Path(__file__).parent.parent / "data" / "ldd" + + +@pytest.mark.parametrize( + "path,expect", + [ + pytest.param(__file__, None, id="Unknown"), + pytest.param(LDD / "jansi.dll", DLLfile, id="DLL"), + pytest.param(LDD / "uuid.pyd", DLLfile, id="PYD"), + pytest.param(LDD / "clear.exe", EXEfile, id="EXE"), + pytest.param(LDD / "clear.macho", machofile, id="MACHO"), + pytest.param(LDD / "clear.elf", elffile, id="ELF"), + ], +) +@pytest.mark.parametrize( + "codefile_class", + [ + pytest.param(pyldd_codefile_class, id="pyldd"), + pytest.param(liefldd_codefile_class, id="liefldd"), + ], +) +def test_codefile_class( + path: str | Path, + expect: type[DLLfile | EXEfile | machofile | elffile] | None, + codefile_class: Callable, +): + assert codefile_class(path) == expect diff --git a/tests/requirements-Linux.txt b/tests/requirements-Linux.txt new file mode 100644 index 0000000000..149ce09bad --- /dev/null +++ b/tests/requirements-Linux.txt @@ -0,0 +1,3 @@ +patch +patchelf +shellcheck diff --git a/tests/requirements-Windows.txt b/tests/requirements-Windows.txt new file mode 100644 index 0000000000..d08b4cac29 --- /dev/null +++ b/tests/requirements-Windows.txt @@ -0,0 +1,2 @@ +m2-git +m2-patch diff --git a/tests/requirements-ci.txt b/tests/requirements-ci.txt new file mode 100644 index 0000000000..23d78bb0b2 --- /dev/null +++ b/tests/requirements-ci.txt @@ -0,0 +1,19 @@ +anaconda-client +conda-forge::xdoctest +conda-verify +contextlib2 +coverage +cytoolz +git +numpy +perl +pip +pyflakes +pytest +pytest-cov +pytest-forked +pytest-mock +pytest-rerunfailures +pytest-xdist +ruamel.yaml +tomli # [py<3.11] for coverage pyproject.toml diff --git a/tests/requirements-macOS.txt b/tests/requirements-macOS.txt new file mode 100644 index 0000000000..133b191333 --- /dev/null +++ b/tests/requirements-macOS.txt @@ -0,0 +1,2 @@ +patch +shellcheck diff --git a/tests/requirements.txt b/tests/requirements.txt index 98e4dbf048..acb3317206 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,18 +1,23 @@ -# conda-build -# run as 'conda install --file tests/requirements.txt -c defaults' -anaconda-client -bs4 +beautifulsoup4 chardet -conda -conda-package-handling +conda >=23.7.0 +conda-index >=0.4.0 +conda-libmamba-solver # ensure we use libmamba +conda-package-handling >=1.3 filelock -glob2 +frozendict >=2.4.2 jinja2 +jsonschema >=4.19 +menuinst >=2 +packaging pkginfo psutil -pytest -pytest-cov -pytest-mock -pytest-rerunfailures +py-lief +python >=3.8 python-libarchive-c -toml +pytz +pyyaml +requests +ripgrep # for faster grep +setuptools_scm # needed for devenv version detection +tqdm diff --git a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml index 7213b08a6d..441ba67fd6 100644 --- a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml +++ b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml @@ -3,7 +3,7 @@ package: version: {{ GIT_DSECRIBE_TAG }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.20.2 requirements: diff --git a/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml 
b/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml index 97524573aa..80037b0ede 100644 --- a/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml +++ b/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml @@ -33,8 +33,6 @@ rust_compiler: - rust rust_compiler_version: - 1.29.0 -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] VERBOSE_AT: - V=1 VERBOSE_CM: @@ -155,8 +153,6 @@ macos_min_version: - 10.9 macos_machine: - x86_64-apple-darwin13.4.0 -MACOSX_DEPLOYMENT_TARGET: - - 10.9 mkl: - 2019 mpfr: diff --git a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml index cbe6ac859b..4bc665ad7d 100644 --- a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml +++ b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml @@ -1,5 +1,6 @@ package: name: pkg + version: 0.0.1 # build: # merge_build_host: False diff --git a/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml b/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml deleted file mode 100644 index 12632709ff..0000000000 --- a/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml +++ /dev/null @@ -1,10 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk - - /opt/MacOSX10.11.sdk -CONDA_BUILD_SYSROOT_NAME: - - sdk_10_10 - - sdk_10_11 -zip_keys: - - - - CONDA_BUILD_SYSROOT - - CONDA_BUILD_SYSROOT_NAME diff --git a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml index 4d79df4009..12b4442977 100644 --- a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml +++ b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml @@ -9,13 +9,8 @@ source: build: number: 0 - # .. this does not work .. - # string: {{ CONDA_BUILD_SYSROOT | replace('/', '_') | replace('.', '_') | lower() }} - # .. conda-build needs this as a hint, it does not detect the above CONDA_BUILD_SYSROOT | replace .. - # as jinja2-variant-fodder (I think). - string: {{ CONDA_BUILD_SYSROOT_NAME }} error_overlinking: True - error_ocerdepending: True + error_overdepending: True requirements: build: diff --git a/tests/test-recipes/metadata/_menu_json_validation/menu.json b/tests/test-recipes/metadata/_menu_json_validation/menu.json new file mode 100644 index 0000000000..eeed9e756f --- /dev/null +++ b/tests/test-recipes/metadata/_menu_json_validation/menu.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "$id": "https://schemas.conda.io/menuinst-1.schema.json", + "menu_name": "Example 1", + "menu_items": [ + { + "name": "Example", + "description": "This will install to Windows and Linux with default options. 
MacOS has a custom option.", + "command": [ + "{{ PYTHON }}", + "-c", + "import sys; print(sys.executable)" + ], + "platforms": { + "win": {}, + "linux": {}, + "osx": { + "CFBundleName": "My Example" + } + } + } + ] +} \ No newline at end of file diff --git a/tests/test-recipes/metadata/_menu_json_validation/meta.yaml b/tests/test-recipes/metadata/_menu_json_validation/meta.yaml new file mode 100644 index 0000000000..ac23805ec9 --- /dev/null +++ b/tests/test-recipes/metadata/_menu_json_validation/meta.yaml @@ -0,0 +1,10 @@ +package: + name: menu_json_validation + version: "1.0" + +build: + script: + - mkdir -p "${PREFIX}/Menu" # [unix] + - cp "${RECIPE_DIR}/menu.json" "${PREFIX}/Menu/menu_json_validation.json" # [unix] + - md "%PREFIX%\\Menu" # [win] + - copy /y "%RECIPE_DIR%\\menu.json" "%PREFIX%\\Menu\\menu_json_validation.json" # [win] diff --git a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml index d4f463886f..8aae740991 100644 --- a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml +++ b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml @@ -1,5 +1,6 @@ package: name: pkg + version: 0.0.1 build: merge_build_host: False diff --git a/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml new file mode 100644 index 0000000000..e636c4152c --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml @@ -0,0 +1,14 @@ +package: + name: test-noarch-with-no-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 diff --git a/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml new file mode 100644 index 0000000000..b0e2f804e2 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml @@ -0,0 +1,20 @@ +package: + name: test-noarch-with-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 + - colorama # [win] + - __win # [win] + - appnope # [osx] + - __osx # [osx] + - __archspec * ppc64le # [ppc64le] + - __linux # [linux] diff --git a/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml b/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml index f48cef77a6..de141ac10e 100644 --- a/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml +++ b/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: entry_points: diff --git a/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml +++ /dev/null @@ 
-1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml b/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml new file mode 100644 index 0000000000..311b5a95d2 --- /dev/null +++ b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml @@ -0,0 +1,252 @@ +# Performance regression test for https://github.com/conda/conda-build/pull/5224 +# This is a reduced version of +# https://github.com/conda-forge/arrow-cpp-feedstock/blob/e6f573674c5f9c35c6a614a1563b2fe3eeb3e72b/recipe/meta.yaml +# stripped of everything apart from the large number of inter-output +# pin_subpackage dependencies/run_exports. +# Addendum: Omit libarrow-all, pyarrow, pyarrow-tests to reduce benchmark duration. + +package: + name: apache-arrow + version: 15.0.2 + +outputs: +# - name: libarrow-all +# build: +# run_exports: +# - {{ pin_subpackage("libarrow", max_pin="x") }} +# - {{ pin_subpackage("libarrow-acero", max_pin="x") }} +# - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} +# - {{ pin_subpackage("libarrow-flight", max_pin="x") }} +# - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} +# - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} +# - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} +# - {{ pin_subpackage("libparquet", max_pin="x") }} +# requirements: +# host: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} +# run: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} + + - name: libarrow + build: + run_exports: + - {{ pin_subpackage("libarrow", max_pin="x") }} + + - name: libarrow-acero + build: + run_exports: + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-dataset + build: + run_exports: + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + + - name: libarrow-flight + 
build: + run_exports: + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + requirements: + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-flight-sql + build: + run_exports: + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + + - name: libarrow-gandiva + build: + run_exports: + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + requirements: + build: + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-substrait + build: + run_exports: + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + + - name: libparquet + build: + run_exports: + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + +# - name: pyarrow +# requirements: +# host: +# - {{ pin_subpackage("libarrow-all", exact=True) }} +# run: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} +# +# - name: pyarrow-tests +# requirements: +# host: +# - {{ pin_subpackage("libarrow-all", exact=True) }} +# - {{ pin_subpackage('pyarrow', exact=True) }} +# run: +# - {{ pin_subpackage('pyarrow', exact=True) }} + +# The original recipe had 173 selector lines; adding placeholders for these here: +about: + description: > + 00 # [x86_64] + 01 # [not x86_64] + 02 # [unix] + 03 # [not unix] + 04 # [linux] + 05 # [not linux] + 06 # [osx] + 07 # [not osx] + 08 # [win] + 09 # [not win] + 10 # [x86_64] + 11 # [not x86_64] + 12 # [unix] + 13 # [not unix] + 14 # [linux] + 15 # [not linux] + 16 # [osx] + 17 # [not osx] + 18 # [win] + 19 # [not win] + 20 # [x86_64] + 21 # [not x86_64] + 22 # [unix] + 23 # [not unix] + 24 # [linux] + 25 # [not linux] + 26 # [osx] + 27 # [not osx] + 28 # [win] + 29 # [not win] + 30 # [x86_64] + 31 # [not x86_64] + 32 # [unix] + 33 # [not unix] + 34 # [linux] + 35 # [not linux] + 36 # [osx] + 37 # [not osx] + 38 # [win] + 39 # [not win] + 40 # [x86_64] + 41 # [not x86_64] + 42 # [unix] + 43 # [not unix] + 44 # [linux] + 45 # [not linux] + 46 # [osx] + 47 # [not osx] + 48 # [win] + 49 # [not win] + 50 # [x86_64] + 51 # [not x86_64] + 52 # [unix] + 53 # [not unix] + 54 # [linux] + 55 # [not linux] + 56 # [osx] + 57 # [not osx] + 58 # [win] + 59 # [not win] + 60 # [x86_64] + 61 # [not x86_64] + 62 # [unix] + 63 # [not unix] + 64 # [linux] + 65 # [not linux] + 66 # [osx] + 67 # [not osx] + 68 # [win] + 69 # [not win] + 70 # [x86_64] + 71 # [not x86_64] + 72 # [unix] + 73 # [not unix] + 74 # 
[linux] + 75 # [not linux] + 76 # [osx] + 77 # [not osx] + 78 # [win] + 79 # [not win] + 80 # [x86_64] + 81 # [not x86_64] + 82 # [unix] + 83 # [not unix] + 84 # [linux] + 85 # [not linux] + 86 # [osx] + 87 # [not osx] + 88 # [win] + 89 # [not win] + 90 # [x86_64] + 91 # [not x86_64] + 92 # [unix] + 93 # [not unix] + 94 # [linux] + 95 # [not linux] + 96 # [osx] + 97 # [not osx] + 98 # [win] + 99 # [not win] diff --git a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml index 7619b42085..663d173590 100644 --- a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml +++ b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml @@ -6,8 +6,10 @@ requirements: build: - numpy - nomkl # [unix] + - openssl host: - curl + - {{ pin_compatible('openssl', exact=True) }} run: {% for package in resolved_packages('build') %} - {{ package }} diff --git a/tests/test-recipes/metadata/_rpath/conda_build_config.yaml b/tests/test-recipes/metadata/_rpath/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_rpath/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_rpath_symlink/meta.yaml b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml new file mode 100644 index 0000000000..0ef58cdab2 --- /dev/null +++ b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml @@ -0,0 +1,39 @@ +{% set lib_file = "libthing.so.1.0.0" %} # [linux] +{% set lib_file = "libthing.1.0.0.dylib" %} # [osx] + +package: + name: rpath_symlink + version: 1.0.0 + +build: + skip: true # [not (linux or osx)] + rpaths_patcher: {{ rpaths_patcher }} + script: + - mkdir -p "${PREFIX}/lib" + - > + < /dev/null ${CC} ${CPPFLAGS} ${CFLAGS} ${LDFLAGS} + -x c - -nostdlib -s -o "${PREFIX}/lib/{{ lib_file }}" "-Wl,-rpath,${PREFIX}/lib" + -shared -Wl,-soname,libthing.so.1 # [linux] + -dynamiclib -install_name libthing.1.dylib # [osx] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.so.1" # [linux] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.1.dylib" # [osx] + - mkdir -p "${PREFIX}/lib/subfolder" + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.so" # [linux] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.dylib" # [osx] + +requirements: + build: + - {{ compiler("c") }} + +test: + requires: + - py-lief + commands: + # Test that we get only a single entry that is the library's own directory. 
+ - | + python -c ' + import os, lief + lib = lief.parse(os.environ["PREFIX"] + "/lib/{{ lib_file }}") + assert {"$ORIGIN/."} == {e.rpath for e in lib.dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH} # [linux] + assert {"@loader_path/"} == {command.path for command in lib.commands if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH} # [osx] + ' diff --git a/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml b/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml index 130ecdf296..19eaa35bd3 100644 --- a/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml +++ b/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml @@ -34,8 +34,6 @@ rust_compiler: - rust rust_compiler_version: - 1.32.0 -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] VERBOSE_AT: - V=1 VERBOSE_CM: @@ -160,8 +158,6 @@ macos_min_version: - 10.9 macos_machine: - x86_64-apple-darwin13.4.0 -MACOSX_DEPLOYMENT_TARGET: - - 10.9 mkl: - 2019 mpfr: diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml index e913098671..44de041e88 100644 --- a/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml @@ -7,4 +7,4 @@ build: string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml index 47abc99dee..e89e55fcc5 100644 --- a/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml @@ -7,4 +7,4 @@ build: string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/_source_setuptools/meta.yaml b/tests/test-recipes/metadata/_source_setuptools/meta.yaml index 0383f2f894..0d4d1a89d4 100644 --- a/tests/test-recipes/metadata/_source_setuptools/meta.yaml +++ b/tests/test-recipes/metadata/_source_setuptools/meta.yaml @@ -13,7 +13,7 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 
requirements: diff --git a/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml b/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml new file mode 100644 index 0000000000..a6ac88cd33 --- /dev/null +++ b/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml @@ -0,0 +1,8 @@ +c_stdlib: # [unix] + - sysroot # [linux] + - macosx_deployment_target # [osx] +c_stdlib_version: # [unix] + - 2.12 # [linux64] + - 2.17 # [aarch64 or ppc64le] + - 10.13 # [osx and x86_64] + - 11.0 # [osx and arm64] diff --git a/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml b/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml new file mode 100644 index 0000000000..c655aac2ca --- /dev/null +++ b/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml @@ -0,0 +1,9 @@ +package: + name: stdlib-test + version: 1.0 + +requirements: + host: + - {{ stdlib('c') }} + # - {{ stdlib('cxx') }} + # - {{ stdlib('fortran') }} diff --git a/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml b/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml deleted file mode 100644 index 8a4b2b8a87..0000000000 --- a/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml +++ /dev/null @@ -1,58 +0,0 @@ -{% set name = "pyslet" %} -{% set version = "0.5.20140801" %} -{% set sha256 = "89538ad432d8c51b7d4b419817526f864697580d5eb1471784d15f6c056a88b6" %} - -package: - name: {{ name|lower }} - version: {{ version }} - -source: - fn: {{ name }}-{{ version }}.tar.gz - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz - sha256: {{ sha256 }} - -build: - skip: True # [py3k] - number: 0 - script: python setup.py install --single-version-externally-managed --record record.txt - -requirements: - build: - - python - - setuptools - - run: - - python - -test: - imports: - - pyslet - - pyslet.http - - pyslet.odata2 - - pyslet.qtiv1 - - pyslet.qtiv2 - - pyslet.xml20081126 - -about: - home: http://www.pyslet.org/ - license: BSD-3-Clause - license_family: BSD - summary: 'Pyslet: Python package for Standards in Learning, Education and Training' - description: | - Pyslet is a Python package for Standards in Learning Education and Training - (LET). It implements a number of LET-specific standards, including IMS QTI, - Content Packaging and Basic LTI. It also includes support for some general - standards, including the data access standard OData (see - http://www.odata.org). - - Pyslet was originally written to be the engine behind the QTI migration - tool but it can be used independently as a support module for your own - Python applications. - - Pyslet currently supports Python 2.6 and 2.7, see docs for details. 
- doc_url: http://pyslet.readthedocs.org - dev_url: https://github.com/swl10/pyslet - -extra: - recipe-maintainers: - - stuertz diff --git a/tests/test-recipes/metadata/empty_sections/meta.yaml b/tests/test-recipes/metadata/empty_sections/meta.yaml index 4b5d82b6db..e19a684f7e 100644 --- a/tests/test-recipes/metadata/empty_sections/meta.yaml +++ b/tests/test-recipes/metadata/empty_sections/meta.yaml @@ -3,7 +3,7 @@ package: version: 0.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: diff --git a/tests/test-recipes/metadata/empty_with_build_script/meta.yaml b/tests/test-recipes/metadata/empty_with_build_script/meta.yaml index a466377831..8ee7bf4efa 100644 --- a/tests/test-recipes/metadata/empty_with_build_script/meta.yaml +++ b/tests/test-recipes/metadata/empty_with_build_script/meta.yaml @@ -3,7 +3,7 @@ package: version: 0.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: diff --git a/tests/test-recipes/metadata/entry_points/meta.yaml b/tests/test-recipes/metadata/entry_points/meta.yaml index 217fcaa30e..ac07a8cb4b 100644 --- a/tests/test-recipes/metadata/entry_points/meta.yaml +++ b/tests/test-recipes/metadata/entry_points/meta.yaml @@ -20,3 +20,7 @@ requirements: - setuptools run: - python + +# Ensure we get different build strings for concurrently tested packages. +extra: + dummy: '{{ pytest_name is defined }}' diff --git a/tests/test-recipes/metadata/jinja2_build_str/meta.yaml b/tests/test-recipes/metadata/jinja2_build_str/meta.yaml index 2ad274b10f..97f45ae361 100644 --- a/tests/test-recipes/metadata/jinja2_build_str/meta.yaml +++ b/tests/test-recipes/metadata/jinja2_build_str/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: string: {{ PKG_BUILDNUM }}_g{{ GIT_FULL_HASH[:7] }} diff --git a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py index 5a17ae9b0a..8e09c8abe4 100644 --- a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py +++ b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup VERSION = '1.test' diff --git a/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml b/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml new file mode 100644 index 0000000000..ab5d0ba6d8 --- /dev/null +++ b/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml @@ -0,0 +1,8 @@ +{% set pyproject = load_file_data("pyproject.toml") %} + +package: + name: example + version: {{ pyproject['project']['version'] }} + +source: + path: ./src diff --git a/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml b/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml new file mode 100644 index 0000000000..e55cde5d10 --- /dev/null +++ b/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml @@ -0,0 +1,2 @@ +[project] +version = "1.test" diff --git a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml index 79859474e3..3bf9010a3b 100644 --- a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml +++ b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml @@ -2,6 +2,6 @@ name: foo channels: - defaults dependencies: - - 
python=3.10 + - python - tqdm prefix: /home/abraham/.conda/envs/foo diff --git a/tests/test-recipes/metadata/numpy_build/run_test.bat b/tests/test-recipes/metadata/numpy_build/run_test.bat index 326404b171..c7539158cc 100644 --- a/tests/test-recipes/metadata/numpy_build/run_test.bat +++ b/tests/test-recipes/metadata/numpy_build/run_test.bat @@ -1,4 +1,7 @@ +:: show list conda list -p "%PREFIX%" --canonical if errorlevel 1 exit 1 + +:: grep for package conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-1.0-0" if errorlevel 1 exit 1 diff --git a/tests/test-recipes/metadata/numpy_build/run_test.py b/tests/test-recipes/metadata/numpy_build/run_test.py index 7e8e5c3cb8..5e23751d1b 100644 --- a/tests/test-recipes/metadata/numpy_build/run_test.py +++ b/tests/test-recipes/metadata/numpy_build/run_test.py @@ -1,15 +1,15 @@ import os -from glob import glob import json +from pathlib import Path def main(): - prefix = os.environ['PREFIX'] - info_file = glob(os.path.join(prefix, 'conda-meta', 'conda-build-test-numpy-build-1.0*.json'))[0] - with open(info_file) as fh: - info = json.load(fh) + info_files = list(Path(os.environ['PREFIX'], 'conda-meta').glob('conda-build-test-numpy-build-1.0-0.json')) + assert len(info_files) == 1 + info = json.loads(info_files[0].read_text()) assert len(info['depends']) == 0 + if __name__ == '__main__': main() diff --git a/tests/test-recipes/metadata/numpy_build/run_test.sh b/tests/test-recipes/metadata/numpy_build/run_test.sh index 6098c7d093..902155f5f9 100644 --- a/tests/test-recipes/metadata/numpy_build/run_test.sh +++ b/tests/test-recipes/metadata/numpy_build/run_test.sh @@ -1 +1,5 @@ -conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-1.0-0" +# show list +conda list -p "$PREFIX" --canonical + +# grep for package +conda list -p "$PREFIX" --canonical | grep "conda-build-test-numpy-build-1.0-0" diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.bat b/tests/test-recipes/metadata/numpy_build_run/run_test.bat index 069cc9ff13..d2fd25cc5b 100644 --- a/tests/test-recipes/metadata/numpy_build_run/run_test.bat +++ b/tests/test-recipes/metadata/numpy_build_run/run_test.bat @@ -1,5 +1,7 @@ -@echo on +:: show list conda list -p "%PREFIX%" --canonical if errorlevel 1 exit 1 -conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0" + +:: grep for package +conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1.0-py.*_0" if errorlevel 1 exit 1 diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.py b/tests/test-recipes/metadata/numpy_build_run/run_test.py index 54c0b3859f..7e4c6a7f2a 100644 --- a/tests/test-recipes/metadata/numpy_build_run/run_test.py +++ b/tests/test-recipes/metadata/numpy_build_run/run_test.py @@ -1,24 +1,20 @@ import os import json -import glob +from pathlib import Path def main(): - prefix = os.environ['PREFIX'] - - info_files = glob.glob(os.path.join(prefix, 'conda-meta', - 'conda-build-test-numpy-build-run-1.0-py*0.json')) + info_files = list(Path(os.environ['PREFIX'], "conda-meta").glob('conda-build-test-numpy-build-run-1.0-py*_0.json')) assert len(info_files) == 1 - info_file = info_files[0] - with open(info_file) as fh: - info = json.load(fh) - # numpy with no version, python with no version, python with version pin + info = json.loads(info_files[0].read_text()) assert len(info['depends']) == 2 - depends = sorted(info['depends']) - # With no version - assert depends[0] == 'numpy' - assert depends[1].startswith('python ') 
+ + # numpy with no version, python with version pin + numpy, python = sorted(info['depends']) + assert numpy == 'numpy' + assert python.startswith('python ') + if __name__ == '__main__': main() diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.sh b/tests/test-recipes/metadata/numpy_build_run/run_test.sh index d56a5445ba..1079b31761 100644 --- a/tests/test-recipes/metadata/numpy_build_run/run_test.sh +++ b/tests/test-recipes/metadata/numpy_build_run/run_test.sh @@ -1,3 +1,5 @@ +# show list conda list -p $PREFIX --canonical -# Test the build string. Should contain NumPy, but not the version -conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0" + +# grep for package +conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1.0-py.*_0" diff --git a/tests/test-recipes/metadata/osx_is_app/meta.yaml b/tests/test-recipes/metadata/osx_is_app/meta.yaml index c6f6a887c2..2195d740ce 100644 --- a/tests/test-recipes/metadata/osx_is_app/meta.yaml +++ b/tests/test-recipes/metadata/osx_is_app/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: entry_points: diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat new file mode 100644 index 0000000000..b6584f3971 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat @@ -0,0 +1,2 @@ +:: Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. +< nul set /p="%PKG_NAME:~0,4%" > "%PREFIX%\file" & call; diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh new file mode 100644 index 0000000000..cb0be8cb2b --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh @@ -0,0 +1,2 @@ +## Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. 
+printf '%.4s' "${PKG_NAME}" > "${PREFIX}/file" diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml new file mode 100644 index 0000000000..1c27afc126 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml @@ -0,0 +1,40 @@ +{% set name = "outputs_overwrite_base_file" %} + +package: + name: {{ name }} + version: 1.0 + +outputs: + - name: base-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + + - name: first-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = base # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] + + - name: second-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = "base" # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] diff --git a/tests/test-recipes/metadata/python_build_run/run_test.bat b/tests/test-recipes/metadata/python_build_run/run_test.bat index 7ef8c3e19c..0fb1f9d9ce 100644 --- a/tests/test-recipes/metadata/python_build_run/run_test.bat +++ b/tests/test-recipes/metadata/python_build_run/run_test.bat @@ -1,4 +1,7 @@ +:: show list conda list -p "%PREFIX%" --canonical if errorlevel 1 exit 1 -conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0" + +:: grep for package +conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-run-1.0-py.*_0" if errorlevel 1 exit 1 diff --git a/tests/test-recipes/metadata/python_build_run/run_test.py b/tests/test-recipes/metadata/python_build_run/run_test.py index b3c64cc2a1..a34db82af7 100644 --- a/tests/test-recipes/metadata/python_build_run/run_test.py +++ b/tests/test-recipes/metadata/python_build_run/run_test.py @@ -1,20 +1,18 @@ import os import json -import glob +from pathlib import Path def main(): - prefix = os.environ['PREFIX'] - info_files = glob.glob(os.path.join(prefix, 'conda-meta', - 'conda-build-test-python-build-run-1.0-py*0.json')) + info_files = list(Path(os.environ['PREFIX'], 'conda-meta').glob('conda-build-test-python-build-run-1.0-py*0.json')) assert len(info_files) == 1 - info_file = info_files[0] - with open(info_file) as fh: - info = json.load(fh) - # one without the version, and another with the version - assert len(info['depends']) == 1, info['depends'] - assert info['depends'][0].startswith('python ') + info = json.loads(info_files[0].read_text()) + assert len(info['depends']) == 1 + + # python with version pin + python, = info['depends'] + assert python.startswith('python ') if __name__ == '__main__': diff --git a/tests/test-recipes/metadata/python_build_run/run_test.sh b/tests/test-recipes/metadata/python_build_run/run_test.sh index ebf0ddfb90..34d6e0d89a 100644 --- a/tests/test-recipes/metadata/python_build_run/run_test.sh +++ b/tests/test-recipes/metadata/python_build_run/run_test.sh @@ -1,3 +1,5 @@ -conda list -p $PREFIX --canonical -# Test the build string. 
Should contain Python -conda list -p $PREFIX --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0" +# show list +conda list -p "$PREFIX" --canonical + +# grep for package +conda list -p "$PREFIX" --canonical | grep "conda-build-test-python-build-run-1.0-py.*_0" diff --git a/tests/test-recipes/metadata/source_multiple/meta.yaml b/tests/test-recipes/metadata/source_multiple/meta.yaml index bb35ae7356..bbd2cb4f03 100644 --- a/tests/test-recipes/metadata/source_multiple/meta.yaml +++ b/tests/test-recipes/metadata/source_multiple/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - - path: ../../../../../conda_build_test_recipe + - path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} - git_url: https://github.com/conda/conda_build_test_recipe git_tag: 1.20.2 diff --git a/tests/test-recipes/metadata/source_path/meta.yaml b/tests/test-recipes/metadata/source_path/meta.yaml index a352bf502e..3ac5ab81f8 100644 --- a/tests/test-recipes/metadata/source_path/meta.yaml +++ b/tests/test-recipes/metadata/source_path/meta.yaml @@ -3,4 +3,4 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/source_regex/meta.yaml b/tests/test-recipes/metadata/source_regex/meta.yaml index 1e1a34873d..e0f07f5527 100644 --- a/tests/test-recipes/metadata/source_regex/meta.yaml +++ b/tests/test-recipes/metadata/source_regex/meta.yaml @@ -10,7 +10,7 @@ package: version: {{ data.group(1) }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 build: diff --git a/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml index 57e002f047..931cf10b8b 100644 --- a/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml +++ b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml @@ -10,7 +10,7 @@ package: version: {{ data.group(1) }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 build: diff --git a/tests/test-recipes/metadata/source_setup_py_data/bld.bat b/tests/test-recipes/metadata/source_setup_py_data/bld.bat index 4168d5d6f0..3399daa92d 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/bld.bat +++ b/tests/test-recipes/metadata/source_setup_py_data/bld.bat @@ -6,6 +6,6 @@ if errorlevel 1 exit 1 for /f "delims=" %%i in ('git describe') do set gitdesc=%%i if errorlevel 1 exit 1 echo "%gitdesc%" -if not "%gitdesc%"=="1.21.0" exit 1 +if not "%gitdesc%"=="1.22.0" exit 1 echo "%PKG_VERSION%" -if not "%PKG_VERSION%"=="1.21.0" exit 1 +if not "%PKG_VERSION%"=="1.22.0" exit 1 diff --git a/tests/test-recipes/metadata/source_setup_py_data/build.sh b/tests/test-recipes/metadata/source_setup_py_data/build.sh index ecde5ca3c3..3c8cd9361a 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/build.sh +++ b/tests/test-recipes/metadata/source_setup_py_data/build.sh @@ -3,6 +3,6 @@ # Ensure we are in a git repo [ -d .git ] git describe -[ "$(git describe)" = 1.21.0 ] +[ "$(git describe)" = 1.22.0 ] echo "\$PKG_VERSION = $PKG_VERSION" -[ "${PKG_VERSION}" = 1.21.0 ] +[ "${PKG_VERSION}" = 1.22.0 ] diff --git a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml index fd76f67c43..459c19ba74 100644 --- 
a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml @@ -11,8 +11,8 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe - git_tag: 1.21.0 + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} + git_tag: 1.22.0 build: entry_points: diff --git a/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml index 624dc3e72d..81d29feece 100644 --- a/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml @@ -11,7 +11,7 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 folder: src diff --git a/tests/test-recipes/metadata/state_variables/setup.py b/tests/test-recipes/metadata/state_variables/setup.py index 2f8660659e..ecd50c54c6 100644 --- a/tests/test-recipes/metadata/state_variables/setup.py +++ b/tests/test-recipes/metadata/state_variables/setup.py @@ -1,6 +1,5 @@ import os from setuptools import setup -# from distutils.core import setup if not os.getenv("CONDA_BUILD_STATE") == "RENDER": raise ValueError("Conda build state not set correctly") diff --git a/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml b/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml index 5b67ea45ca..3ed3d1cf53 100644 --- a/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml @@ -10,4 +10,4 @@ build: string: {{ GIT_BUILD_STR }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml b/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml index f5e84cdcfd..cd32d276af 100644 --- a/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml @@ -10,4 +10,4 @@ build: string: {{ environ.get('GIT_BUILD_STR', '') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml index 44f74bec88..9d7b9dd2df 100644 --- a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml @@ -7,8 +7,8 @@ package: # source will be downloaded prior to filling in jinja templates # Example assumes that this folder has setup.py in it source: - git_url: ../../../../../conda_build_test_recipe - git_tag: 1.21.0 + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} + git_tag: 1.22.0 requirements: build: diff --git a/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml b/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml index dda10e6be8..ba0bd54f85 100644 --- a/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml +++ b/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ 
environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} outputs: - type: wheel diff --git a/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml b/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml new file mode 100644 index 0000000000..d1c2bfbe57 --- /dev/null +++ b/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml @@ -0,0 +1,9 @@ +package: + name: test_build_script_in_output + version: 1.0 + +outputs: + - name: test_1 + build: + script_env: + - TEST_FN_DOESNT_EXIST diff --git a/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml b/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml new file mode 100644 index 0000000000..2cb957bfff --- /dev/null +++ b/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml @@ -0,0 +1,6 @@ +package: + name: _empty_outputs_requires_package_version + # when there are no outputs, package/version is required + # version: 0 + +outputs: diff --git a/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml b/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml index 1b363535e7..8ef7c2c8d8 100644 --- a/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml +++ b/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} requirements: build: diff --git a/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml b/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml index de88138e64..9c788bb2ba 100644 --- a/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml @@ -1,5 +1,5 @@ -abc: +DEP1: - 2.7.16 - 3.7.3 -foo: +DEP2: - 3.5.1 diff --git a/tests/test-recipes/split-packages/_intradependencies/install-lib1intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-lib1.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-lib1intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-lib1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-lib2intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-lib2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-lib2intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-lib2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-py1-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-py1.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-py1-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-py1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-py2-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-py2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-py2-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-py2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-r1-intradependencies.sh 
b/tests/test-recipes/split-packages/_intradependencies/install-r1.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-r1-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-r1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-r2-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-r2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-r2-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-r2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/meta.yaml b/tests/test-recipes/split-packages/_intradependencies/meta.yaml index 25b2611c3b..644ff633d0 100644 --- a/tests/test-recipes/split-packages/_intradependencies/meta.yaml +++ b/tests/test-recipes/split-packages/_intradependencies/meta.yaml @@ -4,49 +4,42 @@ package: requirements: build: - # the purpose of these dependencies is to disallow empty requirements from making the tests - # pass when they would otherwise fail. Empty requirements hide lack of finalization. - - posix # [win] - - git # [not win] + # the purpose of this dependency is to disallow empty requirements from making the tests + # pass when they would otherwise fail, empty requirements hide lack of finalization + - ca-certificates outputs: - - name: abc - version: {{ abc }} - - name: foo - version: {{ foo }} + - name: dep1 + version: {{ DEP1 }} + - name: dep2 + version: {{ DEP2 }} - - name: lib1intradependencies - script: install-lib1intradependencies.sh + - name: lib1 + script: install-lib1.sh - - name: py1-intradependencies + - name: py1 requirements: - - {{ pin_subpackage('lib1intradependencies', exact=True) }} - - abc {{ abc }} - - m2-base # [win] - script: install-py1-intradependencies.sh + - {{ pin_subpackage('lib1', exact=True) }} + - dep1 {{ DEP1 }} + script: install-py1.sh - - name: r1-intradependencies + - name: r1 requirements: - - {{ pin_subpackage('lib1intradependencies', exact=True) }} - - foo {{ foo }} - - m2-base # [win] - script: install-r1-intradependencies.sh + - {{ pin_subpackage('lib1', exact=True) }} + - dep2 {{ DEP2 }} + script: install-r1.sh - - name: lib2intradependencies - requirements: - - m2-base # [win] - script: install-lib2intradependencies.sh + - name: lib2 + script: install-lib2.sh - - name: py2-intradependencies + - name: py2 requirements: - - {{ pin_subpackage('lib2intradependencies', exact=True) }} - - abc {{ abc }} - - m2-base # [win] - script: install-py2-intradependencies.sh + - {{ pin_subpackage('lib2', exact=True) }} + - dep1 {{ DEP1 }} + script: install-py2.sh - - name: r2-intradependencies + - name: r2 requirements: - - {{ pin_subpackage('lib2intradependencies', exact=True) }} - - foo {{ foo }} - - m2-base # [win] - script: install-r2-intradependencies.sh + - {{ pin_subpackage('lib2', exact=True) }} + - dep2 {{ DEP2 }} + script: install-r2.sh diff --git a/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml b/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml new file mode 100644 index 0000000000..6943d411f7 --- /dev/null +++ b/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml @@ -0,0 +1,12 @@ +package: + name: _multi_outputs_without_package_version + # when there are outputs, package/version is not required + # version: 0 + +outputs: + - name: a + version: 1 + - name: b + version: 2 + - name: c + 
version: 3 diff --git a/tests/test-recipes/split-packages/_order/meta.yaml b/tests/test-recipes/split-packages/_order/meta.yaml index df0c0db7b2..fb171942a8 100644 --- a/tests/test-recipes/split-packages/_order/meta.yaml +++ b/tests/test-recipes/split-packages/_order/meta.yaml @@ -1,5 +1,7 @@ package: name: toplevel-ab + version: 1 + outputs: - name: a version: 1 diff --git a/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml b/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml index f392b6d9b2..36dddf6134 100644 --- a/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml @@ -1,3 +1,3 @@ python: - - 2.7 + - 3.8 - 3.9 diff --git a/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml b/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml index f392b6d9b2..36dddf6134 100644 --- a/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml @@ -1,3 +1,3 @@ python: - - 2.7 + - 3.8 - 3.9 diff --git a/tests/test-recipes/test-package/setup.py b/tests/test-recipes/test-package/setup.py index f3ec4e663c..b0f90841cb 100644 --- a/tests/test-recipes/test-package/setup.py +++ b/tests/test-recipes/test-package/setup.py @@ -1,6 +1,5 @@ import sys from setuptools import setup -# from distutils.core import setup # test with an old version of Python that we'll never normally use if sys.version_info[:2] == (3, 5): diff --git a/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml b/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml new file mode 100644 index 0000000000..141ac077e4 --- /dev/null +++ b/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml @@ -0,0 +1,11 @@ +package: + name: ignore_version_reduces_matrix + version: 1.0 + +requirements: + build: + - packageA + - packageB {{ packageB }} + run: + - packageA + - packageB diff --git a/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml b/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml deleted file mode 100644 index d8481eabec..0000000000 --- a/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml +++ /dev/null @@ -1,8 +0,0 @@ -python: - - 2.7 - - 3.9 -numpy: - - 1.10 - - 1.16 -ignore_version: - - numpy diff --git a/tests/test-recipes/variants/03_numpy_matrix/meta.yaml b/tests/test-recipes/variants/03_numpy_matrix/meta.yaml deleted file mode 100644 index 0f5a69f2a9..0000000000 --- a/tests/test-recipes/variants/03_numpy_matrix/meta.yaml +++ /dev/null @@ -1,12 +0,0 @@ -package: - name: numpy_matrix - version: 1.0 - -requirements: - build: - - python - - numpy - - nomkl # [unix] - run: - - python - - numpy diff --git a/tests/test-recipes/variants/27_requirements_host/meta.yaml b/tests/test-recipes/variants/27_requirements_host/meta.yaml index 0c4a833fa8..0ab071e56b 100644 --- a/tests/test-recipes/variants/27_requirements_host/meta.yaml +++ b/tests/test-recipes/variants/27_requirements_host/meta.yaml @@ -1,5 +1,6 @@ package: name: cfastpm + version: 0.0.1 requirements: host: diff --git a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml index 6a2ce5d722..56a761a011 100644 --- a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml +++ 
b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml @@ -2,5 +2,5 @@ python: - 3.8 - 3.9 numpy: - - 1.16 - 1.19 + - 1.22 diff --git a/tests/variant_recipe/meta.yaml b/tests/test-recipes/variants/variant_recipe/meta.yaml similarity index 73% rename from tests/variant_recipe/meta.yaml rename to tests/test-recipes/variants/variant_recipe/meta.yaml index 4bf58dceef..5645afd47f 100644 --- a/tests/variant_recipe/meta.yaml +++ b/tests/test-recipes/variants/variant_recipe/meta.yaml @@ -5,11 +5,8 @@ package: requirements: build: - python - - numpy - - nomkl # [unix] run: - python - - numpy about: summary: {{ python }} diff --git a/tests/test_api_build.py b/tests/test_api_build.py index bf1dc00af9..a663f18e73 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -4,50 +4,72 @@ This module tests the build API. These are high-level integration tests. """ -import base64 -import locale -from collections import OrderedDict -from glob import glob +from __future__ import annotations + +import json import logging import os import re import subprocess import sys -import json +import tarfile import uuid +from collections import OrderedDict +from contextlib import nullcontext +from glob import glob +from pathlib import Path +from shutil import which +from typing import TYPE_CHECKING # for version import conda - -from conda_build.conda_interface import url_path, LinkError, CondaError, cc_conda_build -from conda_build.conda_interface import linked - -import conda_build - -from binstar_client.commands import remove, show -from binstar_client.errors import NotFound -from pkg_resources import parse_version import pytest import yaml -import tarfile - -from conda_build import api, exceptions, __version__ -from conda_build.build import VersionOrder -from conda_build.render import finalize_metadata -from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, - package_has_file, check_output_env, get_conda_operation_locks, rm_rf, - walk, env_var, FileNotFoundError) +from binstar_client.commands import remove, show +from binstar_client.errors import NotFound +from conda.base.context import context, reset_context +from conda.common.compat import on_linux, on_mac, on_win +from conda.exceptions import ClobberError, CondaError, CondaMultiError, LinkError +from conda.utils import url_path +from conda_index.api import update_index + +from conda_build import __version__, api, exceptions +from conda_build.config import Config +from conda_build.exceptions import ( + CondaBuildException, + DependencyNeedsBuildingError, + OverDependingError, + OverLinkingError, +) from conda_build.os_utils.external import find_executable -from conda_build.exceptions import (DependencyNeedsBuildingError, CondaBuildException, - OverLinkingError, OverDependingError) -from conda_build.conda_interface import reset_context -from conda.exceptions import ClobberError, CondaMultiError -from conda_build.conda_interface import conda_46, conda_47 +from conda_build.render import finalize_metadata +from conda_build.utils import ( + check_call_env, + check_output_env, + convert_path_for_cygwin_or_msys2, + copy_into, + env_var, + get_conda_operation_locks, + package_has_file, + prepend_bin_path, + rm_rf, + walk, +) -from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling +from .utils import ( + add_mangling, + fail_dir, + get_valid_recipes, + metadata_dir, + metadata_path, + reset_config, +) + +if TYPE_CHECKING: + from pytest import FixtureRequest, MonkeyPatch + from pytest_mock 
import MockerFixture -# define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere -empty_sections = os.path.join(metadata_dir, "empty_sections") + from conda_build.metadata import MetaData def represent_ordereddict(dumper, data): @@ -59,15 +81,18 @@ def represent_ordereddict(dumper, data): value.append((node_key, node_value)) - return yaml.nodes.MappingNode('tag:yaml.org,2002:map', value) + return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) yaml.add_representer(OrderedDict, represent_ordereddict) class AnacondaClientArgs: - def __init__(self, specs, token=None, site=None, log_level=logging.INFO, force=False): + def __init__( + self, specs, token=None, site=None, log_level=logging.INFO, force=False + ): from binstar_client.utils import parse_specs + self.specs = [parse_specs(specs)] self.spec = self.specs[0] self.token = token @@ -84,35 +109,44 @@ def describe_root(cwd=None): return tag -@pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir) - if is_valid_dir(metadata_dir, dirname)]) -def recipe(request): - return os.path.join(metadata_dir, request.param) - - # This tests any of the folders in the test-recipes/metadata folder that don't start with _ @pytest.mark.slow @pytest.mark.serial -def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(metadata_dir) + ], +) +def test_recipe_builds( + recipe: Path, + testing_config, + monkeypatch: pytest.MonkeyPatch, + conda_build_test_recipe_envvar: str, +): # TODO: After we fix #3754 this mark can be removed. This specific test # ``source_setup_py_data_subdir`` reproduces the problem. - if os.path.basename(recipe) == "source_setup_py_data_subdir": + if recipe.name == "source_setup_py_data_subdir": pytest.xfail("Issue related to #3754 on conda-build.") + elif recipe.name == "unicode_all_over" and context.solver == "libmamba": + pytest.xfail("Unicode package names not supported in libmamba.") + # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - if 'unicode_all_over' in recipe and sys.version_info[0] == 2: - pytest.skip('unicode_all_over does not work on Python 2') - api.build(recipe, config=testing_config) + api.build(str(recipe), config=testing_config) @pytest.mark.serial -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. @@ -125,13 +159,16 @@ def test_ignore_prefix_files(testing_config, monkeypatch): @pytest.mark.serial -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. 
We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. +# Need more time to figure out how to circumvent the problem. def test_ignore_some_prefix_files(testing_config, monkeypatch): recipe = os.path.join(metadata_dir, "_ignore_some_prefix_files") testing_config.activate = True @@ -142,18 +179,22 @@ @pytest.mark.serial @pytest.mark.xfail -def test_token_upload(testing_workdir, testing_metadata): +def test_token_upload(testing_metadata): folder_uuid = uuid.uuid4().hex # generated with conda_test_account user, command: # anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda' - args = AnacondaClientArgs(specs="conda_build_test/test_token_upload_" + folder_uuid, - token="co-143399b8-276e-48db-b43f-4a3de839a024", - force=True) + args = AnacondaClientArgs( + specs="conda_build_test/test_token_upload_" + folder_uuid, + token="co-143399b8-276e-48db-b43f-4a3de839a024", + force=True, + ) with pytest.raises(NotFound): show.main(args) - testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid]) + testing_metadata.meta["package"]["name"] = "_".join( + [testing_metadata.name(), folder_uuid] + ) testing_metadata.config.token = args.token # the folder with the test recipe to upload @@ -173,8 +214,13 @@ @pytest.mark.sanity @pytest.mark.serial @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) -def test_no_anaconda_upload_condarc(service_name, testing_workdir, testing_config, capfd): - api.build(empty_sections, config=testing_config, notest=True) +def test_no_anaconda_upload_condarc( + service_name: str, + testing_config, + capfd, + conda_build_test_recipe_envvar: str, +): + api.build(str(metadata_path / "empty_sections"), config=testing_config, notest=True) output, error = capfd.readouterr() assert "Automatic uploading is disabled" in output, error @@ -182,18 +228,23 @@ @pytest.mark.sanity @pytest.mark.serial @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) -def test_offline(service_name, testing_config): - with env_var('CONDA_OFFLINE', 'True', reset_context): - api.build(empty_sections, config=testing_config) +def test_offline( + service_name: str, testing_config, conda_build_test_recipe_envvar: str +): + with env_var("CONDA_OFFLINE", "True", reset_context): + api.build(str(metadata_path / "empty_sections"), config=testing_config) def test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") - m = api.render(recipe_path, config=testing_config)[0][0] - output = api.get_output_file_path(m)[0] + metadata = api.render(recipe_path, config=testing_config)[0][0] + output = api.get_output_file_paths(metadata)[0] # missing hash because we set custom build string in meta.yaml - 
test_path = os.path.join(testing_config.croot, testing_config.host_subdir, - "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2") + test_path = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2", + ) assert test_path == output @@ -206,7 +257,7 @@ def test_no_include_recipe_config_arg(testing_metadata): # make sure that it is not there when the command line flag is passed testing_metadata.config.include_recipe = False - testing_metadata.meta['build']['number'] = 2 + testing_metadata.meta["build"]["number"] = 2 # We cannot test packages without recipes as we cannot render them output_file = api.build(testing_metadata, notest=True)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") @@ -219,14 +270,18 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): outputs = api.build(testing_metadata, notest=True) assert package_has_file(outputs[0], "info/recipe/meta.yaml") - output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), - config=testing_config, notest=True)[0] + output_file = api.build( + os.path.join(metadata_dir, "_no_include_recipe"), + config=testing_config, + notest=True, + )[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") with pytest.raises(SystemExit): # we are testing that even with the recipe excluded, we still get the tests in place - output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), - config=testing_config)[0] + output_file = api.build( + os.path.join(metadata_dir, "_no_include_recipe"), config=testing_config + )[0] @pytest.mark.serial @@ -234,74 +289,96 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): def test_early_abort(testing_config, capfd): """There have been some problems with conda-build dropping out early. 
Make sure we aren't causing them""" - api.build(os.path.join(metadata_dir, '_test_early_abort'), config=testing_config) + api.build(os.path.join(metadata_dir, "_test_early_abort"), config=testing_config) output, error = capfd.readouterr() assert "Hello World" in output -def test_output_build_path_git_source(testing_workdir, testing_config): +def test_output_build_path_git_source(testing_config): recipe_path = os.path.join(metadata_dir, "source_git_jinja2") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_paths(m)[0] _hash = m.hash_dependencies() - test_path = os.path.join(testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash)) + test_path = os.path.join( + testing_config.croot, + testing_config.host_subdir, + f"conda-build-test-source-git-jinja2-1.20.2-py{sys.version_info.major}{sys.version_info.minor}{_hash}_0_g262d444.tar.bz2", + ) assert output == test_path @pytest.mark.sanity @pytest.mark.serial def test_build_with_no_activate_does_not_activate(): - api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False, - anaconda_upload=False) + api.build( + os.path.join(metadata_dir, "_set_env_var_no_activate_build"), + activate=False, + anaconda_upload=False, + ) @pytest.mark.sanity @pytest.mark.serial -@pytest.mark.xfail(on_win and len(os.getenv('PATH')) > 1024, reason="Long PATHs make activation fail with obscure messages") +@pytest.mark.xfail( + on_win and len(os.getenv("PATH")) > 1024, + reason="Long PATHs make activation fail with obscure messages", +) def test_build_with_activate_does_activate(): - api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True, - anaconda_upload=False) + api.build( + os.path.join(metadata_dir, "_set_env_var_activate_build"), + activate=True, + anaconda_upload=False, + ) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform == "win32", - reason="no binary prefix manipulation done on windows.") -def test_binary_has_prefix_files(testing_workdir, testing_config): - api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config) +@pytest.mark.skipif( + sys.platform == "win32", reason="no binary prefix manipulation done on windows." +) +def test_binary_has_prefix_files(testing_config): + api.build( + os.path.join(metadata_dir, "_binary_has_prefix_files"), config=testing_config + ) @pytest.mark.xfail @pytest.mark.sanity -@pytest.mark.skipif(sys.platform == "win32", - reason="no binary prefix manipulation done on windows.") -def test_binary_has_prefix_files_non_utf8(testing_workdir, testing_config): - api.build(os.path.join(metadata_dir, '_binary_has_utf_non_8'), config=testing_config) - - -def test_relative_path_git_versioning(testing_workdir, testing_config): - # conda_build_test_recipe is a manual step. Clone it at the same level as - # your conda-build source. - cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', - 'conda_build_test_recipe')) - tag = describe_root(cwd) - output = api.get_output_file_path(os.path.join(metadata_dir, - "_source_git_jinja2_relative_path"), - config=testing_config)[0] +@pytest.mark.skipif( + sys.platform == "win32", reason="no binary prefix manipulation done on windows." 
+) +def test_binary_has_prefix_files_non_utf8(testing_config): + api.build( + os.path.join(metadata_dir, "_binary_has_utf_non_8"), config=testing_config + ) + + +def test_relative_path_git_versioning( + testing_config, + conda_build_test_recipe_path: Path, + conda_build_test_recipe_envvar: str, +): + tag = describe_root(conda_build_test_recipe_path) + output = api.get_output_file_paths( + metadata_path / "_source_git_jinja2_relative_path", + config=testing_config, + )[0] assert tag in output -def test_relative_git_url_git_versioning(testing_workdir, testing_config): - cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', - 'conda_build_test_recipe')) - tag = describe_root(cwd) - recipe = os.path.join(metadata_dir, "_source_git_jinja2_relative_git_url") - output = api.get_output_file_path(recipe, config=testing_config)[0] +def test_relative_git_url_git_versioning( + testing_config, + conda_build_test_recipe_path: Path, + conda_build_test_recipe_envvar: str, +): + tag = describe_root(conda_build_test_recipe_path) + output = api.get_output_file_paths( + metadata_path / "_source_git_jinja2_relative_git_url", + config=testing_config, + )[0] assert tag in output -def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config): +def test_dirty_variable_available_in_build_scripts(testing_config): recipe = os.path.join(metadata_dir, "_dirty_skip_section") testing_config.dirty = True api.build(recipe, config=testing_config) @@ -320,42 +397,48 @@ def dummy_executable(folder, exename): prefix = "@echo off\n" else: prefix = "#!/bin/bash\nexec 1>&2\n" - with open(dummyfile, 'w') as f: - f.write(prefix + """ - echo ******* You have reached the dummy {}. It is likely there is a bug in + with open(dummyfile, "w") as f: + f.write( + prefix + + f""" + echo ******* You have reached the dummy {exename}. 
It is likely there is a bug in echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 - """.format(exename)) + """ + ) if sys.platform != "win32": import stat + st = os.stat(dummyfile) os.chmod(dummyfile, st.st_mode | stat.S_IEXEC) return exename +@pytest.mark.skip( + reason="GitHub discontinued SVN, see https://github.com/conda/conda-build/issues/5098" +) def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatch): # "hide" svn by putting a known bad one on PATH exename = dummy_executable(testing_workdir, "svn") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) - FNULL = open(os.devnull, 'w') + FNULL = open(os.devnull, "w") with pytest.raises(subprocess.CalledProcessError): - check_call_env([exename, '--version'], stderr=FNULL) + check_call_env([exename, "--version"], stderr=FNULL) FNULL.close() env = os.environ.copy() env["PATH"] = os.pathsep.join([testing_workdir, env["PATH"]]) testing_config.activate = True - api.build(os.path.join(metadata_dir, '_checkout_tool_as_dependency'), config=testing_config) + api.build( + os.path.join(metadata_dir, "_checkout_tool_as_dependency"), + config=testing_config, + ) platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = sorted({"32", *platforms}) - compilers = [ - "3.6", - "3.7", - pytest.param("2.7", marks=pytest.mark.skip("Failing for Python 2.7")), - ] + compilers = ["3.10", "3.11", "3.12"] msvc_vers = ["14.0"] else: msvc_vers = [] @@ -366,51 +449,50 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc @pytest.mark.parametrize("msvc_ver", msvc_vers) def test_build_msvc_compiler(msvc_ver, monkeypatch): # verify that the correct compiler is available - cl_versions = {"9.0": 15, - "10.0": 16, - "11.0": 17, - "12.0": 18, - "14.0": 19} + cl_versions = {"9.0": 15, "10.0": 16, "11.0": 17, "12.0": 18, "14.0": 19} - monkeypatch.setenv('CONDATEST_MSVC_VER', msvc_ver) - monkeypatch.setenv('CL_EXE_VERSION', str(cl_versions[msvc_ver])) + monkeypatch.setenv("CONDATEST_MSVC_VER", msvc_ver) + monkeypatch.setenv("CL_EXE_VERSION", str(cl_versions[msvc_ver])) try: # Always build Python 2.7 - but set MSVC version manually via Jinja template - api.build(os.path.join(metadata_dir, '_build_msvc_compiler'), python="2.7") + api.build(os.path.join(metadata_dir, "_build_msvc_compiler"), python="2.7") except: raise finally: - del os.environ['CONDATEST_MSVC_VER'] - del os.environ['CL_EXE_VERSION'] + del os.environ["CONDATEST_MSVC_VER"] + del os.environ["CL_EXE_VERSION"] @pytest.mark.sanity @pytest.mark.parametrize("platform", platforms) @pytest.mark.parametrize("target_compiler", compilers) -def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config): - testing_config.variant['python'] = target_compiler +def test_cmake_generator(platform, target_compiler, testing_config): + testing_config.variant["python"] = target_compiler testing_config.activate = True - api.build(os.path.join(metadata_dir, '_cmake_generator'), config=testing_config) + api.build(os.path.join(metadata_dir, "_cmake_generator"), config=testing_config) -@pytest.mark.skipif(sys.platform == "win32", - reason="No windows symlinks") -def test_symlink_fail(testing_workdir, testing_config): +@pytest.mark.skipif(sys.platform == "win32", reason="No windows symlinks") +def test_symlink_fail(testing_config): with pytest.raises((SystemExit, FileNotFoundError)): api.build(os.path.join(fail_dir, "symlinks"), 
config=testing_config) @pytest.mark.sanity -def test_pip_in_meta_yaml_fail(testing_workdir, testing_config): - with pytest.raises(ValueError, match='environment.yml'): - api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config) +def test_pip_in_meta_yaml_fail(testing_config): + with pytest.raises(ValueError, match="environment.yml"): + api.build( + os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config + ) @pytest.mark.sanity -def test_recursive_fail(testing_workdir, testing_config): - with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError), - match="recursive-build2"): +def test_recursive_fail(testing_config): + with pytest.raises( + (RuntimeError, exceptions.DependencyNeedsBuildingError), + match="recursive-build2", + ): api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config) # indentation critical here. If you indent this, and the exception is not raised, then # the exc variable here isn't really completely created and shows really strange errors: @@ -418,16 +500,20 @@ def test_recursive_fail(testing_workdir, testing_config): @pytest.mark.sanity -def test_jinja_typo(testing_workdir, testing_config): +def test_jinja_typo(testing_config): with pytest.raises(SystemExit, match="GIT_DSECRIBE_TAG"): - api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config) + api.build( + os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config + ) @pytest.mark.sanity -def test_skip_existing(testing_workdir, testing_config, capfd): +def test_skip_existing(testing_config, capfd, conda_build_test_recipe_envvar: str): # build the recipe first - api.build(empty_sections, config=testing_config) - api.build(empty_sections, config=testing_config, skip_existing=True) + api.build(str(metadata_path / "empty_sections"), config=testing_config) + api.build( + str(metadata_path / "empty_sections"), config=testing_config, skip_existing=True + ) output, error = capfd.readouterr() assert "are already built" in output @@ -438,13 +524,13 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): outputs = api.build(testing_metadata) # Copy our package into some new folder - output_dir = os.path.join(testing_workdir, 'someoutput') + output_dir = os.path.join(testing_workdir, "someoutput") platform = os.path.join(output_dir, testing_metadata.config.host_subdir) os.makedirs(platform) copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0]))) # create the index so conda can find the file - api.update_index(output_dir) + update_index(output_dir) testing_metadata.config.skip_existing = True testing_metadata.config.channel_urls = [url_path(output_dir)] @@ -455,14 +541,16 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): assert "are already built" in output -def test_failed_tests_exit_build(testing_workdir, testing_config): +def test_failed_tests_exit_build(testing_config): """https://github.com/conda/conda-build/issues/1112""" with pytest.raises(SystemExit, match="TESTS FAILED"): - api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config) + api.build( + os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config + ) @pytest.mark.sanity -def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): +def test_requirements_txt_for_run_reqs(testing_config): """ If run reqs are blank, then conda-build looks for requirements.txt in the recipe folder. 
There has been a report of issue with unsatisfiable requirements at @@ -472,14 +560,22 @@ def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): This test attempts to reproduce those conditions: a channel other than defaults with this requirements.txt """ - testing_config.channel_urls = ('conda_build_test', ) - api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config) + testing_config.channel_urls = ("conda_build_test",) + api.build( + os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config + ) -def test_compileall_compiles_all_good_files(testing_workdir, testing_config): - output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] - good_files = ['f1.py', 'f3.py'] - bad_file = 'f2_bad.py' +@pytest.mark.skipif( + sys.version_info >= (3, 10), + reason="Python 3.10+, py_compile terminates once it finds an invalid file", +) +def test_compileall_compiles_all_good_files(testing_config): + output = api.build( + os.path.join(metadata_dir, "_compile-test"), config=testing_config + )[0] + good_files = ["f1.py", "f3.py"] + bad_file = "f2_bad.py" for f in good_files: assert package_has_file(output, f) # look for the compiled file also @@ -489,11 +585,13 @@ def test_compileall_compiles_all_good_files(testing_workdir, testing_config): @pytest.mark.sanity -@pytest.mark.skipif(not on_win, reason="only Windows is insane enough to have backslashes in paths") -def test_backslash_in_always_include_files_path(testing_config): - api.build(os.path.join(metadata_dir, '_backslash_in_include_files')) +@pytest.mark.skipif( + not on_win, reason="only Windows is insane enough to have backslashes in paths" +) +def test_backslash_in_always_include_files_path(): + api.build(os.path.join(metadata_dir, "_backslash_in_include_files")) with pytest.raises(RuntimeError): - api.build(os.path.join(fail_dir, 'backslash_in_include_files')) + api.build(os.path.join(fail_dir, "backslash_in_include_files")) @pytest.mark.sanity @@ -501,28 +599,29 @@ def test_build_metadata_object(testing_metadata): api.build(testing_metadata) -def numpy_installed(): - return any([True for dist in linked(sys.prefix) if dist.name == 'numpy']) - - @pytest.mark.serial -@pytest.mark.skipif(not numpy_installed(), reason="numpy not installed in base environment") +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="numpy.distutils deprecated in Python 3.12+", +) def test_numpy_setup_py_data(testing_config): - recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') + recipe_path = os.path.join(metadata_dir, "_numpy_setup_py_data") # this shows an error that is OK to ignore: + # (Is this Error still relevant) # PackagesNotFoundError: The following packages are missing from the target environment: # - cython - subprocess.call('conda remove -y cython'.split()) + subprocess.call("conda remove -y cython".split()) with pytest.raises(CondaBuildException) as exc_info: - api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] + api.render(recipe_path, config=testing_config, numpy="1.16") assert exc_info.match("Cython") subprocess.check_call(["conda", "install", "-y", "cython"]) - m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] - _hash = m.hash_dependencies() - assert os.path.basename(api.get_output_file_path(m)[0]) == \ - "load_setup_py_test-0.1.0-np116py{}{}{}_0.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash) + metadata = api.render(recipe_path, config=testing_config, 
numpy="1.16")[0][0] + _hash = metadata.hash_dependencies() + assert ( + os.path.basename(api.get_output_file_paths(metadata)[0]) + == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2" + ) @pytest.mark.slow @@ -540,76 +639,105 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke functions are using tools from the build env. """ - toplevel = os.path.join(testing_workdir, 'toplevel') + toplevel = os.path.join(testing_workdir, "toplevel") os.mkdir(toplevel) - relative_sub = os.path.join(testing_workdir, 'relative_sub') + relative_sub = os.path.join(testing_workdir, "relative_sub") os.mkdir(relative_sub) - absolute_sub = os.path.join(testing_workdir, 'absolute_sub') + absolute_sub = os.path.join(testing_workdir, "absolute_sub") os.mkdir(absolute_sub) sys_git_env = os.environ.copy() - sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build' - sys_git_env['GIT_AUTHOR_EMAIL'] = 'conda@conda-build.org' - sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build' - sys_git_env['GIT_COMMITTER_EMAIL'] = 'conda@conda-build.org' + sys_git_env["GIT_AUTHOR_NAME"] = "conda-build" + sys_git_env["GIT_AUTHOR_EMAIL"] = "conda@conda-build.org" + sys_git_env["GIT_COMMITTER_NAME"] = "conda-build" + sys_git_env["GIT_COMMITTER_EMAIL"] = "conda@conda-build.org" # Find the git executable before putting our dummy one on PATH. - git = find_executable('git') + git = find_executable("git") # Put the broken git on os.environ["PATH"] - exename = dummy_executable(testing_workdir, 'git') + exename = dummy_executable(testing_workdir, "git") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) # .. and ensure it gets run (and fails). - FNULL = open(os.devnull, 'w') + FNULL = open(os.devnull, "w") # Strangely .. # stderr=FNULL suppresses the output from echo on OS X whereas # stdout=FNULL suppresses the output from echo on Windows with pytest.raises(subprocess.CalledProcessError): - check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL) + check_call_env([exename, "--version"], stdout=FNULL, stderr=FNULL) FNULL.close() for tag in range(2): os.chdir(absolute_sub) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('absolute', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("absolute", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'absolute'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'absolute{tag}'], - env=sys_git_env) + check_call_env([git, "add", "absolute"], env=sys_git_env) + check_call_env([git, "commit", "-m", f"absolute{tag}"], env=sys_git_env) os.chdir(relative_sub) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('relative', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("relative", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'relative'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'relative{tag}'], - env=sys_git_env) + check_call_env([git, "add", "relative"], env=sys_git_env) + check_call_env([git, "commit", "-m", f"relative{tag}"], env=sys_git_env) os.chdir(toplevel) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('toplevel', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("toplevel", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'toplevel'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'toplevel{tag}'], - env=sys_git_env) + check_call_env([git, "add", "toplevel"], env=sys_git_env) + check_call_env([git, 
"commit", "-m", f"toplevel{tag}"], env=sys_git_env) if tag == 0: - check_call_env([git, 'submodule', 'add', - convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'], - env=sys_git_env) - check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'], - env=sys_git_env) + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "add", + convert_path_for_cygwin_or_msys2(git, absolute_sub), + "absolute", + ], + env=sys_git_env, + ) + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "add", + "../relative_sub", + "relative", + ], + env=sys_git_env, + ) else: # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we # can change this to `git submodule update --recursive`. - gits = git.replace('\\', '/') - check_call_env([git, 'submodule', 'foreach', gits, 'pull'], env=sys_git_env) - check_call_env([git, 'commit', '-am', f'added submodules@{tag}'], - env=sys_git_env) - check_call_env([git, 'tag', '-a', str(tag), '-m', f'tag {tag}'], - env=sys_git_env) + gits = git.replace("\\", "/") + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "foreach", + gits, + "pull", + ], + env=sys_git_env, + ) + check_call_env( + [git, "commit", "-am", f"added submodules@{tag}"], env=sys_git_env + ) + check_call_env( + [git, "tag", "-a", str(tag), "-m", f"tag {tag}"], env=sys_git_env + ) # It is possible to use `Git for Windows` here too, though you *must* not use a different # (type of) git than the one used above to add the absolute submodule, because .gitmodules @@ -617,43 +745,43 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As # things stand, my _b_env folder for this test contains more than 80 characters. 
- requirements = ('requirements', OrderedDict([ - ('build', - ['git # [False]', - 'm2-git # [win]', - 'm2-filesystem # [win]'])])) - recipe_dir = os.path.join(testing_workdir, 'recipe') + recipe_dir = os.path.join(testing_workdir, "recipe") if not os.path.exists(recipe_dir): os.makedirs(recipe_dir) - filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml') - data = OrderedDict([ - ('package', OrderedDict([ - ('name', 'relative_submodules'), - ('version', '{{ GIT_DESCRIBE_TAG }}')])), - ('source', OrderedDict([ - ('git_url', toplevel), - ('git_tag', str(tag))])), - requirements, - ('build', OrderedDict([ - ('script', - ['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > ' - '%PREFIX%\\summaries.txt # [win]', - 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > ' - '$PREFIX/summaries.txt # [not win]']) - ])), - ('test', OrderedDict([ - ('commands', - ['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]' - .format(tag, tag), - 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]', - 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]' - .format(tag, tag), - 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]']) - ])) - ]) - - with open(filename, 'w') as outfile: + filename = os.path.join(testing_workdir, "recipe", "meta.yaml") + data = { + "package": { + "name": "relative_submodules", + "version": "{{ GIT_DESCRIBE_TAG }}", + }, + "source": {"git_url": toplevel, "git_tag": str(tag)}, + "requirements": { + "build": [ + "git # [False]", + "m2-git # [win]", + "m2-filesystem # [win]", + ], + }, + "build": { + "script": [ + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > " + "%PREFIX%\\summaries.txt # [win]", + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > " + "$PREFIX/summaries.txt # [not win]", + ], + }, + "test": { + "commands": [ + f"echo absolute{tag}relative{tag} > %PREFIX%\\expected_summaries.txt # [win]", + "fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]", + f"echo absolute{tag}relative{tag} > $PREFIX/expected_summaries.txt # [not win]", + "diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]", + ], + }, + } + + with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) # Reset the path because our broken, dummy `git` would cause `render_recipe` # to fail, while no `git` will cause the build_dependencies to be installed. 
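The `-c protocol.file.allow=always` flags threaded through the submodule commands above are the usual workaround for CVE-2022-39253: recent Git releases refuse file-protocol submodules unless that setting is overridden for the invocation. A minimal stand-alone sketch of the same idea (the helper name and paths are hypothetical, not part of the patch):

import subprocess

def add_local_submodule(repo_dir: str, submodule_source: str, name: str) -> None:
    # Re-enable file:// submodules for this single git invocation only,
    # mirroring the flags used in the test above; the global config is untouched.
    subprocess.run(
        [
            "git",
            "-c",
            "protocol.file.allow=always",
            "submodule",
            "add",
            submodule_source,
            name,
        ],
        cwd=repo_dir,
        check=True,
    )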
@@ -661,51 +789,57 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # This will (after one spin round the loop) install and run 'git' with the # build env prepended to os.environ[] metadata = api.render(testing_workdir, config=testing_config)[0][0] - output = api.get_output_file_path(metadata, config=testing_config)[0] - assert (f"relative_submodules-{tag}-" in output) + output = api.get_output_file_paths(metadata, config=testing_config)[0] + assert f"relative_submodules-{tag}-" in output api.build(metadata, config=testing_config) def test_noarch(testing_workdir): - filename = os.path.join(testing_workdir, 'meta.yaml') + filename = os.path.join(testing_workdir, "meta.yaml") for noarch in (False, True): - data = OrderedDict([ - ('package', OrderedDict([ - ('name', 'test'), - ('version', '0.0.0')])), - ('build', OrderedDict([ - ('noarch', noarch)])) - ]) - with open(filename, 'w') as outfile: + data = OrderedDict( + [ + ("package", OrderedDict([("name", "test"), ("version", "0.0.0")])), + ("build", OrderedDict([("noarch", noarch)])), + ] + ) + with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) - output = api.get_output_file_path(testing_workdir)[0] - assert (os.path.sep + "noarch" + os.path.sep in output or not noarch) - assert (os.path.sep + "noarch" + os.path.sep not in output or noarch) + output = api.get_output_file_paths(testing_workdir)[0] + assert os.path.sep + "noarch" + os.path.sep in output or not noarch + assert os.path.sep + "noarch" + os.path.sep not in output or noarch -def test_disable_pip(testing_config, testing_metadata): +def test_disable_pip(testing_metadata): testing_metadata.config.disable_pip = True - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - testing_metadata.meta['build']['script'] = 'python -c "import pip; print(pip.__version__)"' + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + testing_metadata.meta["build"]["script"] = ( + 'python -c "import pip; print(pip.__version__)"' + ) with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) - testing_metadata.meta['build']['script'] = ('python -c "import setuptools; ' - 'print(setuptools.__version__)"') + testing_metadata.meta["build"]["script"] = ( + 'python -c "import setuptools; print(setuptools.__version__)"' + ) with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform.startswith('win'), - reason="rpath fixup not done on Windows.") -def test_rpath_unix(testing_config): +@pytest.mark.skipif( + sys.platform.startswith("win"), reason="rpath fixup not done on Windows." 
+) +def test_rpath_unix(testing_config, variants_conda_build_sysroot): testing_config.activate = True - api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config) + api.build( + os.path.join(metadata_dir, "_rpath"), + config=testing_config, + variants=variants_conda_build_sysroot, + ) -def test_noarch_none_value(testing_workdir, testing_config): +def test_noarch_none_value(testing_config): recipe = os.path.join(metadata_dir, "_noarch_none") with pytest.raises(exceptions.CondaBuildException): api.build(recipe, config=testing_config) @@ -713,36 +847,33 @@ def test_noarch_none_value(testing_workdir, testing_config): @pytest.mark.sanity def test_noarch_foo_value(testing_config): - outputs = api.build(os.path.join(metadata_dir, "noarch_generic"), config=testing_config) - metadata = json.loads(package_has_file(outputs[0], 'info/index.json')) - assert metadata['noarch'] == "generic" + outputs = api.build( + os.path.join(metadata_dir, "noarch_generic"), config=testing_config + ) + metadata = json.loads(package_has_file(outputs[0], "info/index.json")) + assert metadata["noarch"] == "generic" def test_about_json_content(testing_metadata): outputs = api.build(testing_metadata) - about = json.loads(package_has_file(outputs[0], 'info/about.json')) - assert 'conda_version' in about and about['conda_version'] == conda.__version__ - assert 'conda_build_version' in about and about['conda_build_version'] == __version__ - assert 'channels' in about and about['channels'] - assert 'tags' in about and about['tags'] == ["a", "b"] + about = json.loads(package_has_file(outputs[0], "info/about.json")) + assert "conda_version" in about and about["conda_version"] == conda.__version__ + assert ( + "conda_build_version" in about and about["conda_build_version"] == __version__ + ) + assert "channels" in about and about["channels"] + assert "tags" in about and about["tags"] == ["a", "b"] # this one comes in as a string - test type coercion - assert 'identifiers' in about and about['identifiers'] == ["a"] - try: - assert 'env_vars' in about and about['env_vars'] - except AssertionError: - # new versions of conda support this, so we should raise errors. 
- if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'): - raise - else: - pass + assert "identifiers" in about and about["identifiers"] == ["a"] + assert "env_vars" in about and about["env_vars"] - assert 'root_pkgs' in about and about['root_pkgs'] + assert "root_pkgs" in about and about["root_pkgs"] @pytest.mark.parametrize( "name,field", [("license", "license_file"), ("prelink_message", "prelink_message")] ) -def test_about_license_file_and_prelink_message(testing_workdir, testing_config, name, field): +def test_about_license_file_and_prelink_message(testing_config, name, field): base_dir = os.path.join(metadata_dir, f"_about_{field}/recipes") recipe = os.path.join(base_dir, "single") @@ -756,10 +887,18 @@ def test_about_license_file_and_prelink_message(testing_workdir, testing_config, recipe = os.path.join(base_dir, "dir") outputs = api.build(recipe, config=testing_config) - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-source/first-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-source/second-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-recipe/first-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-recipe/second-{name}.txt") + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-source/first-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-source/second-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-recipe/first-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-recipe/second-{name}.txt" + ) recipe = os.path.join(base_dir, "dir-no-slash-suffix") assert os.path.isdir(recipe) @@ -769,92 +908,81 @@ def test_about_license_file_and_prelink_message(testing_workdir, testing_config, @pytest.mark.slow -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. -@pytest.mark.xfail(parse_version(conda.__version__) < parse_version("4.3.14"), - reason="new noarch supported starting with conda 4.3.14") def test_noarch_python_with_tests(testing_config): recipe = os.path.join(metadata_dir, "_noarch_python_with_tests") pkg = api.build(recipe, config=testing_config)[0] # noarch recipes with commands should generate both .bat and .sh files. 
- assert package_has_file(pkg, 'info/test/run_test.bat') - assert package_has_file(pkg, 'info/test/run_test.sh') + assert package_has_file(pkg, "info/test/run_test.bat") + assert package_has_file(pkg, "info/test/run_test.sh") @pytest.mark.sanity def test_noarch_python_1(testing_config): - output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0] - assert package_has_file(output, 'info/files') != '' - extra = json.loads(package_has_file(output, 'info/link.json')) - assert 'noarch' in extra - assert 'entry_points' in extra['noarch'] - assert 'type' in extra['noarch'] - assert 'package_metadata_version' in extra - - -@pytest.mark.sanity -@pytest.mark.xfail(conda_47, reason="parallel verify/execute in conda 4.7 breaks legacy noarch, which depends on having the env files present before pre-link scripts are run.") -def test_legacy_noarch_python(testing_config): - output = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"), - config=testing_config)[0] - # make sure that the package is going into the noarch folder - assert os.path.basename(os.path.dirname(output)) == 'noarch' - - -@pytest.mark.skipif(True, - reason="Re-enable when private application environments are fully implemented " - "in conda. " - "See https://github.com/conda/conda/issues/3912#issuecomment-374820599") -def test_preferred_env(testing_config): - recipe = os.path.join(metadata_dir, "_preferred_env") - output = api.build(recipe, config=testing_config)[0] - extra = json.loads(package_has_file(output, 'info/link.json').decode()) - assert 'preferred_env' in extra - assert 'name' in extra['preferred_env'] - assert 'executable_paths' in extra['preferred_env'] - exe_paths = extra['preferred_env']['executable_paths'] - if on_win: - assert exe_paths == ['Scripts/exepath1.bat', 'Scripts/exepath2.bat'] - else: - assert exe_paths == ['bin/exepath1', 'bin/exepath2'] - assert 'package_metadata_version' in extra + output = api.build( + os.path.join(metadata_dir, "_noarch_python"), config=testing_config + )[0] + assert package_has_file(output, "info/files") != "" + extra = json.loads(package_has_file(output, "info/link.json")) + assert "noarch" in extra + assert "entry_points" in extra["noarch"] + assert "type" in extra["noarch"] + assert "package_metadata_version" in extra @pytest.mark.sanity def test_skip_compile_pyc(testing_config): - outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config + ) tf = tarfile.open(outputs[0]) pyc_count = 0 for f in tf.getmembers(): filename = os.path.basename(f.name) _, ext = os.path.splitext(filename) - basename = filename.split('.', 1)[0] - if basename == 'skip_compile_pyc': - assert not ext == '.pyc', f"a skip_compile_pyc .pyc was compiled: {filename}" - if ext == '.pyc': - assert basename == 'compile_pyc', f"an unexpected .pyc was compiled: {filename}" + basename = filename.split(".", 1)[0] + if basename == "skip_compile_pyc": + assert ( + not ext == ".pyc" + ), f"a skip_compile_pyc .pyc was compiled: {filename}" + if ext == ".pyc": + assert ( + basename == "compile_pyc" + ), f"an unexpected .pyc was compiled: {filename}" pyc_count = pyc_count + 1 - assert pyc_count == 2, f"there should be 2 .pyc files, instead there were {pyc_count}" + assert ( + pyc_count == 2 + ), f"there should be 2 .pyc files, instead there were {pyc_count}" def test_detect_binary_files_with_prefix(testing_config): - outputs = api.build(os.path.join(metadata_dir, 
"_detect_binary_files_with_prefix"), - config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "_detect_binary_files_with_prefix"), + config=testing_config, + ) matches = [] with tarfile.open(outputs[0]) as tf: - has_prefix = tf.extractfile('info/has_prefix') - contents = [p.strip().decode('utf-8') for p in - has_prefix.readlines()] + has_prefix = tf.extractfile("info/has_prefix") + contents = [p.strip().decode("utf-8") for p in has_prefix.readlines()] has_prefix.close() - matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or - entry.endswith('"binary-has-prefix"')] + matches = [ + entry + for entry in contents + if entry.endswith("binary-has-prefix") + or entry.endswith('"binary-has-prefix"') + ] assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix" - assert ' binary ' in matches[0], "binary-has-prefix not recorded as binary in info/has_prefix" + assert ( + " binary " in matches[0] + ), "binary-has-prefix not recorded as binary in info/has_prefix" def test_skip_detect_binary_files_with_prefix(testing_config): @@ -863,16 +991,21 @@ def test_skip_detect_binary_files_with_prefix(testing_config): matches = [] with tarfile.open(outputs[0]) as tf: try: - has_prefix = tf.extractfile('info/has_prefix') - contents = [p.strip().decode('utf-8') for p in - has_prefix.readlines()] + has_prefix = tf.extractfile("info/has_prefix") + contents = [p.strip().decode("utf-8") for p in has_prefix.readlines()] has_prefix.close() - matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or - entry.endswith('"binary-has-prefix"')] + matches = [ + entry + for entry in contents + if entry.endswith("binary-has-prefix") + or entry.endswith('"binary-has-prefix"') + ] except: pass - assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite:" \ - "build/detect_binary_files_with_prefix: false" + assert len(matches) == 0, ( + "binary-has-prefix recorded in info/has_prefix despite:" + "build/detect_binary_files_with_prefix: false" + ) def test_fix_permissions(testing_config): @@ -880,18 +1013,21 @@ def test_fix_permissions(testing_config): outputs = api.build(recipe, config=testing_config) with tarfile.open(outputs[0]) as tf: for f in tf.getmembers(): - assert f.mode & 0o444 == 0o444, f"tar member '{f.name}' has invalid (read) mode" + assert ( + f.mode & 0o444 == 0o444 + ), f"tar member '{f.name}' has invalid (read) mode" @pytest.mark.sanity @pytest.mark.skipif(not on_win, reason="windows-only functionality") -@pytest.mark.parametrize('recipe_name', ["_script_win_creates_exe", - "_script_win_creates_exe_garbled"]) +@pytest.mark.parametrize( + "recipe_name", ["_script_win_creates_exe", "_script_win_creates_exe_garbled"] +) def test_script_win_creates_exe(testing_config, recipe_name): recipe = os.path.join(metadata_dir, recipe_name) outputs = api.build(recipe, config=testing_config) - assert package_has_file(outputs[0], 'Scripts/test-script.exe') - assert package_has_file(outputs[0], 'Scripts/test-script-script.py') + assert package_has_file(outputs[0], "Scripts/test-script.exe") + assert package_has_file(outputs[0], "Scripts/test-script-script.py") @pytest.mark.sanity @@ -902,25 +1038,36 @@ def test_output_folder_moves_file(testing_metadata, testing_workdir): @pytest.mark.sanity -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. 
The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) def test_info_files_json(testing_config): - outputs = api.build(os.path.join(metadata_dir, "_ignore_some_prefix_files"), - config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "_ignore_some_prefix_files"), config=testing_config + ) assert package_has_file(outputs[0], "info/paths.json") with tarfile.open(outputs[0]) as tf: - data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8')) - fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link", - "prefix_placeholder", "inode_paths"] + data = json.loads(tf.extractfile("info/paths.json").read().decode("utf-8")) + fields = [ + "_path", + "sha256", + "size_in_bytes", + "path_type", + "file_mode", + "no_link", + "prefix_placeholder", + "inode_paths", + ] for key in data.keys(): - assert key in ['paths', 'paths_version'] - for paths in data.get('paths'): + assert key in ["paths", "paths_version"] + for paths in data.get("paths"): for field in paths.keys(): assert field in fields - assert len(data.get('paths')) == 2 - for file in data.get('paths'): + assert len(data.get("paths")) == 2 + for file in data.get("paths"): for key in file.keys(): assert key in fields short_path = file.get("_path") @@ -932,52 +1079,59 @@ def test_info_files_json(testing_config): assert file.get("file_mode") is None -def test_build_expands_wildcards(mocker, testing_workdir): +def test_build_expands_wildcards(mocker): build_tree = mocker.patch("conda_build.build.build_tree") config = api.Config() - files = ['abc', 'acb'] + files = ["abc", "acb"] for f in files: os.makedirs(f) - with open(os.path.join(f, 'meta.yaml'), 'w') as fh: - fh.write('\n') + with open(os.path.join(f, "meta.yaml"), "w") as fh: + fh.write("\n") api.build(["a*"], config=config) - output = sorted(os.path.join(os.getcwd(), path, 'meta.yaml') for path in files) + output = sorted(os.path.join(os.getcwd(), path, "meta.yaml") for path in files) - build_tree.assert_called_once_with(output, - config=mocker.ANY, - stats=mocker.ANY, - build_only=False, - post=None, notest=False, - variants=None) + build_tree.assert_called_once_with( + output, + config=mocker.ANY, + stats=mocker.ANY, + build_only=False, + post=None, + notest=False, + variants=None, + ) -@pytest.mark.parametrize('set_build_id', [True, False]) +@pytest.mark.parametrize("set_build_id", [True, False]) def test_remove_workdir_default(testing_config, caplog, set_build_id): - recipe = os.path.join(metadata_dir, '_keep_work_dir') + recipe = os.path.join(metadata_dir, "_keep_work_dir") # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. 
This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, set_build_id=set_build_id) - assert not glob(os.path.join(metadata.config.work_dir, '*')) + assert not glob(os.path.join(metadata.config.work_dir, "*")) def test_keep_workdir_and_dirty_reuse(testing_config, capfd): - recipe = os.path.join(metadata_dir, '_keep_work_dir') + recipe = os.path.join(metadata_dir, "_keep_work_dir") # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another - metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] + metadata = api.render( + recipe, config=testing_config, dirty=True, remove_work_dir=False + )[0][0] workdir = metadata.config.work_dir api.build(metadata) out, err = capfd.readouterr() - assert glob(os.path.join(metadata.config.work_dir, '*')) + assert glob(os.path.join(metadata.config.work_dir, "*")) # test that --dirty reuses the same old folder - metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] + metadata = api.render( + recipe, config=testing_config, dirty=True, remove_work_dir=False + )[0][0] assert workdir == metadata.config.work_dir # test that without --dirty, we don't reuse the folder @@ -989,221 +1143,244 @@ def test_keep_workdir_and_dirty_reuse(testing_config, capfd): @pytest.mark.sanity def test_workdir_removal_warning(testing_config, caplog): - recipe = os.path.join(metadata_dir, '_test_uses_src_dir') + recipe = os.path.join(metadata_dir, "_test_uses_src_dir") with pytest.raises(ValueError) as exc: api.build(recipe, config=testing_config) assert "work dir is removed" in str(exc) -# @pytest.mark.serial -# @pytest.mark.skipif(not sys.platform.startswith('linux'), -# reason="cross compiler packages created only on Linux right now") -# @pytest.mark.xfail(VersionOrder(conda.__version__) < VersionOrder('4.3.2'), -# reason="not completely implemented yet") -# def test_cross_compiler(testing_workdir, testing_config, capfd): -# # TODO: testing purposes. Package from @mingwandroid's channel, copied to conda_build_test -# testing_config.channel_urls = ('conda_build_test', ) -# # activation is necessary to set the appropriate toolchain env vars -# testing_config.activate = True -# # testing_config.debug = True -# recipe_dir = os.path.join(metadata_dir, '_cross_helloworld') -# output = api.build(recipe_dir, config=testing_config)[0] -# assert output.startswith(os.path.join(testing_config.croot, 'linux-imx351uc')) - - @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") -def test_append_python_app_osx(testing_config): +@pytest.mark.skipif(sys.platform != "darwin", reason="relevant to mac only") +def test_append_python_app_osx(testing_config, conda_build_test_recipe_envvar: str): """Recipes that use osx_is_app need to have python.app in their runtime requirements. conda-build will add it if it's missing.""" - recipe = os.path.join(metadata_dir, '_osx_is_app_missing_python_app') + recipe = os.path.join(metadata_dir, "_osx_is_app_missing_python_app") # tests will fail here if python.app is not added to the run reqs by conda-build, because # without it, pythonw will be missing. 
api.build(recipe, config=testing_config) -# Not sure about this behavior. Basically, people need to realize that if they -# start with a recipe from disk, they should not then alter the metadata -# object. Later reparsing will clobber their edits to the object. The -# complicated thing is that these edits are indistinguishable from Jinja2 -# templating doing its normal thing. - -# def test_clobbering_manually_set_metadata_raises(testing_metadata, testing_workdir): -# api.output_yaml(testing_metadata, 'meta.yaml') -# metadata = api.render(testing_workdir)[0][0] -# # make the package meta dict out of sync with file contents -# metadata.meta['package']['name'] = 'steve' -# # re-render happens as part of build. We should see an error about clobbering our customized -# # meta dict -# with pytest.raises(ValueError): -# api.build(metadata) - - @pytest.mark.sanity def test_run_exports(testing_metadata, testing_config, testing_workdir): - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, notest=True) - api.build(os.path.join(metadata_dir, '_run_exports_implicit_weak'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) + api.build( + os.path.join(metadata_dir, "_run_exports_implicit_weak"), + config=testing_config, + notest=True, + ) # run_exports is tricky. We mostly only ever want things in "host". Here are the conditions: # 1. only build section present (legacy recipe). Here, use run_exports from build. Because build and host # will be merged when build subdir == host_subdir, the weak run_exports should be present. - testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] - api.output_yaml(testing_metadata, 'meta.yaml') - m = api.render(testing_workdir, config=testing_config)[0][0] - assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] - assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run'] + testing_metadata.meta["requirements"]["build"] = ["test_has_run_exports"] + api.output_yaml(testing_metadata, "meta.yaml") + metadata = api.render(testing_workdir, config=testing_config)[0][0] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. - testing_metadata.meta['requirements']['build'] = ['test_has_run_exports_implicit_weak', - '{{ compiler("c") }}'] - testing_metadata.meta['requirements']['host'] = ['python'] - api.output_yaml(testing_metadata, 'host_present_weak/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'host_present_weak'), config=testing_config)[0][0] - assert 'weak_pinned_package 2.0.*' not in m.meta['requirements'].get('run', []) + testing_metadata.meta["requirements"]["build"] = [ + "test_has_run_exports_implicit_weak", + '{{ compiler("c") }}', + ] + testing_metadata.meta["requirements"]["host"] = ["python"] + api.output_yaml(testing_metadata, "host_present_weak/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "host_present_weak"), config=testing_config + )[0][0] + assert "weak_pinned_package 2.0.*" not in metadata.meta["requirements"].get( + "run", [] + ) # 3. host present, and deps in build have "strong" run_exports section. use host, add # in "strong" from build. 
- testing_metadata.meta['requirements']['build'] = ['test_has_run_exports', '{{ compiler("c") }}'] - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports_implicit_weak'] - api.output_yaml(testing_metadata, 'host_present_strong/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'host_present_strong'), - config=testing_config)[0][0] - assert 'strong_pinned_package 1.0 0' in m.meta['requirements']['host'] - assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] + testing_metadata.meta["requirements"]["build"] = [ + "test_has_run_exports", + '{{ compiler("c") }}', + ] + testing_metadata.meta["requirements"]["host"] = [ + "test_has_run_exports_implicit_weak" + ] + api.output_yaml(testing_metadata, "host_present_strong/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "host_present_strong"), config=testing_config + )[0][0] + assert "strong_pinned_package 1.0 0" in metadata.meta["requirements"]["host"] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports should be excluded, since it is a build dep - assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run'] + assert "weak_pinned_package 1.0.*" not in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports_implicit_weak should be present, since it is a host dep - assert 'weak_pinned_package 2.0.*' in m.meta['requirements']['run'] + assert "weak_pinned_package 2.0.*" in metadata.meta["requirements"]["run"] @pytest.mark.sanity def test_ignore_run_exports(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports'] - testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package'] + testing_metadata.meta["requirements"]["host"] = ["test_has_run_exports"] + testing_metadata.meta["build"]["ignore_run_exports"] = ["downstream_pinned_package"] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) - assert 'downstream_pinned_package 1.0' not in m.meta['requirements'].get('run', []) + assert "downstream_pinned_package 1.0" not in m.meta["requirements"].get("run", []) @pytest.mark.sanity def test_ignore_run_exports_from(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports'] - testing_metadata.meta['build']['ignore_run_exports_from'] = ['test_has_run_exports'] + testing_metadata.meta["requirements"]["host"] = ["test_has_run_exports"] + testing_metadata.meta["build"]["ignore_run_exports_from"] = ["test_has_run_exports"] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) - assert 'downstream_pinned_package 1.0' not in m.meta['requirements'].get('run', []) + assert "downstream_pinned_package 1.0" not in m.meta["requirements"].get("run", []) -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, 
- reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) def test_run_exports_noarch_python(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports_noarch'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports_noarch"), + config=testing_config, + notest=True, + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['python'] - testing_metadata.meta['requirements']['run'] = ['python'] - testing_metadata.meta['build']['noarch'] = 'python' + testing_metadata.meta["requirements"]["host"] = ["python"] + testing_metadata.meta["requirements"]["run"] = ["python"] + testing_metadata.meta["build"]["noarch"] = "python" testing_metadata.config.index = None - testing_metadata.config.variant["python"] = "3.6 with_run_exports" + testing_metadata.config.variant["python"] = "3.8 with_run_exports" m = finalize_metadata(testing_metadata) - assert 'python 3.6 with_run_exports' in m.meta['requirements'].get('host', []) - assert 'python 3.6 with_run_exports' not in m.meta['requirements'].get('run', []) + assert "python 3.6 with_run_exports" in m.meta["requirements"].get("host", []) + assert "python 3.6 with_run_exports" not in m.meta["requirements"].get("run", []) def test_run_exports_constrains(testing_metadata, testing_config, testing_workdir): - api.build(os.path.join(metadata_dir, '_run_exports_constrains'), config=testing_config, - notest=True) - - testing_metadata.meta['requirements']['build'] = ['run_exports_constrains'] - testing_metadata.meta['requirements']['host'] = [] - api.output_yaml(testing_metadata, 'in_build/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'in_build'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'strong_run_export'} == reqs_set('run') - assert {'strong_constrains_export'} == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = [] - testing_metadata.meta['requirements']['host'] = ['run_exports_constrains'] - api.output_yaml(testing_metadata, 'in_host/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'in_host'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'strong_run_export', 'weak_run_export'} == reqs_set('run') - assert {'strong_constrains_export', 'weak_constrains_export'} == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = ['run_exports_constrains_only_weak'] - testing_metadata.meta['requirements']['host'] = [] - api.output_yaml(testing_metadata, 'only_weak_in_build/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'only_weak_in_build'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert set() == reqs_set('run') - assert set() == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = [] - testing_metadata.meta['requirements']['host'] = ['run_exports_constrains_only_weak'] - 
api.output_yaml(testing_metadata, 'only_weak_in_host/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'only_weak_in_host'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'weak_run_export'} == reqs_set('run') - assert {'weak_constrains_export'} == reqs_set('run_constrained') + api.build( + os.path.join(metadata_dir, "_run_exports_constrains"), + config=testing_config, + notest=True, + ) + + testing_metadata.meta["requirements"]["build"] = ["run_exports_constrains"] + testing_metadata.meta["requirements"]["host"] = [] + api.output_yaml(testing_metadata, "in_build/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "in_build"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) + assert {"strong_run_export"} == reqs_set("run") + assert {"strong_constrains_export"} == reqs_set("run_constrained") + + testing_metadata.meta["requirements"]["build"] = [] + testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains"] + api.output_yaml(testing_metadata, "in_host/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "in_host"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) + assert {"strong_run_export", "weak_run_export"} == reqs_set("run") + assert {"strong_constrains_export", "weak_constrains_export"} == reqs_set( + "run_constrained" + ) + + testing_metadata.meta["requirements"]["build"] = [ + "run_exports_constrains_only_weak" + ] + testing_metadata.meta["requirements"]["host"] = [] + api.output_yaml(testing_metadata, "only_weak_in_build/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "only_weak_in_build"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) + assert set() == reqs_set("run") + assert set() == reqs_set("run_constrained") + + testing_metadata.meta["requirements"]["build"] = [] + testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains_only_weak"] + api.output_yaml(testing_metadata, "only_weak_in_host/meta.yaml") + metadata = api.render( + os.path.join(testing_workdir, "only_weak_in_host"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) + assert {"weak_run_export"} == reqs_set("run") + assert {"weak_constrains_export"} == reqs_set("run_constrained") def test_pin_subpackage_exact(testing_config): - recipe = os.path.join(metadata_dir, '_pin_subpackage_exact') - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - assert any(re.match(r'run_exports_subpkg\ 1\.0\ 0', req) - for (m, _, _) in ms for req in m.meta.get('requirements', {}).get('run', [])) + recipe = os.path.join(metadata_dir, "_pin_subpackage_exact") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + assert any( + re.match(r"run_exports_subpkg\ 1\.0\ 0", req) + for metadata, _, _ in metadata_tuples + for req in metadata.meta.get("requirements", {}).get("run", []) + ) @pytest.mark.sanity @pytest.mark.serial -@pytest.mark.skipif(sys.platform != 'linux', reason="xattr code written here is specific to linux") -def test_copy_read_only_file_with_xattr(testing_config, testing_homedir): - if not testing_homedir: - return pytest.xfail("could not create a temporary folder in {} (tmpfs inappropriate for xattrs)". 
- format('${HOME}' if sys.platform != 'win32' else '%UserProfile%')) - src_recipe = os.path.join(metadata_dir, '_xattr_copy') - recipe = os.path.join(testing_homedir, '_xattr_copy') - copy_into(src_recipe, recipe) - # file is r/w for owner, but we change it to 400 after setting the attribute - ro_file = os.path.join(recipe, 'mode_400_file') - # tmpfs on modern Linux does not support xattr in general. - # https://stackoverflow.com/a/46598063 - # tmpfs can support extended attributes if you enable CONFIG_TMPFS_XATTR in Kernel config. - # But Currently this enables support for the trusted.* and security.* namespaces - try: - subprocess.check_call(f'setfattr -n user.attrib -v somevalue {ro_file}', shell=True) - except: - return pytest.xfail("setfattr not possible in {}, see https://stackoverflow.com/a/46598063".format( - testing_homedir)) - subprocess.check_call(f'chmod 400 {ro_file}', shell=True) - api.build(recipe, config=testing_config) +@pytest.mark.skipif(on_mac and not which("xattr"), reason="`xattr` unavailable") +@pytest.mark.skipif(on_linux and not which("setfattr"), reason="`setfattr` unavailable") +@pytest.mark.skipif(on_win, reason="Windows doesn't support xattr") +def test_copy_read_only_file_with_xattr(testing_config: Config, testing_homedir: Path): + recipe = Path(testing_homedir, "_xattr_copy") + copy_into(metadata_path / "_xattr_copy", recipe) + + # file is u=rw,go=r (0o644) to start, change it to u=r,go= (0o400) after setting the attribute + ro_file = recipe / "mode_400_file" + + # set extended attributes + if on_linux: + # tmpfs on modern Linux does not support xattr in general. + # https://stackoverflow.com/a/46598063 + # tmpfs can support extended attributes if you enable CONFIG_TMPFS_XATTR in Kernel config. + # But Currently this enables support for the trusted.* and security.* namespaces + try: + subprocess.run( + f"setfattr -n user.attrib -v somevalue {ro_file}", + shell=True, + check=True, + ) + except subprocess.CalledProcessError: + pytest.xfail("`setfattr` failed, see https://stackoverflow.com/a/46598063") + else: + subprocess.run( + f"xattr -w user.attrib somevalue {ro_file}", + shell=True, + check=True, + ) + + # restrict file permissions + ro_file.chmod(0o400) + + api.build(str(recipe), config=testing_config) @pytest.mark.sanity @pytest.mark.serial def test_env_creation_fail_exits_build(testing_config): - recipe = os.path.join(metadata_dir, '_post_link_exits_after_retry') + recipe = os.path.join(metadata_dir, "_post_link_exits_after_retry") with pytest.raises((RuntimeError, LinkError, CondaError, KeyError)): api.build(recipe, config=testing_config) - recipe = os.path.join(metadata_dir, '_post_link_exits_tests') + recipe = os.path.join(metadata_dir, "_post_link_exits_tests") with pytest.raises((RuntimeError, LinkError, CondaError, KeyError)): api.build(recipe, config=testing_config) @@ -1213,20 +1390,22 @@ def test_recursion_packages(testing_config): """Two packages that need to be built are listed in the recipe make sure that both get built before the one needing them gets built.""" - recipe = os.path.join(metadata_dir, '_recursive-build-two-packages') + recipe = os.path.join(metadata_dir, "_recursive-build-two-packages") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_recursion_layers(testing_config): """go two 'hops' - try to build a, but a needs b, so build b first, then come back to a""" - recipe = os.path.join(metadata_dir, '_recursive-build-two-layers') + recipe = os.path.join(metadata_dir, "_recursive-build-two-layers") 
api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'win32', reason=("spaces break openssl prefix " - "replacement on *nix")) +@pytest.mark.skipif( + sys.platform != "win32", + reason="spaces break openssl prefix replacement on *nix", +) def test_croot_with_spaces(testing_metadata, testing_workdir): testing_metadata.config.croot = os.path.join(testing_workdir, "space path") api.build(testing_metadata) @@ -1234,178 +1413,194 @@ def test_croot_with_spaces(testing_metadata, testing_workdir): @pytest.mark.sanity def test_unknown_selectors(testing_config): - recipe = os.path.join(metadata_dir, 'unknown_selector') + recipe = os.path.join(metadata_dir, "unknown_selector") api.build(recipe, config=testing_config) -@pytest.mark.sanity -def test_extract_tarball_with_unicode_filename(testing_config): - """See https://github.com/conda/conda-build/pull/1779""" - recipe = os.path.join(metadata_dir, '_unicode_in_tarball') - api.build(recipe, config=testing_config) - - -def test_failed_recipe_leaves_folders(testing_config, testing_workdir): - recipe = os.path.join(fail_dir, 'recursive-build') - m = api.render(recipe, config=testing_config)[0][0] - locks = get_conda_operation_locks(m.config) +# the locks can be very flaky on GitHub Windows Runners +# https://github.com/conda/conda-build/issues/4685 +@pytest.mark.flaky(reruns=5, reruns_delay=2) +def test_failed_recipe_leaves_folders(testing_config): + recipe = os.path.join(fail_dir, "recursive-build") + metadata = api.render(recipe, config=testing_config)[0][0] + locks = get_conda_operation_locks(metadata.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): - api.build(m) - assert os.path.isdir(m.config.build_folder), 'build folder was removed' - assert os.listdir(m.config.build_folder), 'build folder has no files' + api.build(metadata) + assert os.path.isdir(metadata.config.build_folder), "build folder was removed" + assert os.listdir(metadata.config.build_folder), "build folder has no files" + # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations - any_locks = False - locks_list = set() - locale.getpreferredencoding(False) - for lock in locks: - if os.path.isfile(lock.lock_file): - any_locks = True - dest_path = base64.b64decode(os.path.basename(lock.lock_file)) - if hasattr(dest_path, 'decode'): - dest_path = dest_path.decode(sys.getfilesystemencoding(), errors='backslashreplace') - locks_list.add((lock.lock_file, dest_path)) - assert not any_locks, "remaining locks:\n{}".format('\n'.join('->'.join((l, r)) - for (l, r) in locks_list)) + assert [lock.lock_file for lock in locks if os.path.isfile(lock.lock_file)] == [] @pytest.mark.sanity def test_only_r_env_vars_defined(testing_config): - recipe = os.path.join(metadata_dir, '_r_env_defined') + recipe = os.path.join(metadata_dir, "_r_env_defined") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_only_perl_env_vars_defined(testing_config): - recipe = os.path.join(metadata_dir, '_perl_env_defined') + recipe = os.path.join(metadata_dir, "_perl_env_defined") api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(on_win, reason='no lua package on win') +@pytest.mark.skipif(on_win, reason="no lua package on win") def test_only_lua_env(testing_config): - recipe = os.path.join(metadata_dir, '_lua_env_defined') + recipe = os.path.join(metadata_dir, "_lua_env_defined") testing_config.set_build_id = False api.build(recipe, 
config=testing_config) def test_run_constrained_stores_constrains_info(testing_config): - recipe = os.path.join(metadata_dir, '_run_constrained') + recipe = os.path.join(metadata_dir, "_run_constrained") out_file = api.build(recipe, config=testing_config)[0] - info_contents = json.loads(package_has_file(out_file, 'info/index.json')) - assert 'constrains' in info_contents - assert len(info_contents['constrains']) == 1 - assert info_contents['constrains'][0] == 'bzip2 1.*' + info_contents = json.loads(package_has_file(out_file, "info/index.json")) + assert "constrains" in info_contents + assert len(info_contents["constrains"]) == 1 + assert info_contents["constrains"][0] == "bzip2 1.*" @pytest.mark.sanity def test_no_locking(testing_config): - recipe = os.path.join(metadata_dir, 'source_git_jinja2') - api.update_index(os.path.join(testing_config.croot)) + recipe = os.path.join(metadata_dir, "source_git_jinja2") + update_index(os.path.join(testing_config.croot)) api.build(recipe, config=testing_config, locking=False) @pytest.mark.sanity -def test_test_dependencies(testing_workdir, testing_config): - recipe = os.path.join(fail_dir, 'check_test_dependencies') +def test_test_dependencies(testing_config): + recipe = os.path.join(fail_dir, "check_test_dependencies") with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) - assert 'Unsatisfiable dependencies for platform ' in str(e.value) - assert 'pytest-package-does-not-exist' in str(e.value) + assert "Unsatisfiable dependencies for platform " in str(e.value) + assert "pytest-package-does-not-exist" in str(e.value) @pytest.mark.sanity -def test_runtime_dependencies(testing_workdir, testing_config): - recipe = os.path.join(fail_dir, 'check_runtime_dependencies') +def test_runtime_dependencies(testing_config): + recipe = os.path.join(fail_dir, "check_runtime_dependencies") with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) - assert 'Unsatisfiable dependencies for platform ' in str(e.value) - assert 'some-nonexistent-package1' in str(e.value) + assert "Unsatisfiable dependencies for platform " in str(e.value) + assert "some-nonexistent-package1" in str(e.value) @pytest.mark.sanity -def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata): - testing_metadata.config.anaconda_upload = True - del testing_metadata.meta['test'] - api.output_yaml(testing_metadata, 'meta.yaml') - call = mocker.patch.object(conda_build.build.subprocess, 'call') - cc_conda_build['force_upload'] = False +def test_no_force_upload( + mocker: MockerFixture, + monkeypatch: MonkeyPatch, + testing_workdir: str | os.PathLike | Path, + testing_metadata: MetaData, + request: FixtureRequest, +): + # this is nearly identical to tests/cli/test_main_build.py::test_no_force_upload + # only difference is this tests `conda_build.api.build` + request.addfinalizer(reset_config) + call = mocker.patch("subprocess.call") + anaconda = find_executable("anaconda") + + # render recipe + api.output_yaml(testing_metadata, "meta.yaml") + + # mock Config.set_keys to always set anaconda_upload to True + # conda's Context + conda_build's MetaData & Config objects interact in such an + # awful way that mocking these configurations is ugly and confusing, all of it + # needs major refactoring + set_keys = Config.set_keys # store original method + override = {"anaconda_upload": True} + monkeypatch.setattr( + Config, + "set_keys", + lambda self, **kwargs: set_keys(self, 
**{**kwargs, **override}), + ) + + # check for normal upload + override["force_upload"] = False pkg = api.build(testing_workdir) - assert call.called_once_with(['anaconda', 'upload', pkg]) - del cc_conda_build['force_upload'] + call.assert_called_once_with([anaconda, "upload", *pkg]) + call.reset_mock() + + # check for force upload + override["force_upload"] = True pkg = api.build(testing_workdir) - assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) + call.assert_called_once_with([anaconda, "upload", "--force", *pkg]) @pytest.mark.sanity def test_setup_py_data_in_env(testing_config): - recipe = os.path.join(metadata_dir, '_setup_py_data_in_env') + recipe = os.path.join(metadata_dir, "_setup_py_data_in_env") # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic with pytest.raises(subprocess.CalledProcessError): - api.build(recipe, config=testing_config, python='3.5') + api.build(recipe, config=testing_config, python="3.5") @pytest.mark.sanity def test_numpy_xx(testing_config): - recipe = os.path.join(metadata_dir, '_numpy_xx') - api.render(recipe, config=testing_config, numpy='1.15', python="3.6") + recipe = os.path.join(metadata_dir, "_numpy_xx") + api.render(recipe, config=testing_config, numpy="1.15", python="3.6") @pytest.mark.sanity def test_numpy_xx_host(testing_config): - recipe = os.path.join(metadata_dir, '_numpy_xx_host') - api.render(recipe, config=testing_config, numpy='1.15', python="3.6") + recipe = os.path.join(metadata_dir, "_numpy_xx_host") + api.render(recipe, config=testing_config, numpy="1.15", python="3.6") @pytest.mark.sanity def test_python_xx(testing_config): - recipe = os.path.join(metadata_dir, '_python_xx') - api.render(recipe, config=testing_config, python='3.5') + recipe = os.path.join(metadata_dir, "_python_xx") + api.render(recipe, config=testing_config, python="3.5") @pytest.mark.sanity -def test_indirect_numpy_dependency(testing_metadata, testing_workdir, testing_config): - testing_metadata.meta['requirements']['build'] = ['pandas'] - api.output_yaml(testing_metadata, os.path.join(testing_workdir, 'meta.yaml')) - api.render(testing_workdir, numpy='1.13', notest=True) +def test_indirect_numpy_dependency(testing_metadata, testing_workdir): + testing_metadata.meta["requirements"]["build"] = ["pandas"] + api.output_yaml(testing_metadata, os.path.join(testing_workdir, "meta.yaml")) + api.render(testing_workdir, numpy="1.13", notest=True) @pytest.mark.sanity -def test_dependencies_with_notest(testing_workdir, testing_config): - recipe = os.path.join(metadata_dir, '_test_dependencies') +def test_dependencies_with_notest(testing_config): + recipe = os.path.join(metadata_dir, "_test_dependencies") api.build(recipe, config=testing_config, notest=True) with pytest.raises(DependencyNeedsBuildingError) as excinfo: api.build(recipe, config=testing_config, notest=False) - assert 'Unsatisfiable dependencies for platform' in str(excinfo.value) - assert 'somenonexistentpackage1' in str(excinfo.value) + assert "Unsatisfiable dependencies for platform" in str(excinfo.value) + assert "somenonexistentpackage1" in str(excinfo.value) @pytest.mark.sanity def test_source_cache_build(testing_workdir): - recipe = os.path.join(metadata_dir, 'source_git_jinja2') + recipe = os.path.join(metadata_dir, "source_git_jinja2") config = api.Config(src_cache_root=testing_workdir) api.build(recipe, notest=True, config=config) - git_cache_directory = f'{testing_workdir}/git_cache' + 
git_cache_directory = f"{testing_workdir}/git_cache" assert os.path.isdir(git_cache_directory) - files = [filename for _, _, filenames in walk(git_cache_directory) - for filename in filenames] + files = [ + filename + for _, _, filenames in walk(git_cache_directory) + for filename in filenames + ] assert len(files) > 0 @pytest.mark.slow def test_copy_test_source_files(testing_config): - recipe = os.path.join(metadata_dir, '_test_test_source_files') + recipe = os.path.join(metadata_dir, "_test_test_source_files") filenames = set() for copy in (False, True): testing_config.copy_test_source_files = copy @@ -1419,23 +1614,29 @@ def test_copy_test_source_files(testing_config): # nesting of test/test here is because info/test is the main folder # for test files, then test is the source_files folder we specify, # and text.txt is within that. - if f.name == 'info/test/test_files_folder/text.txt': + if f.name == "info/test/test_files_folder/text.txt": found = True break if found: - assert copy, "'info/test/test_files_folder/text.txt' found in tar.bz2 but not copying test source files" + assert copy, ( + "'info/test/test_files_folder/text.txt' found in tar.bz2 " + "but not copying test source files" + ) if copy: api.test(outputs[0]) else: with pytest.raises(RuntimeError): api.test(outputs[0]) else: - assert not copy, "'info/test/test_files_folder/text.txt' not found in tar.bz2 but copying test source files. File list: %r" % files + assert not copy, ( + "'info/test/test_files_folder/text.txt' not found in tar.bz2 " + f"but copying test source files. File list: {files!r}" + ) @pytest.mark.sanity def test_copy_test_source_files_deps(testing_config): - recipe = os.path.join(metadata_dir, '_test_test_source_files') + recipe = os.path.join(metadata_dir, "_test_test_source_files") for copy in (False, True): testing_config.copy_test_source_files = copy # test is that pytest is a dep either way. Builds will fail if it's not. 
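The nested "info/test/test_files_folder/text.txt" layout described in the comment above can be probed directly on a built package. A minimal illustrative sketch (not from the patch itself; the helper name is made up) that reuses only the package_has_file utility these tests already import, assuming pkg_path is a .tar.bz2 produced by api.build:

from conda_build.utils import package_has_file

def copied_test_file_present(pkg_path: str) -> bool:
    # info/test/ is conda-build's test-file area inside the package;
    # "test_files_folder" is the source_files directory the recipe names,
    # and text.txt is the file expected to have been copied into it.
    return bool(package_has_file(pkg_path, "info/test/test_files_folder/text.txt"))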
@@ -1446,165 +1647,207 @@ def test_pin_depends(testing_config): """purpose of 'record' argument is to put a 'requires' file that records pinned run dependencies """ - recipe = os.path.join(metadata_dir, '_pin_depends_record') - m = api.render(recipe, config=testing_config)[0][0] + recipe = os.path.join(metadata_dir, "_pin_depends_record") + metadata = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record # will not show it in record - assert not any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) - output = api.build(m, config=testing_config)[0] - requires = package_has_file(output, 'info/requires') + assert not any( + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] + ) + output = api.build(metadata, config=testing_config)[0] + requires = package_has_file(output, "info/requires") assert requires - if hasattr(requires, 'decode'): + if hasattr(requires, "decode"): requires = requires.decode() - assert re.search(r'python\=[23]\.', requires), "didn't find pinned python in info/requires" + assert re.search( + r"python\=[23]\.", requires + ), "didn't find pinned python in info/requires" @pytest.mark.sanity def test_failed_patch_exits_build(testing_config): with pytest.raises(RuntimeError): - api.build(os.path.join(metadata_dir, '_bad_patch'), config=testing_config) + api.build(os.path.join(metadata_dir, "_bad_patch"), config=testing_config) @pytest.mark.sanity -def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder( - testing_config): +def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config): # unsatisfiable; also not buildable (test_a recipe version is 2.0) - testing_config.variant['test_a'] = "1.0" - recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b') + testing_config.variant["test_a"] = "1.0" + recipe = os.path.join(metadata_dir, "_build_deps_no_infinite_loop", "test_b") with pytest.raises(DependencyNeedsBuildingError): api.build(recipe, config=testing_config) # passes now, because package can be built, or is already built. Doesn't matter which. 
- testing_config.variant['test_a'] = "2.0" + testing_config.variant["test_a"] = "2.0" api.build(recipe, config=testing_config) @pytest.mark.sanity def test_provides_features_metadata(testing_config): - recipe = os.path.join(metadata_dir, '_requires_provides_features') + recipe = os.path.join(metadata_dir, "_requires_provides_features") out = api.build(recipe, config=testing_config)[0] - index = json.loads(package_has_file(out, 'info/index.json')) - assert 'requires_features' in index - assert index['requires_features'] == {'test': 'ok'} - assert 'provides_features' in index - assert index['provides_features'] == {'test2': 'also_ok'} + index = json.loads(package_has_file(out, "info/index.json")) + assert "requires_features" in index + assert index["requires_features"] == {"test": "ok"} + assert "provides_features" in index + assert index["provides_features"] == {"test2": "also_ok"} -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="py-lief not available on win for Python 2.7") -def test_overlinking_detection(testing_config): +def test_overlinking_detection( + testing_config, testing_workdir, variants_conda_build_sysroot +): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overlinking_detection') - dest_sh = os.path.join(recipe, 'build.sh') - dest_bat = os.path.join(recipe, 'bld.bat') - copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) - copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) + recipe = os.path.join(testing_workdir, "recipe") + copy_into( + os.path.join(metadata_dir, "_overlinking_detection"), + recipe, + ) + dest_sh = os.path.join(recipe, "build.sh") + dest_bat = os.path.join(recipe, "bld.bat") + copy_into( + os.path.join(recipe, "build_scripts", "default.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "default.bat"), dest_bat, clobber=True + ) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) + copy_into( + os.path.join(recipe, "build_scripts", "no_as_needed.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "with_bzip2.bat"), dest_bat, clobber=True + ) with pytest.raises(OverLinkingError): - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) rm_rf(dest_bat) -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="py-lief not available on win for Python 2.7") -def test_overlinking_detection_ignore_patterns(testing_config): +def test_overlinking_detection_ignore_patterns( + testing_config, testing_workdir, variants_conda_build_sysroot +): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overlinking_detection_ignore_patterns') - dest_sh = os.path.join(recipe, 'build.sh') - dest_bat = os.path.join(recipe, 'bld.bat') - copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) - copy_into(os.path.join(recipe, 'build_scripts', 
'no_as_needed.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) + recipe = os.path.join(testing_workdir, "recipe") + copy_into( + os.path.join(metadata_dir, "_overlinking_detection_ignore_patterns"), + recipe, + ) + dest_sh = os.path.join(recipe, "build.sh") + dest_bat = os.path.join(recipe, "bld.bat") + copy_into( + os.path.join(recipe, "build_scripts", "default.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "default.bat"), dest_bat, clobber=True + ) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) + copy_into( + os.path.join(recipe, "build_scripts", "no_as_needed.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "with_bzip2.bat"), dest_bat, clobber=True + ) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) rm_rf(dest_bat) -def test_overdepending_detection(testing_config): +def test_overdepending_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overdepending_detection') + recipe = os.path.join(metadata_dir, "_overdepending_detection") with pytest.raises(OverDependingError): - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) -@pytest.mark.skipif(sys.platform != "darwin", - reason="macOS-only test (at present)") -def test_macos_tbd_handling(testing_config): +@pytest.mark.skipif(sys.platform != "darwin", reason="macOS-only test (at present)") +def test_macos_tbd_handling(testing_config, variants_conda_build_sysroot): + """ + Test path handling after installation... The test case uses a Hello World + example in C/C++ for testing the installation of C libraries... 
+ """ testing_config.activate = True testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_macos_tbd_handling') - api.build(recipe, config=testing_config) + recipe = os.path.join(metadata_dir, "_macos_tbd_handling") + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) @pytest.mark.sanity def test_empty_package_with_python_in_build_and_host_barfs(testing_config): - recipe = os.path.join(metadata_dir, '_empty_pkg_with_python_build_host') + recipe = os.path.join(metadata_dir, "_empty_pkg_with_python_build_host") with pytest.raises(CondaBuildException): api.build(recipe, config=testing_config) @pytest.mark.sanity def test_empty_package_with_python_and_compiler_in_build_barfs(testing_config): - recipe = os.path.join(metadata_dir, '_compiler_python_build_section') + recipe = os.path.join(metadata_dir, "_compiler_python_build_section") with pytest.raises(CondaBuildException): api.build(recipe, config=testing_config) @pytest.mark.sanity def test_downstream_tests(testing_config): - upstream = os.path.join(metadata_dir, '_test_downstreams/upstream') - downstream = os.path.join(metadata_dir, '_test_downstreams/downstream') + upstream = os.path.join(metadata_dir, "_test_downstreams/upstream") + downstream = os.path.join(metadata_dir, "_test_downstreams/downstream") api.build(downstream, config=testing_config, notest=True) with pytest.raises(SystemExit): api.build(upstream, config=testing_config) @pytest.mark.sanity -@pytest.mark.xfail(not conda_46, - reason="conda 4.6 changed logger level from info to warn") def test_warning_on_file_clobbering(testing_config, capfd): - recipe_dir = os.path.join(metadata_dir, '_overlapping_files_warning') - - api.build(os.path.join(recipe_dir, 'a', ), config=testing_config) - api.build(os.path.join(recipe_dir, 'b', ), config=testing_config) + recipe_dir = os.path.join(metadata_dir, "_overlapping_files_warning") + + api.build( + os.path.join( + recipe_dir, + "a", + ), + config=testing_config, + ) + api.build( + os.path.join( + recipe_dir, + "b", + ), + config=testing_config, + ) # The clobber warning here is raised when creating the test environment for b out, err = capfd.readouterr() assert "ClobberWarning" in err with pytest.raises((ClobberError, CondaMultiError)): - with env_var('CONDA_PATH_CONFLICT', 'prevent', reset_context): - api.build(os.path.join(recipe_dir, 'b'), config=testing_config) + with env_var("CONDA_PATH_CONFLICT", "prevent", reset_context): + api.build(os.path.join(recipe_dir, "b"), config=testing_config) @pytest.mark.sanity -@pytest.mark.serial +@pytest.mark.skip(reason="conda-verify is deprecated because it is unsupported") def test_verify_bad_package(testing_config): from conda_verify.errors import PackageError - recipe_dir = os.path.join(fail_dir, 'create_bad_folder_for_conda_verify') + + recipe_dir = os.path.join(fail_dir, "create_bad_folder_for_conda_verify") api.build(recipe_dir, config=testing_config) with pytest.raises(PackageError): testing_config.exit_on_verify_error = True api.build(recipe_dir, config=testing_config) # ignore the error that we know should be raised, and re-run to make sure it is actually ignored - testing_config.ignore_verify_codes = ['C1125', 'C1115'] + testing_config.ignore_verify_codes = ["C1125", "C1115"] api.build(recipe_dir, config=testing_config) @pytest.mark.sanity def test_ignore_verify_codes(testing_config): - recipe_dir = os.path.join(metadata_dir, '_ignore_verify_codes') + recipe_dir = 
os.path.join(metadata_dir, "_ignore_verify_codes") testing_config.exit_on_verify_error = True # this recipe intentionally has a license error. If ignore_verify_codes works, # it will build OK. If not, it will error out. @@ -1612,27 +1855,32 @@ def test_ignore_verify_codes(testing_config): @pytest.mark.sanity -def test_extra_meta(testing_config): - recipe_dir = os.path.join(metadata_dir, '_extra_meta') - testing_config.extra_meta = {'foo': 'bar'} +def test_extra_meta(testing_config, caplog): + recipe_dir = os.path.join(metadata_dir, "_extra_meta") + extra_meta_data = {"foo": "bar"} + testing_config.extra_meta = extra_meta_data outputs = api.build(recipe_dir, config=testing_config) - about = json.loads(package_has_file(outputs[0], 'info/about.json')) - assert 'foo' in about['extra'] and about['extra']['foo'] == 'bar' + about = json.loads(package_has_file(outputs[0], "info/about.json")) + assert "foo" in about["extra"] and about["extra"]["foo"] == "bar" + assert ( + f"Adding the following extra-meta data to about.json: {extra_meta_data}" + in caplog.text + ) def test_symlink_dirs_in_always_include_files(testing_config): - recipe = os.path.join(metadata_dir, '_symlink_dirs_in_always_include_files') + recipe = os.path.join(metadata_dir, "_symlink_dirs_in_always_include_files") api.build(recipe, config=testing_config) def test_clean_rpaths(testing_config): - recipe = os.path.join(metadata_dir, '_clean_rpaths') + recipe = os.path.join(metadata_dir, "_clean_rpaths") api.build(recipe, config=testing_config, activate=True) def test_script_env_warnings(testing_config, recwarn): - recipe_dir = os.path.join(metadata_dir, '_script_env_warnings') - token = 'CONDA_BUILD_PYTEST_SCRIPT_ENV_TEST_TOKEN' + recipe_dir = os.path.join(metadata_dir, "_script_env_warnings") + token = "CONDA_BUILD_PYTEST_SCRIPT_ENV_TEST_TOKEN" def assert_keyword(keyword): messages = [str(w.message) for w in recwarn.list] @@ -1640,15 +1888,83 @@ def assert_keyword(keyword): recwarn.clear() api.build(recipe_dir, config=testing_config) - assert_keyword('undefined') + assert_keyword("undefined") os.environ[token] = "SECRET" try: api.build(recipe_dir, config=testing_config) - assert_keyword('SECRET') + assert_keyword("SECRET") testing_config.suppress_variables = True api.build(recipe_dir, config=testing_config) - assert_keyword('') + assert_keyword("") finally: os.environ.pop(token) + + +@pytest.mark.slow +def test_activated_prefixes_in_actual_path(testing_metadata): + """ + Check if build and host env are properly added to PATH in the correct order. + Do this in an actual build and not just in a unit test to avoid regression. + Currently only tests for single non-"outputs" recipe with build/host split + and proper env activation (Metadata.is_cross and Config.activate both True). + """ + file = "env-path-dump" + testing_metadata.config.activate = True + meta = testing_metadata.meta + meta["requirements"]["host"] = [] + meta["build"]["script"] = [ + f"echo %PATH%>%PREFIX%/{file}" if on_win else f"echo $PATH>$PREFIX/{file}" + ] + outputs = api.build(testing_metadata) + env = {"PATH": ""} + # We get the PATH entries twice: (which we should fix at some point) + # 1. from the environment activation hooks, + # 2. also beforehand from utils.path_prepended at the top of + # - build.write_build_scripts on Unix + # - windows.build on Windows + # And apparently here the previously added build env gets deactivated + # from the activation hook, hence only host is on PATH twice. 
+ prepend_bin_path(env, testing_metadata.config.host_prefix) + if not on_win: + prepend_bin_path(env, testing_metadata.config.build_prefix) + prepend_bin_path(env, testing_metadata.config.host_prefix) + prepend_bin_path(env, testing_metadata.config.build_prefix) + expected_paths = [path for path in env["PATH"].split(os.pathsep) if path] + actual_paths = [ + path + for path in package_has_file(outputs[0], file).strip().split(os.pathsep) + if path in expected_paths + ] + assert actual_paths == expected_paths + + +@pytest.mark.parametrize("add_pip_as_python_dependency", [False, True]) +def test_add_pip_as_python_dependency_from_condarc_file( + testing_metadata, testing_workdir, add_pip_as_python_dependency, monkeypatch +): + """ + Test whether settings from .condarc files are heeded. + ref: https://github.com/conda/conda-libmamba-solver/issues/393 + """ + # TODO: SubdirData._cache_ clearing might not be needed for future conda versions. + # See https://github.com/conda/conda/pull/13365 for proposed changes. + from conda.core.subdir_data import SubdirData + + # SubdirData's cache doesn't distinguish on add_pip_as_python_dependency. + SubdirData._cache_.clear() + + testing_metadata.meta["build"]["script"] = ['python -c "import pip"'] + testing_metadata.meta["requirements"]["host"] = ["python"] + del testing_metadata.meta["test"] + if add_pip_as_python_dependency: + check_build_fails = nullcontext() + else: + check_build_fails = pytest.raises(subprocess.CalledProcessError) + + conda_rc = Path(testing_workdir, ".condarc") + conda_rc.write_text(f"add_pip_as_python_dependency: {add_pip_as_python_dependency}") + with env_var("CONDARC", conda_rc, reset_context): + with check_build_fails: + api.build(testing_metadata) diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index b7c38a96e8..dc4078e61f 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -11,7 +11,7 @@ @pytest.mark.parametrize("pkg_format,pkg_ext", [(None, ".tar.bz2"), ("2", ".conda")]) def test_conda_pkg_format( - pkg_format, pkg_ext, testing_config, testing_workdir, monkeypatch, capfd + pkg_format, pkg_ext, testing_config, monkeypatch, capfd, request ): """Conda package format "2" builds .conda packages.""" @@ -25,14 +25,19 @@ def test_conda_pkg_format( monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - output_file, = api.get_output_file_paths(recipe, config=testing_config) + # Recipe "entry_points" is used in other test -> add test-specific variant + # (change build hash) to avoid clashes in package cache from other tests. 
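# Rough sketch (not from the patch; build_hash_for is a made-up helper) of why
# a per-test variant avoids cache clashes: assuming the recipe consumes the
# extra key, it becomes one of the "used" variant variables feeding
# conda-build's build hash, so different values yield differently-hashed,
# non-colliding packages. Only api.render and MetaData.hash_dependencies,
# both used elsewhere in these tests, are relied on here.
from conda_build import api

def build_hash_for(recipe_dir, config, variants=None):
    metadata = api.render(recipe_dir, config=config, variants=variants)[0][0]
    return metadata.hash_dependencies()  # differs when used variant values differ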
+ variants = {"pytest_name": [request.node.name]} + (output_file,) = api.get_output_file_paths( + recipe, config=testing_config, variants=variants + ) assert output_file.endswith(pkg_ext) - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants) assert os.path.exists(output_file) out, err = capfd.readouterr() # Verify that test pass ran through api assert "Manual entry point" in out - assert "TEST END: %s" % output_file in out + assert f"TEST END: {output_file}" in out diff --git a/tests/test_api_build_dll_package.py b/tests/test_api_build_dll_package.py index 32adb88cc4..8c2ede1e7b 100644 --- a/tests/test_api_build_dll_package.py +++ b/tests/test_api_build_dll_package.py @@ -1,23 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os import pytest -from conda_build import api +from conda_build.api import build -from .utils import thisdir - - -@pytest.fixture() -def recipe(): - return os.path.join(thisdir, 'test-recipes', 'dll-package') +from .utils import dll_dir @pytest.mark.sanity -def test_recipe_build(recipe, testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - api.build(recipe, config=testing_config) + build(dll_dir, config=testing_config) diff --git a/tests/test_api_build_go_package.py b/tests/test_api_build_go_package.py index 16e7a27385..35cc12a965 100644 --- a/tests/test_api_build_go_package.py +++ b/tests/test_api_build_go_package.py @@ -1,24 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os import pytest -from conda_build import api +from conda_build.api import build -from .utils import thisdir - - -@pytest.fixture() -def recipe(): - return os.path.join(thisdir, 'test-recipes', 'go-package') +from .utils import go_dir @pytest.mark.sanity @pytest.mark.serial -def test_recipe_build(recipe, testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - api.build(recipe, config=testing_config) + build(go_dir, config=testing_config) diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 7931b99b55..9dac14351c 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -2,65 +2,86 @@ # SPDX-License-Identifier: BSD-3-Clause # This file makes sure that our API has not changed. Doing so can not be accidental. Whenever it # happens, we should bump our major build number, because we may have broken someone. 
- import sys +from inspect import getfullargspec as getargspec import pytest from conda_build import api -from inspect import getfullargspec as getargspec - pytestmark = pytest.mark.no_default_testing_config def test_api_config(): - assert hasattr(api, 'Config') - assert hasattr(api, 'get_or_merge_config') + assert hasattr(api, "Config") + assert hasattr(api, "get_or_merge_config") def test_api_get_or_merge_config(): argspec = getargspec(api.get_or_merge_config) - assert argspec.args == ['config', 'variant'] - assert argspec.defaults == (None, ) + assert argspec.args == ["config", "variant"] + assert argspec.defaults == (None,) def test_api_render(): argspec = getargspec(api.render) - assert argspec.args == ['recipe_path', 'config', 'variants', - 'permit_unsatisfiable_variants', 'finalize', - 'bypass_env_check'] + assert argspec.args == [ + "recipe_path", + "config", + "variants", + "permit_unsatisfiable_variants", + "finalize", + "bypass_env_check", + ] assert argspec.defaults == (None, None, True, True, False) def test_api_output_yaml(): argspec = getargspec(api.output_yaml) - assert argspec.args == ['metadata', 'file_path', 'suppress_outputs'] + assert argspec.args == ["metadata", "file_path", "suppress_outputs"] assert argspec.defaults == (None, False) -def test_api_get_output_file_path(): - argspec = getargspec(api.get_output_file_path) - assert argspec.args == ['recipe_path_or_metadata', 'no_download_source', 'config', 'variants'] +def test_api_get_output_file_paths(): + argspec = getargspec(api.get_output_file_paths) + assert argspec.args == [ + "recipe_path_or_metadata", + "no_download_source", + "config", + "variants", + ] assert argspec.defaults == (False, None, None) def test_api_check(): argspec = getargspec(api.check) - assert argspec.args == ['recipe_path', 'no_download_source', 'config', 'variants'] + assert argspec.args == ["recipe_path", "no_download_source", "config", "variants"] assert argspec.defaults == (False, None, None) def test_api_build(): argspec = getargspec(api.build) - assert argspec.args == ['recipe_paths_or_metadata', 'post', 'need_source_download', - 'build_only', 'notest', 'config', 'variants', 'stats'] + assert argspec.args == [ + "recipe_paths_or_metadata", + "post", + "need_source_download", + "build_only", + "notest", + "config", + "variants", + "stats", + ] assert argspec.defaults == (None, True, False, False, None, None, None) def test_api_test(): argspec = getargspec(api.test) - assert argspec.args == ['recipedir_or_package_or_metadata', 'move_broken', 'config', 'stats'] + assert argspec.args == [ + "recipedir_or_package_or_metadata", + "move_broken", + "config", + "stats", + ] assert argspec.defaults == (True, None, None) @@ -72,60 +93,91 @@ def test_api_list_skeletons(): def test_api_skeletonize(): argspec = getargspec(api.skeletonize) - assert argspec.args == ['packages', 'repo', 'output_dir', 'version', 'recursive', 'config'] - assert argspec.defaults == ('.', None, False, None) + assert argspec.args == [ + "packages", + "repo", + "output_dir", + "version", + "recursive", + "config", + ] + assert argspec.defaults == (".", None, False, None) def test_api_develop(): argspec = getargspec(api.develop) - assert argspec.args == ['recipe_dir', 'prefix', 'no_pth_file', 'build_ext', - 'clean', 'uninstall'] + assert argspec.args == [ + "recipe_dir", + "prefix", + "no_pth_file", + "build_ext", + "clean", + "uninstall", + ] assert argspec.defaults == (sys.prefix, False, False, False, False) def test_api_convert(): argspec = getargspec(api.convert) - assert 
argspec.args == ['package_file', 'output_dir', 'show_imports', 'platforms', 'force', - 'dependencies', 'verbose', 'quiet', 'dry_run'] - assert argspec.defaults == ('.', False, None, False, None, False, True, False) + assert argspec.args == [ + "package_file", + "output_dir", + "show_imports", + "platforms", + "force", + "dependencies", + "verbose", + "quiet", + "dry_run", + ] + assert argspec.defaults == (".", False, None, False, None, False, True, False) def test_api_installable(): argspec = getargspec(api.test_installable) - assert argspec.args == ['channel'] - assert argspec.defaults == ('defaults',) + assert argspec.args == ["channel"] + assert argspec.defaults == ("defaults",) def test_api_inspect_linkages(): argspec = getargspec(api.inspect_linkages) - assert argspec.args == ['packages', 'prefix', 'untracked', 'all_packages', - 'show_files', 'groupby', 'sysroot'] - assert argspec.defaults == (sys.prefix, False, False, False, 'package', '') + assert argspec.args == [ + "packages", + "prefix", + "untracked", + "all_packages", + "show_files", + "groupby", + "sysroot", + ] + assert argspec.defaults == (sys.prefix, False, False, False, "package", "") def test_api_inspect_objects(): argspec = getargspec(api.inspect_objects) - assert argspec.args == ['packages', 'prefix', 'groupby'] - assert argspec.defaults == (sys.prefix, 'filename') + assert argspec.args == ["packages", "prefix", "groupby"] + assert argspec.defaults == (sys.prefix, "filename") def test_api_inspect_prefix_length(): argspec = getargspec(api.inspect_prefix_length) - assert argspec.args == ['packages', 'min_prefix_length'] + assert argspec.args == ["packages", "min_prefix_length"] # hard-coded prefix length as intentional check here assert argspec.defaults == (255,) def test_api_create_metapackage(): argspec = getargspec(api.create_metapackage) - assert argspec.args == ['name', 'version', 'entry_points', 'build_string', 'build_number', - 'dependencies', 'home', 'license_name', 'summary', 'config'] + assert argspec.args == [ + "name", + "version", + "entry_points", + "build_string", + "build_number", + "dependencies", + "home", + "license_name", + "summary", + "config", + ] assert argspec.defaults == ((), None, 0, (), None, None, None, None) - - -def test_api_update_index(): - argspec = getargspec(api.update_index) - assert argspec.args == ['dir_paths', 'config', 'force', 'check_md5', 'remove', 'channel_name', 'subdir', - 'threads', 'patch_generator', "verbose", "progress", "hotfix_source_repo", - 'current_index_versions'] - assert argspec.defaults == (None, False, False, False, None, None, None, None, False, False, None, None) diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index dc973467da..c0e46b7bf3 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -1,18 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import csv -import os +import hashlib import json +import os import tarfile -import hashlib import pytest +from conda.gateways.connection.download import download -from conda_build.conda_interface import download from conda_build import api -from conda_build.utils import package_has_file, on_win +from conda_build.utils import on_win, package_has_file -from .utils import metadata_dir, assert_package_consistency +from .utils import assert_package_consistency, metadata_dir def test_convert_wheel_raises(): @@ -30,31 +30,32 @@ def test_convert_exe_raises(): def assert_package_paths_matches_files(package_path): """Ensure that info/paths.json matches 
info/files""" with tarfile.open(package_path) as t: - files_content = t.extractfile('info/files').read().decode('utf-8') + files_content = t.extractfile("info/files").read().decode("utf-8") files_set = {line for line in files_content.splitlines() if line} - paths_content = json.loads(t.extractfile('info/paths.json').read().decode('utf-8')) + paths_content = json.loads( + t.extractfile("info/paths.json").read().decode("utf-8") + ) - for path_entry in paths_content['paths']: - assert path_entry['_path'] in files_set - files_set.remove(path_entry['_path']) + for path_entry in paths_content["paths"]: + assert path_entry["_path"] in files_set + files_set.remove(path_entry["_path"]) assert not files_set # Check that we've seen all the entries in files -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_show_imports(testing_workdir, base_platform, package, capfd): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) +def test_show_imports(base_platform, package, capfd): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -65,18 +66,17 @@ def test_show_imports(testing_workdir, base_platform, package, capfd): output, error = capfd.readouterr() # there will be four duplicate outputs since we're converting to four platforms - assert 'import cryptography.hazmat.bindings._constant_time' in output - assert 'import cryptography.hazmat.bindings._openssl' in output - assert 'import cryptography.hazmat.bindings._padding' in output + assert "import cryptography.hazmat.bindings._constant_time" in output + assert "import cryptography.hazmat.bindings._openssl" in output + assert "import cryptography.hazmat.bindings._padding" in output -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py')]) -def test_no_imports_found(testing_workdir, base_platform, package, capfd): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("itsdangerous-0.24", "itsdangerous.py")]) +def test_no_imports_found(base_platform, package, capfd): package_name, example_file = package - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -84,39 +84,39 @@ def test_no_imports_found(testing_workdir, base_platform, package, capfd): api.convert(fn, platforms=None, show_imports=True) output, error = capfd.readouterr() - assert 'No imports found.' in output + assert "No imports found." 
in output -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_no_platform(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) +def test_no_platform(base_platform, package): package_name, example_file = package - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) with pytest.raises(SystemExit) as e: api.convert(fn, platforms=None) - assert 'Error: --platform option required for conda package conversion.' in str(e.value) + assert "Error: --platform option required for conda package conversion." in str( + e.value + ) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_c_extension_error(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) +def test_c_extension_error(base_platform, package): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -124,187 +124,204 @@ def test_c_extension_error(testing_workdir, base_platform, package): with pytest.raises(SystemExit) as e: api.convert(fn, platforms=platform) - assert ('WARNING: Package {} contains C extensions; skipping conversion. ' - 'Use -f to force conversion.' .format(fn)) in str(e.value) + assert ( + f"WARNING: Package {fn} contains C extensions; skipping conversion. " + "Use -f to force conversion." 
+ ) in str(e.value) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_c_extension_conversion(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) +def test_c_extension_conversion(base_platform, package): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) for platform in platforms: api.convert(fn, platforms=platform, force=True) - assert os.path.exists('{}/{}' .format(platform, fn)) + assert os.path.exists(f"{platform}/{fn}") -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py'), - ('py-1.4.32', 'py/__init__.py')]) -def test_convert_platform_to_others(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize( + "package", + [("itsdangerous-0.24", "itsdangerous.py"), ("py-1.4.32", "py/__init__.py")], +) +def test_convert_platform_to_others(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-py27_0.tar.bz2" fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if subdir != platform: - python_folder = 'lib/python2.7' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python2.7" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/site-packages/{example_file}') + assert package_has_file( + package, f"{python_folder}/site-packages/{example_file}" + ) if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) @pytest.mark.slow -@pytest.mark.skipif(on_win, reason="we create the pkg to be converted in *nix; don't run on win.") -def test_convert_from_unix_to_win_creates_entry_points(testing_config): +@pytest.mark.skipif( + on_win, reason="we create the pkg to be converted in *nix; don't run on win." 
+) +def test_convert_from_unix_to_win_creates_entry_points(testing_config, request): recipe_dir = os.path.join(metadata_dir, "entry_points") - fn = api.build(recipe_dir, config=testing_config)[0] - for platform in ['win-64', 'win-32']: + # Recipe "entry_points" is used in other test -> add test-specific variant + # (change build hash) to avoid clashes in package cache from other tests. + variants = {"pytest_name": [request.node.name]} + fn = api.build(recipe_dir, config=testing_config, variants=variants)[0] + for platform in ["win-64", "win-32"]: api.convert(fn, platforms=[platform], force=True) converted_fn = os.path.join(platform, os.path.basename(fn)) assert package_has_file(converted_fn, "Scripts/test-script-manual-script.py") assert package_has_file(converted_fn, "Scripts/test-script-manual.exe") - script_contents = package_has_file(converted_fn, "Scripts/test-script-setup-script.py") + script_contents = package_has_file( + converted_fn, "Scripts/test-script-setup-script.py" + ) assert script_contents assert "Test script setup" in script_contents bat_contents = package_has_file(converted_fn, "Scripts/test-script-setup.exe") assert bat_contents assert_package_consistency(converted_fn) - paths_content = json.loads(package_has_file(converted_fn, 'info/paths.json')) + paths_content = json.loads(package_has_file(converted_fn, "info/paths.json")) # Check the validity of the sha and filesize of the converted scripts with tarfile.open(converted_fn) as t: - for f in paths_content['paths']: - if f['_path'].startswith('Scripts/') and f['_path'].endswith('-script.py'): - script_content = package_has_file(converted_fn, f['_path']) - if hasattr(script_content, 'encode'): + for f in paths_content["paths"]: + if f["_path"].startswith("Scripts/") and f["_path"].endswith( + "-script.py" + ): + script_content = package_has_file(converted_fn, f["_path"]) + if hasattr(script_content, "encode"): script_content = script_content.encode() - assert f['sha256'] == hashlib.sha256(script_content).hexdigest() - assert f['size_in_bytes'] == t.getmember(f['_path']).size + assert f["sha256"] == hashlib.sha256(script_content).hexdigest() + assert f["size_in_bytes"] == t.getmember(f["_path"]).size - paths_list = {f['_path'] for f in paths_content['paths']} - files = {p for p in package_has_file(converted_fn, 'info/files').splitlines()} + paths_list = {f["_path"] for f in paths_content["paths"]} + files = {p for p in package_has_file(converted_fn, "info/files").splitlines()} assert files == paths_list - index = json.loads(package_has_file(converted_fn, 'info/index.json')) - assert index['subdir'] == platform + index = json.loads(package_has_file(converted_fn, "info/index.json")) + assert index["subdir"] == platform has_prefix_files = package_has_file(converted_fn, "info/has_prefix") - fieldnames = ['prefix', 'type', 'path'] + fieldnames = ["prefix", "type", "path"] csv_dialect = csv.Sniffer().sniff(has_prefix_files) - csv_dialect.lineterminator = '\n' - has_prefix_files = csv.DictReader(has_prefix_files.splitlines(), fieldnames=fieldnames, - dialect=csv_dialect) - has_prefix_files = {d['path']: d for d in has_prefix_files} + csv_dialect.lineterminator = "\n" + has_prefix_files = csv.DictReader( + has_prefix_files.splitlines(), fieldnames=fieldnames, dialect=csv_dialect + ) + has_prefix_files = {d["path"]: d for d in has_prefix_files} assert len(has_prefix_files) == 4 - assert 'Scripts/test-script-script.py' in has_prefix_files - assert 'Scripts/test-script-setup-script.py' in has_prefix_files - assert 
'Scripts/test-script-manual-script.py' in has_prefix_files - assert 'Scripts/test-script-manual-postfix-script.py' in has_prefix_files + assert "Scripts/test-script-script.py" in has_prefix_files + assert "Scripts/test-script-setup-script.py" in has_prefix_files + assert "Scripts/test-script-manual-script.py" in has_prefix_files + assert "Scripts/test-script-manual-postfix-script.py" in has_prefix_files -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_convert_dependencies(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) +def test_convert_dependencies(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - dependencies = ['numpy 1.7.1 py36_0', 'cryptography 1.7.0 py36_0'] - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', dependencies=dependencies, quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + dependencies = ["numpy 1.7.1 py36_0", "cryptography 1.7.0 py36_0"] + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert( + fn, platforms="all", dependencies=dependencies, quiet=False, verbose=False + ) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if platform != subdir: - python_folder = 'lib/python3.6' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python3.6" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/{example_file}') + assert package_has_file(package, f"{python_folder}/{example_file}") with tarfile.open(package) as t: - info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) + info = json.loads( + t.extractfile("info/index.json").read().decode("utf-8") + ) - assert 'numpy 1.7.1 py36_0' in info['depends'] - assert 'numpy 1.12.1 py36_0' not in info['depends'] - assert 'cryptography 1.7.0 py36_0' in info['depends'] - assert 'cryptography 1.8.1 py36_0' not in info['depends'] + assert "numpy 1.7.1 py36_0" in info["depends"] + assert "numpy 1.12.1 py36_0" not in info["depends"] + assert "cryptography 1.7.0 py36_0" in info["depends"] + assert "cryptography 1.8.1 py36_0" not in info["depends"] if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_convert_no_dependencies(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) +def test_convert_no_dependencies(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, - package_name) + subdir = 
f"{base_platform}-64" + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', dependencies=None, quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", dependencies=None, quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if platform != subdir: - python_folder = 'lib/python3.6' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python3.6" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/{example_file}') + assert package_has_file(package, f"{python_folder}/{example_file}") with tarfile.open(package) as t: - info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) + info = json.loads( + t.extractfile("info/index.json").read().decode("utf-8") + ) - assert 'numpy 1.12.1 py36_0' in info['depends'] - assert 'cryptography 1.8.1 py36_0' in info['depends'] + assert "numpy 1.12.1 py36_0" in info["depends"] + assert "cryptography 1.8.1 py36_0" in info["depends"] if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_skip_conversion(testing_workdir, base_platform, package, capfd): +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) +def test_skip_conversion(base_platform, package, capfd): package_name, example_file = package - source_plat_arch = '{}-64' .format(base_platform) + source_plat_arch = f"{base_platform}-64" - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-np112py36_0.tar.bz2'.format(base_platform, - package_name) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - api.convert(fn, platforms=source_plat_arch, dependencies=None, quiet=False, verbose=False) + api.convert( + fn, platforms=source_plat_arch, dependencies=None, quiet=False, verbose=False + ) output, error = capfd.readouterr() - skip_message = ("Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion.\n" - .format(source_plat_arch, source_plat_arch)) + skip_message = ( + f"Source platform '{source_plat_arch}' and target platform '{source_plat_arch}' are identical. " + "Skipping conversion.\n" + ) package = os.path.join(source_plat_arch, fn) @@ -312,9 +329,9 @@ def test_skip_conversion(testing_workdir, base_platform, package, capfd): assert not os.path.exists(package) -@pytest.mark.parametrize('base_platform', ['linux', 'osx']) -@pytest.mark.parametrize('package', [('sparkmagic-0.12.1', '')]) -def test_renaming_executables(testing_workdir, base_platform, package): +@pytest.mark.parametrize("base_platform", ["linux", "osx"]) +@pytest.mark.parametrize("package", [("sparkmagic-0.12.1", "")]) +def test_renaming_executables(base_platform, package): """Test that the files in /bin are properly renamed. 
When converting the bin/ directory to Scripts/, only scripts @@ -325,17 +342,16 @@ def test_renaming_executables(testing_workdir, base_platform, package): the same. """ package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-py27_0.tar.bz2" fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if subdir != platform: package = os.path.join(platform, fn) if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) diff --git a/tests/test_api_debug.py b/tests/test_api_debug.py index 10415c15d1..af24d8acfb 100644 --- a/tests/test_api_debug.py +++ b/tests/test_api_debug.py @@ -5,118 +5,110 @@ should go in test_render.py """ -import os -from glob import glob +from __future__ import annotations -import pytest import subprocess +from contextlib import nullcontext +from pathlib import Path -import sys - -from conda_build import api -from tests import utils - -from .utils import metadata_dir, thisdir, on_win - -recipe_path = os.path.join(metadata_dir, "_debug_pkg") -ambiguous_recipe_path = os.path.join(metadata_dir, "_debug_pkg_multiple_outputs") -tarball_path = os.path.join(thisdir, "archives", "test_debug_pkg-1.0-0.tar.bz2") - -if on_win: - shell_cmd = ["cmd.exe", "/d", "/c"] -else: - shell_cmd = ["bash", "-c"] - - -def assert_correct_folders(work_dir, build=True): - base_dir = os.path.dirname(work_dir) - build_set = "_b*", "_h*" - test_set = "_t*", "test_tmp" - for prefix in build_set: - assert bool(glob(os.path.join(base_dir, prefix))) == build - for prefix in test_set: - assert bool(glob(os.path.join(base_dir, prefix))) != build - - -def check_build_files_present(work_dir, build=True): - if on_win: - assert os.path.exists(os.path.join(work_dir, "bld.bat")) == build - else: - assert os.path.exists(os.path.join(work_dir, "conda_build.sh")) == build - - -def check_test_files_present(work_dir, test=True): - if on_win: - assert os.path.exists(os.path.join(work_dir, "conda_test_runner.bat")) == test - else: - assert os.path.exists(os.path.join(work_dir, "conda_test_runner.sh")) == test - - -@pytest.mark.slow -def test_debug_recipe_default_path(testing_config): - activation_string = api.debug(recipe_path, config=testing_config) - assert activation_string and "debug_1" in activation_string - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir) - - -@pytest.mark.skipif( - utils.on_win and sys.version_info <= (3, 4), - reason="Skipping on windows and vc<14" +import pytest +from conda.common.compat import on_win + +from conda_build.api import debug + +from .utils import archive_path, metadata_path + +DEBUG_PKG = 
metadata_path / "_debug_pkg" +MULTI_OUT = metadata_path / "_debug_pkg_multiple_outputs" +TARBALL = archive_path / "test_debug_pkg-1.0-0.tar.bz2" +SHELL_CMD = ("cmd.exe", "/d", "/c") if on_win else ("bash", "-c") + + +@pytest.mark.parametrize( + "recipe,path,config,output_id,has_error,has_build", + [ + # w/ config + pytest.param(DEBUG_PKG, False, True, None, False, True, id="recipe w/ config"), + pytest.param(TARBALL, False, True, None, False, False, id="tarball w/ config"), + # w/ path + pytest.param(DEBUG_PKG, True, False, None, False, True, id="recipe w/ path"), + pytest.param(TARBALL, True, False, None, False, False, id="tarball w/ path"), + # w/ outputs + pytest.param( + MULTI_OUT, + False, + False, + "output1*", + False, + True, + id="outputs w/ valid filtering", + ), + pytest.param( + MULTI_OUT, + False, + False, + None, + True, + False, + id="outputs w/ no filtering", + ), + pytest.param( + MULTI_OUT, + False, + False, + "frank", + True, + False, + id="outputs w/ invalid filtering", + ), + ], ) -def test_debug_package_default_path(testing_config): - activation_string = api.debug(tarball_path, config=testing_config) - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, False) - check_test_files_present(work_dir, True) - assert_correct_folders(work_dir, build=False) - - -@pytest.mark.slow -def test_debug_recipe_custom_path(testing_workdir): - activation_string = api.debug(recipe_path, path=testing_workdir) - assert activation_string and "debug_1" not in activation_string - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir) - - -def test_debug_package_custom_path(testing_workdir): - activation_string = api.debug(tarball_path, path=testing_workdir) - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, False) - check_test_files_present(work_dir, True) - assert_correct_folders(work_dir, build=False) - - -def test_specific_output(): - activation_string = api.debug(ambiguous_recipe_path, output_id="output1*") - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir, build=True) - - -@pytest.mark.sanity -def test_error_on_ambiguous_output(): - with pytest.raises(ValueError): - api.debug(ambiguous_recipe_path) - - -@pytest.mark.sanity -def test_error_on_unmatched_output(): - with pytest.raises(ValueError): - api.debug(ambiguous_recipe_path, output_id="frank") +def test_debug( + recipe: Path, + path: bool, + config: bool, + output_id: str | None, + has_error: bool, + has_build: bool, + tmp_path: Path, + testing_config, +): + with pytest.raises(ValueError) if has_error else nullcontext(): + activation = debug( + str(recipe), + path=tmp_path if path else None, + 
config=testing_config if config else None, + output_id=output_id, + ) + + # if we expected an error there won't be anything else to test + if has_error: + return + + # e.g.: activation = "cd /path/to/work && source /path/to/work/build_env_setup.sh" + _, work_dir, _, source, script = activation.split() + work_path = Path(work_dir) + + # recipes and tarballs are installed into different locations + if recipe.suffixes[-2:] == [".tar", ".bz2"]: + assert work_path.name == "test_tmp" + elif path: + assert work_path.parent == tmp_path + else: + assert work_path.parent.name.startswith("debug_") + + # check build files are present + name = "bld.bat" if on_win else "conda_build.sh" + assert (work_path / name).exists() is has_build + for prefix in ("_b*", "_h*"): + assert bool(next(work_path.parent.glob(prefix), False)) is has_build + + # check test files are present + name = f"conda_test_runner{('.bat' if on_win else '.sh')}" + has_test = not has_build + assert (work_path / name).exists() is has_test + for prefix in ("_t*", "test_tmp"): + assert bool(next(work_path.parent.glob(prefix), False)) is has_test + + # ensure it's possible to activate the environment + subprocess.check_call([*SHELL_CMD, f"{source} {script}"], cwd=work_path) diff --git a/tests/test_api_inspect.py b/tests/test_api_inspect.py index f3fc6411a2..f667867610 100644 --- a/tests/test_api_inspect.py +++ b/tests/test_api_inspect.py @@ -5,25 +5,11 @@ import pytest from conda_build import api -from .utils import metadata_dir -thisdir = os.path.dirname(os.path.abspath(__file__)) +from .utils import metadata_dir @pytest.mark.sanity def test_check_recipe(): """Technically not inspect, but close enough to belong here""" assert api.check(os.path.join(metadata_dir, "source_git_jinja2")) - - -# These tests are already being done in test_cli.py. If we have a better way to test, move here.
-def test_inpect_linkages(): - pass - - -def test_inspect_objects(): - pass - - -def test_installable(): - pass diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 452a642e43..0882de0df1 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -7,20 +7,20 @@ import os import re -import sys +from itertools import count, islice -from unittest import mock import pytest import yaml +from conda.base.context import context +from conda.common.compat import on_win from conda_build import api, render -from conda_build.conda_interface import subdir, cc_conda_build -from tests import utils +from conda_build.variants import validate_spec -from .utils import metadata_dir, thisdir +from .utils import metadata_dir, variants_dir -def test_render_need_download(testing_workdir, testing_config): +def test_render_need_download(testing_config): # first, test that the download/render system renders all it can, # and accurately returns its needs @@ -28,7 +28,8 @@ def test_render_need_download(testing_workdir, testing_config): metadata, need_download, need_reparse_in_env = api.render( os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config, - no_download_source=True)[0] + no_download_source=True, + )[0] assert need_download assert need_reparse_in_env @@ -37,15 +38,16 @@ def test_render_need_download(testing_workdir, testing_config): os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config, no_download_source=False, - finalize=False)[0] + finalize=False, + )[0] assert not need_download assert metadata.meta["package"]["version"] == "1.20.2" def test_render_yaml_output(testing_workdir, testing_config): metadata, need_download, need_reparse_in_env = api.render( - os.path.join(metadata_dir, "source_git_jinja2"), - config=testing_config)[0] + os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config + )[0] yaml_metadata = api.output_yaml(metadata) assert "package:" in yaml_metadata @@ -54,170 +56,211 @@ def test_render_yaml_output(testing_workdir, testing_config): assert "package:" in open(os.path.join(testing_workdir, "output.yaml")).read() -def test_get_output_file_path(testing_workdir, testing_metadata): +def test_get_output_file_paths(testing_workdir, testing_metadata): testing_metadata = render.finalize_metadata(testing_metadata) - api.output_yaml(testing_metadata, 'recipe/meta.yaml') + api.output_yaml(testing_metadata, "recipe/meta.yaml") - build_path = api.get_output_file_paths(os.path.join(testing_workdir, 'recipe'), - config=testing_metadata.config, - no_download_source=True)[0] - assert build_path == os.path.join(testing_metadata.config.croot, - testing_metadata.config.host_subdir, - "test_get_output_file_path-1.0-1.tar.bz2") + build_path = api.get_output_file_paths( + os.path.join(testing_workdir, "recipe"), + config=testing_metadata.config, + no_download_source=True, + )[0] + assert build_path == os.path.join( + testing_metadata.config.croot, + testing_metadata.config.host_subdir, + "test_get_output_file_paths-1.0-1.tar.bz2", + ) -def test_get_output_file_path_metadata_object(testing_metadata): +def test_get_output_file_paths_metadata_object(testing_metadata): testing_metadata.final = True build_path = api.get_output_file_paths(testing_metadata)[0] - assert build_path == os.path.join(testing_metadata.config.croot, - testing_metadata.config.host_subdir, - "test_get_output_file_path_metadata_object-1.0-1.tar.bz2") + assert build_path == os.path.join( + testing_metadata.config.croot, + testing_metadata.config.host_subdir, + 
"test_get_output_file_paths_metadata_object-1.0-1.tar.bz2", + ) -def test_get_output_file_path_jinja2(testing_workdir, testing_config): +def test_get_output_file_paths_jinja2(testing_config): # If this test does not raise, it's an indicator that the workdir is not # being cleaned as it should. recipe = os.path.join(metadata_dir, "source_git_jinja2") # First get metadata with a recipe that is known to need a download: with pytest.raises((ValueError, SystemExit)): - build_path = api.get_output_file_paths(recipe, - config=testing_config, - no_download_source=True)[0] + build_path = api.get_output_file_paths( + recipe, config=testing_config, no_download_source=True + )[0] metadata, need_download, need_reparse_in_env = api.render( - recipe, - config=testing_config, - no_download_source=False)[0] + recipe, config=testing_config, no_download_source=False + )[0] build_path = api.get_output_file_paths(metadata)[0] _hash = metadata.hash_dependencies() - python = ''.join(metadata.config.variant['python'].split('.')[:2]) - assert build_path == os.path.join(testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-" - "py{}{}_0_g262d444.tar.bz2".format(python, _hash)) + python = "".join(metadata.config.variant["python"].split(".")[:2]) + assert build_path == os.path.join( + testing_config.croot, + testing_config.host_subdir, + f"conda-build-test-source-git-jinja2-1.20.2-py{python}{_hash}_0_g262d444.tar.bz2", + ) -@mock.patch('conda_build.source') -def test_output_without_jinja_does_not_download(mock_source, testing_workdir, testing_config): - api.get_output_file_path(os.path.join(metadata_dir, "source_git"), config=testing_config)[0] - mock_source.provide.assert_not_called() +def test_output_without_jinja_does_not_download(mocker, testing_config): + mock = mocker.patch("conda_build.source") + api.get_output_file_paths( + os.path.join(metadata_dir, "source_git"), config=testing_config + ) + mock.assert_not_called() def test_pin_compatible_semver(testing_config): - recipe_dir = os.path.join(metadata_dir, '_pin_compatible') + recipe_dir = os.path.join(metadata_dir, "_pin_compatible") metadata = api.render(recipe_dir, config=testing_config)[0][0] - assert 'zlib >=1.2.11,<2.0a0' in metadata.get_value('requirements/run') + assert "zlib >=1.2.11,<2.0a0" in metadata.get_value("requirements/run") @pytest.mark.slow -@pytest.mark.skipif( - utils.on_win and sys.version_info < (3, 6), - reason="Failing tests on CI for Python 2.7" -) -@pytest.mark.xfail(sys.platform == "win32", - reason="Defaults channel has conflicting vc packages") +@pytest.mark.xfail(on_win, reason="Defaults channel has conflicting vc packages") def test_resolved_packages_recipe(testing_config): - recipe_dir = os.path.join(metadata_dir, '_resolved_packages_host_build') + recipe_dir = os.path.join(metadata_dir, "_resolved_packages_host_build") metadata = api.render(recipe_dir, config=testing_config)[0][0] - assert all(len(pkg.split()) == 3 for pkg in metadata.get_value('requirements/run')) - run_requirements = {x.split()[0] for x in metadata.get_value('requirements/run')} + assert all(len(pkg.split()) == 3 for pkg in metadata.get_value("requirements/run")) + run_requirements = {x.split()[0] for x in metadata.get_value("requirements/run")} for package in [ - 'curl', # direct dependency - 'numpy', # direct dependency - 'zlib', # indirect dependency of curl - 'python', # indirect dependency of numpy + "curl", # direct dependency + "numpy", # direct dependency + "zlib", # indirect dependency of curl + "python", # 
indirect dependency of numpy ]: assert package in run_requirements @pytest.mark.slow def test_host_entries_finalized(testing_config): - recipe = os.path.join(metadata_dir, '_host_entries_finalized') - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - outputs = api.get_output_file_paths(metadata) - assert any('py27' in out for out in outputs) - assert any('py39' in out for out in outputs) + recipe = os.path.join(metadata_dir, "_host_entries_finalized") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + outputs = api.get_output_file_paths(metadata_tuples) + assert any("py27" in out for out in outputs) + assert any("py39" in out for out in outputs) def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir): - testing_metadata.meta['build']['string'] = 'steve' - testing_metadata.meta['requirements']['build'] = ['zlib 1.2.8'] + testing_metadata.meta["build"]["string"] = "steve" + testing_metadata.meta["requirements"]["build"] = ["zlib 1.2.8"] - api.output_yaml(testing_metadata, 'meta.yaml') + api.output_yaml(testing_metadata, "meta.yaml") metadata = api.render(testing_workdir)[0][0] - assert metadata.build_id() == 'steve' + assert metadata.build_id() == "steve" def test_pin_depends(testing_config): """This is deprecated functionality - replaced by the more general variants pinning scheme""" - recipe = os.path.join(metadata_dir, '_pin_depends_strict') - m = api.render(recipe, config=testing_config)[0][0] + recipe = os.path.join(metadata_dir, "_pin_depends_strict") + metadata = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, but having pin_depends set will force it to be. - assert any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) + assert any( + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] + ) def test_cross_recipe_with_only_build_section(testing_config): - recipe = os.path.join(metadata_dir, '_cross_prefix_elision_compiler_used') + recipe = os.path.join(metadata_dir, "_cross_prefix_elision_compiler_used") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] - assert metadata.config.host_subdir != subdir + assert metadata.config.host_subdir != context.subdir assert metadata.config.build_prefix != metadata.config.host_prefix assert not metadata.build_is_host def test_cross_info_index_platform(testing_config): - recipe = os.path.join(metadata_dir, '_cross_build_unix_windows') + recipe = os.path.join(metadata_dir, "_cross_build_unix_windows") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] info_index = metadata.info_index() - assert metadata.config.host_subdir != subdir - assert metadata.config.host_subdir == info_index['subdir'] + assert metadata.config.host_subdir != context.subdir + assert metadata.config.host_subdir == info_index["subdir"] assert metadata.config.host_platform != metadata.config.platform - assert metadata.config.host_platform == info_index['platform'] + assert metadata.config.host_platform == info_index["platform"] + + +def test_noarch_with_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_platform_deps") + build_ids = {} + for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]: + platform, arch = subdir_.split("-") + metadata = api.render( + recipe_path, config=testing_config, platform=platform, arch=arch + )[0][0] + 
build_ids[subdir_] = metadata.build_id() + + # one hash for each platform, plus one for the archspec selector + assert len(set(build_ids.values())) == 4 + assert build_ids["linux-64"] == build_ids["linux-aarch64"] + assert ( + build_ids["linux-64"] != build_ids["linux-ppc64le"] + ) # not the same due to archspec + +def test_noarch_with_no_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps") + build_ids = set() + for platform in ["osx", "linux", "win"]: + metadata = api.render(recipe_path, config=testing_config, platform=platform)[0][ + 0 + ] + build_ids.add(metadata.build_id()) -def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): - os.makedirs('config') + assert len(build_ids) == 1 + + +def test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker): + os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it - python_versions = ['2.6', '3.4', '3.10'] - config = {'python': python_versions, - 'bzip2': ['0.9', '1.0']} - with open(os.path.join('config', 'conda_build_config.yaml'), 'w') as f: + python_versions = ["2.6", "3.4", "3.11"] + config = {"python": python_versions, "bzip2": ["0.9", "1.0"]} + with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) - cc_conda_build_backup = cc_conda_build.copy() - # hacky equivalent of changing condarc - # careful, this is global and affects other tests! make sure to clear it! - cc_conda_build.update({'config_file': '${TEST_WORKDIR}/config/conda_build_config.yaml'}) - - os.environ['TEST_WORKDIR'] = testing_workdir - try: - m = api.render(os.path.join(thisdir, 'test-recipes', 'variants', '19_used_variables'), - bypass_env_check=True, finalize=False)[0][0] - # this one should have gotten clobbered by the values in the recipe - assert m.config.variant['python'] not in python_versions - # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants['bzip2']) == 2 - finally: - cc_conda_build.clear() - cc_conda_build.update(cc_conda_build_backup) + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={ + "config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml", + **context.conda_build, + }, + ) + + os.environ["TEST_WORKDIR"] = testing_workdir + metadata = api.render( + os.path.join(variants_dir, "19_used_variables"), + bypass_env_check=True, + finalize=False, + )[0][0] + # this one should have gotten clobbered by the values in the recipe + assert metadata.config.variant["python"] not in python_versions + # this confirms that we loaded the config file correctly + assert len(metadata.config.squished_variants["bzip2"]) == 2 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): - recipe = os.path.join(metadata_dir, '_self_reference_run_exports') - m = api.render(recipe)[0][0] - run_exports = m.meta.get('build', {}).get('run_exports', []) + recipe = os.path.join(metadata_dir, "_self_reference_run_exports") + metadata = api.render(recipe)[0][0] + run_exports = metadata.meta.get("build", {}).get("run_exports", []) assert run_exports assert len(run_exports) == 1 - assert run_exports[0].split()[1] == '>=1.0.0,<2.0a0' + assert run_exports[0].split()[1] == ">=1.0.0,<2.0a0" def test_run_exports_with_pin_compatible_in_subpackages(testing_config): - recipe = os.path.join(metadata_dir, '_run_exports_in_outputs') - ms = api.render(recipe, 
config=testing_config) - for m, _, _ in ms: - if m.name().startswith('gfortran_'): - run_exports = set(m.meta.get('build', {}).get('run_exports', {}).get('strong', [])) + recipe = os.path.join(metadata_dir, "_run_exports_in_outputs") + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + if metadata.name().startswith("gfortran_"): + run_exports = set( + metadata.meta.get("build", {}).get("run_exports", {}).get("strong", []) + ) assert len(run_exports) == 1 # len after splitting should be more than one because of pin_compatible. If it's only zlib, we've lost the # compatibility bound info. This is generally due to lack of rendering of an output, such that the @@ -225,43 +268,76 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): assert all(len(export.split()) > 1 for export in run_exports), run_exports -def test_ignore_build_only_deps(testing_config): - ms = api.render(os.path.join(thisdir, 'test-recipes', 'variants', 'python_in_build_only'), - bypass_env_check=True, finalize=False) - assert len(ms) == 1 - - -def test_merge_build_host_build_key(testing_workdir, testing_metadata): - m = api.render(os.path.join(metadata_dir, '_no_merge_build_host'))[0][0] - assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) +def test_ignore_build_only_deps(): + metadata_tuples = api.render( + os.path.join(variants_dir, "python_in_build_only"), + bypass_env_check=True, + finalize=False, + ) + assert len(metadata_tuples) == 1 -def test_merge_build_host_empty_host_section(testing_config): - m = api.render(os.path.join(metadata_dir, '_empty_host_avoids_merge'))[0][0] - assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) +def test_merge_build_host_build_key(): + metadata = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) -@pytest.mark.skipif(sys.platform != "linux2", reason="package on remote end is only on linux") -@pytest.mark.xfail(reason="It needs to be fixed for Python v2.7. #3681") -def test_run_exports_from_repo_without_channeldata(testing_config): - ms = api.render(os.path.join(metadata_dir, '_run_export_no_channeldata'), config=testing_config) - assert ms[0][0].meta['requirements']['build'] == ["exporty"] - # these two will be missing if run_exports has failed. 
- assert ms[0][0].meta['requirements']['host'] == ["exporty"] - assert ms[0][0].meta['requirements']['run'] == ["exporty"] +def test_merge_build_host_empty_host_section(): + metadata = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_pin_expression_works_with_prereleases(testing_config): - recipe = os.path.join(metadata_dir, '_pinning_prerelease') - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta['package']['name'] == 'bar') - assert 'foo >=3.10.0.rc1,<3.11.0a0' in m.meta['requirements']['run'] + recipe = os.path.join(metadata_dir, "_pinning_prerelease") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "foo >=3.10.0.rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] def test_pin_expression_works_with_python_prereleases(testing_config): - recipe = os.path.join(metadata_dir, '_pinning_prerelease_python') - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta['package']['name'] == 'bar') - assert 'python >=3.10.0rc1,<3.11.0a0' in m.meta['requirements']['run'] + recipe = os.path.join(metadata_dir, "_pinning_prerelease_python") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "python >=3.10.0rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] + + +@pytest.mark.benchmark +def test_pin_subpackage_benchmark(testing_config): + # Performance regression test for https://github.com/conda/conda-build/pull/5224 + recipe = os.path.join(metadata_dir, "_pin_subpackage_benchmark") + + # Create variant config of size comparable (for subdir linux-64) to + # https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/3c7d60f56a8cb7d1b8f5a8da0b02ae1f1f0982d7/recipe/conda_build_config.yaml + # Addendum: Changed number of single-value keys from 327 to 33 to reduce benchmark duration. + def create_variants(): + # ("pkg_1, ("1.1", "1.2", ...)), ("pkg_2", ("2.1", "2.2", ...)), ... 
+ packages = ((f"pkg_{i}", (f"{i}.{j}" for j in count(1))) for i in count(1)) + variant = {} + variant["zip_keys"] = [] + for version_count, package_count in [(1, 4), (4, 3), (4, 3)]: + zipped = [] + for package, versions in islice(packages, package_count): + zipped.append(package) + variant[package] = list(islice(versions, version_count)) + variant["zip_keys"].append(zipped) + # for version_count, package_count in [(3, 1), (2, 4), (1, 327)]: + for version_count, package_count in [(3, 1), (2, 4), (1, 33)]: + for package, versions in islice(packages, package_count): + variant[package] = list(islice(versions, version_count)) + validate_spec("", variant) + return variant + + metadata_tuples = api.render( + recipe, config=testing_config, channels=[], variants=create_variants() + ) + assert len(metadata_tuples) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 2c1a3b0001..963312ee44 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -1,166 +1,249 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import fnmatch +from __future__ import annotations + import os import subprocess import sys +from pathlib import Path +from typing import TYPE_CHECKING -from pkg_resources import parse_version import pytest - -from conda_build.skeletons.pypi import get_package_metadata, \ - get_entry_points, is_setuptools_enabled, convert_to_flat_list, \ - get_dependencies, get_import_tests, get_tests_require, get_home, \ - get_summary, get_license_name, clean_license_name - -try: - import ruamel_yaml -except ImportError: - try: - import ruamel.yaml as ruamel_yaml - except ImportError: - raise ImportError("No ruamel_yaml library available.\n" - "To proceed, conda install ruamel_yaml") +import ruamel.yaml from conda_build import api -from conda_build.exceptions import DependencyNeedsBuildingError -import conda_build.os_utils.external as external +from conda_build.skeletons.pypi import ( + clean_license_name, + convert_to_flat_list, + get_dependencies, + get_entry_points, + get_home, + get_import_tests, + get_license_name, + get_package_metadata, + get_summary, + get_tests_require, + is_setuptools_enabled, +) from conda_build.utils import on_win +from conda_build.version import _parse as parse_version -thisdir = os.path.dirname(os.path.realpath(__file__)) - -repo_packages = [('', 'pypi', 'pip', '8.1.2'), - ('r', 'cran', 'acs', ''), - ( - 'r', 'cran', - 'https://github.com/twitter/AnomalyDetection.git', - ''), - ('perl', 'cpan', 'Moo', ''), - ('', 'rpm', 'libX11-devel', ''), - # ('lua', luarocks', 'LuaSocket', ''), - ] - - -@pytest.mark.parametrize("prefix, repo, package, version", repo_packages) -def test_repo(prefix, repo, package, version, testing_workdir, testing_config): - api.skeletonize(package, repo, version=version, output_dir=testing_workdir, - config=testing_config) - try: - base_package, _ = os.path.splitext(os.path.basename(package)) - package_name = "-".join( - [prefix, base_package]) if prefix else base_package - contents = os.listdir(testing_workdir) - assert len([content for content in contents - if content.startswith(package_name.lower()) and - os.path.isdir(os.path.join(testing_workdir, content))]) - except: - print(os.listdir(testing_workdir)) - raise +if TYPE_CHECKING: + from conda_build.config import Config -@pytest.mark.slow -def test_name_with_version_specified(testing_workdir, testing_config): - api.skeletonize(packages='sympy', repo='pypi', version='0.7.5', - 
config=testing_config) - m = api.render('sympy/meta.yaml')[0][0] - assert m.version() == "0.7.5" - - -def test_pypi_url(testing_workdir, testing_config): - api.skeletonize('https://pypi.python.org/packages/source/s/sympy/' - 'sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9', - repo='pypi', config=testing_config) - m = api.render('sympy/meta.yaml')[0][0] - assert m.version() == "0.7.5" - +SYMPY_URL = ( + "https://files.pythonhosted.org/packages/7d/23/70fa970c07f0960f7543af982d2554be805e1034b9dcee9cb3082ce80f80/sympy-1.10.tar.gz" + "#sha256=6cf85a5cfe8fff69553e745b05128de6fc8de8f291965c63871c79701dc6efc9" +) -@pytest.fixture -def url_pylint_package(): - return "https://pypi.python.org/packages/source/p/pylint/pylint-2.3.1.tar.gz#" \ - "sha256=723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" +PYLINT_VERSION = "2.7.4" # last version to use setup.py without setup.cfg +PYLINT_HASH_TYPE = "sha256" +PYLINT_SHA256 = "bd38914c7731cdc518634a8d3c5585951302b6e2b6de60fbb3f7a0220e21eeee" +PYLINT_BLAKE2 = "2d5b491cf9e85288c29759a6535e6009938c2141b137b27a0653e435dcbad6a2" +PYLINT_FILENAME = f"pylint-{PYLINT_VERSION}.tar.gz" +PYLINT_URL = f"https://files.pythonhosted.org/packages/{PYLINT_BLAKE2[:2]}/{PYLINT_BLAKE2[2:4]}/{PYLINT_BLAKE2[4:]}/{PYLINT_FILENAME}" @pytest.fixture -def mock_metada_pylint(url_pylint_package): - import re - - version, hash_type, hash_value = re.findall( - r"pylint-(.*).tar.gz#(.*)=(.*)$", url_pylint_package - )[0] - +def mock_metadata(): return { - 'run_depends': '', - 'build_depends': '', - 'entry_points': '', - 'test_commands': '', - 'tests_require': '', - 'version': 'UNKNOWN', - 'pypiurl': url_pylint_package, - 'filename': f"black-{version}.tar.gz", - 'digest': [hash_type, hash_value], - 'import_tests': '', - 'summary': '' + "run_depends": "", + "build_depends": "", + "entry_points": "", + "test_commands": "", + "tests_require": "", + "version": "UNKNOWN", + "pypiurl": PYLINT_URL, + "filename": PYLINT_FILENAME, + "digest": [PYLINT_HASH_TYPE, PYLINT_SHA256], + "import_tests": "", + "summary": "", } @pytest.fixture -def pkginfo_pylint(url_pylint_package): +def pylint_pkginfo(): # Hardcoding it to avoid to use the get_pkginfo because it takes too much time return { - 'classifiers': [ - 'Development Status :: 6 - Mature', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General Public License (GPL)', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', - 'Topic :: Software Development :: Debuggers', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing' + "classifiers": [ + "Development Status :: 6 - Mature", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU General Public License (GPL)", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3 :: 
Only", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Debuggers", + "Topic :: Software Development :: Quality Assurance", + "Topic :: Software Development :: Testing", ], - 'entry_points': { - 'console_scripts': [ - 'pylint = pylint:run_pylint', - 'epylint = pylint:run_epylint', - 'pyreverse = pylint:run_pyreverse', - 'symilar = pylint:run_symilar' + "entry_points": { + "console_scripts": [ + "pylint = pylint:run_pylint", + "epylint = pylint:run_epylint", + "pyreverse = pylint:run_pyreverse", + "symilar = pylint:run_symilar", ] }, - 'extras_require': {':sys_platform=="win32"': ['colorama']}, - 'home': 'https://github.com/PyCQA/pylint', - 'install_requires': [ - 'astroid>=2.2.0,<3', 'isort>=4.2.5,<5', 'mccabe>=0.6,<0.7' + "extras_require": {':sys_platform=="win32"': ["colorama"]}, + "home": "https://github.com/PyCQA/pylint", + "install_requires": [ + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", + "mccabe >=0.6,<0.7", + "toml >=0.7.1", + ], + "license": "GPL", + "name": "pylint", + "packages": [ + "pylint", + "pylint.checkers", + "pylint.checkers.refactoring", + "pylint.config", + "pylint.extensions", + "pylint.lint", + "pylint.message", + "pylint.pyreverse", + "pylint.reporters", + "pylint.reporters.ureports", + "pylint.testutils", + "pylint.utils", + ], + "setuptools": True, + "summary": "python code static checker", + "tests_require": ["pytest", "pytest-benchmark"], + "version": "2.3.1", + } + + +@pytest.fixture +def pylint_metadata(): + return { + "run_depends": [ + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", + "mccabe >=0.6,<0.7", + "toml >=0.7.1", + ], + "build_depends": [ + "pip", + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", + "mccabe >=0.6,<0.7", + "toml >=0.7.1", + ], + "entry_points": [ + "pylint = pylint:run_pylint", + "epylint = pylint:run_epylint", + "pyreverse = pylint:run_pyreverse", + "symilar = pylint:run_symilar", + ], + "test_commands": [ + "pylint --help", + "epylint --help", + "pyreverse --help", + "symilar --help", ], - 'license': 'GPL', - 'name': 'pylint', - 'packages': [ - 'pylint', 'pylint.checkers', 'pylint.pyreverse', - 'pylint.extensions', 'pylint.reporters', 'pylint.reporters.ureports' + "tests_require": ["pytest", "pytest-benchmark"], + "version": PYLINT_VERSION, + "pypiurl": PYLINT_URL, + "filename": PYLINT_FILENAME, + "digest": [PYLINT_HASH_TYPE, PYLINT_SHA256], + "import_tests": [ + "pylint", + "pylint.checkers", + "pylint.checkers.refactoring", + "pylint.config", + "pylint.extensions", + "pylint.lint", + "pylint.message", + "pylint.pyreverse", + "pylint.reporters", + "pylint.reporters.ureports", + "pylint.testutils", + "pylint.utils", ], - 'setuptools': True, - 'summary': 'python code static checker', - 'tests_require': ['pytest'], - 'version': '2.3.1' + "summary": "python code static checker", + "packagename": "pylint", + "home": "https://github.com/PyCQA/pylint", + "license": "GNU General Public (GPL)", + "license_family": "LGPL", } -def test_get_entry_points(testing_workdir, pkginfo_pylint, - result_metadata_pylint): - pkginfo = pkginfo_pylint +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") +@pytest.mark.parametrize( + "prefix, repo, package, version", + [ + ("", "pypi", "pip", "8.1.2"), + ("r-", "cran", "acs", None), + ("r-", "cran", "https://github.com/twitter/AnomalyDetection.git", None), + ("perl-", "cpan", "Moo", None), + ("", "rpm", "libX11-devel", None), + # skeleton("luarocks") appears broken and needs 
work + # https://github.com/conda/conda-build/issues/4756 + # ("lua-", "luarocks", "LuaSocket", None), + ], +) +def test_repo( + prefix: str, + repo: str, + package: str, + version: str | None, + tmp_path: Path, + testing_config, +): + api.skeletonize( + package, + repo, + version=version, + output_dir=tmp_path, + config=testing_config, + ) + + package_name = f"{prefix}{Path(package).stem}".lower() + assert len( + [ + content + for content in tmp_path.iterdir() + if content.name.startswith(package_name) and content.is_dir() + ] + ) + + +@pytest.mark.parametrize( + "package,version", + [ + pytest.param("sympy", "1.10", id="with version"), + pytest.param(SYMPY_URL, None, id="with url"), + ], +) +def test_sympy(package: str, version: str | None, tmp_path: Path, testing_config): + api.skeletonize( + packages=package, + repo="pypi", + version=version, + config=testing_config, + output_dir=tmp_path, + ) + metadata = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] + assert metadata.version() == "1.10" + + +def test_get_entry_points(pylint_pkginfo, pylint_metadata): + pkginfo = pylint_pkginfo entry_points = get_entry_points(pkginfo) - assert entry_points["entry_points"] == result_metadata_pylint[ - "entry_points"] - assert entry_points["test_commands"] == result_metadata_pylint[ - "test_commands"] + assert entry_points["entry_points"] == pylint_metadata["entry_points"] + assert entry_points["test_commands"] == pylint_metadata["test_commands"] def test_convert_to_flat_list(): @@ -170,85 +253,39 @@ def test_convert_to_flat_list(): def test_is_setuptools_enabled(): assert not is_setuptools_enabled({"entry_points": "STRING"}) - assert not is_setuptools_enabled({ - "entry_points": { - "console_scripts": ["CONSOLE"], - "gui_scripts": ["GUI"], + assert not is_setuptools_enabled( + { + "entry_points": { + "console_scripts": ["CONSOLE"], + "gui_scripts": ["GUI"], + } } - }) + ) - assert is_setuptools_enabled({ - "entry_points": { - "console_scripts": ["CONSOLE"], - "gui_scripts": ["GUI"], - "foo_scripts": ["SCRIPTS"], + assert is_setuptools_enabled( + { + "entry_points": { + "console_scripts": ["CONSOLE"], + "gui_scripts": ["GUI"], + "foo_scripts": ["SCRIPTS"], + } } - }) - - -@pytest.fixture -def result_metadata_pylint(url_pylint_package): - return { - 'run_depends': [ - 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7' - ], - 'build_depends': [ - 'pip', 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7' - ], - 'entry_points': [ - 'pylint = pylint:run_pylint', - 'epylint = pylint:run_epylint', - 'pyreverse = pylint:run_pyreverse', - 'symilar = pylint:run_symilar' - ], - 'test_commands': [ - 'pylint --help', - 'epylint --help', - 'pyreverse --help', - 'symilar --help' - ], - 'tests_require': ['pytest'], - 'version': '2.3.1', - 'pypiurl': url_pylint_package, - 'filename': 'black-2.3.1.tar.gz', - 'digest': [ - 'sha256', - '723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1' - ], - 'import_tests': [ - 'pylint', - 'pylint.checkers', - 'pylint.extensions', - 'pylint.pyreverse', - 'pylint.reporters', - 'pylint.reporters.ureports' - ], - 'summary': 'python code static checker', - 'packagename': 'pylint', - 'home': 'https://github.com/PyCQA/pylint', - 'license': 'GNU General Public (GPL)', - 'license_family': 'LGPL' - } + ) def test_get_dependencies(): assert get_dependencies( - ['astroid >=2.2.0,<3 #COMMENTS', 'isort >=4.2.5,<5', - 'mccabe >=0.6,<0.7'], - False - ) == ['astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7'] + ["astroid >=2.2.0,<3 #COMMENTS", 
"isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], + False, + ) == ["astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"] assert get_dependencies( - ['astroid >=2.2.0,<3 #COMMENTS', 'isort >=4.2.5,<5', - 'mccabe >=0.6,<0.7'], - True - ) == ['setuptools', 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', - 'mccabe >=0.6,<0.7'] + ["astroid >=2.2.0,<3 #COMMENTS", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], True + ) == ["setuptools", "astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"] -def test_get_import_tests(pkginfo_pylint, result_metadata_pylint): - assert get_import_tests(pkginfo_pylint) \ - == result_metadata_pylint["import_tests"] +def test_get_import_tests(pylint_pkginfo, pylint_metadata): + assert get_import_tests(pylint_pkginfo) == pylint_metadata["import_tests"] def test_get_home(): @@ -264,35 +301,27 @@ def test_get_summary(): assert get_summary({"summary": 'SUMMARY "QUOTES"'}) == r"SUMMARY \"QUOTES\"" -def test_license_name(url_pylint_package, pkginfo_pylint): +def test_license_name(pylint_pkginfo): license_name = "GNU General Public License (GPL)" - assert get_license_name(url_pylint_package, pkginfo_pylint, True, {}) \ - == license_name + assert get_license_name(PYLINT_URL, pylint_pkginfo, True, {}) == license_name assert clean_license_name(license_name) == "GNU General Public (GPL)" assert clean_license_name("MIT License") == "MIT" -def test_get_tests_require(pkginfo_pylint, result_metadata_pylint): - assert get_tests_require(pkginfo_pylint) == result_metadata_pylint[ - "tests_require"] +def test_get_tests_require(pylint_pkginfo, pylint_metadata): + assert get_tests_require(pylint_pkginfo) == pylint_metadata["tests_require"] -def test_get_package_metadata( - testing_workdir, - testing_config, - url_pylint_package, - mock_metada_pylint, - result_metadata_pylint -): +def test_get_package_metadata(testing_config, mock_metadata, pylint_metadata): get_package_metadata( - url_pylint_package, - mock_metada_pylint, + PYLINT_URL, + mock_metadata, {}, ".", - "3.7", + "3.9", False, False, - [url_pylint_package], + [PYLINT_URL], False, True, [], @@ -300,163 +329,203 @@ def test_get_package_metadata( config=testing_config, setup_options=[], ) - assert mock_metada_pylint == result_metadata_pylint + assert mock_metadata == pylint_metadata @pytest.mark.slow -def test_pypi_with_setup_options(testing_workdir, testing_config): +def test_pypi_with_setup_options(tmp_path: Path, testing_config): # Use photutils package below because skeleton will fail unless the setup.py is given # the flag --offline because of a bootstrapping a helper file that # occurs by default. # Test that the setup option is used in constructing the skeleton. - api.skeletonize(packages='photutils', repo='pypi', version='0.2.2', - setup_options='--offline', - config=testing_config) + api.skeletonize( + packages="photutils", + repo="pypi", + version="1.10.0", + setup_options="--offline", + config=testing_config, + output_dir=tmp_path, + extra_specs=["extension-helpers"], + ) # Check that the setup option occurs in bld.bat and build.sh. - m = api.render('photutils')[0][0] - assert '--offline' in m.meta['build']['script'] + metadata = api.render(str(tmp_path / "photutils"))[0][0] + assert "--offline" in metadata.meta["build"]["script"] -def test_pypi_pin_numpy(testing_workdir, testing_config): +def test_pypi_pin_numpy(tmp_path: Path, testing_config: Config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. 
- api.skeletonize(packages='msumastro', repo='pypi', version='0.9.0', - config=testing_config, - pin_numpy=True) - with open(os.path.join('msumastro', 'meta.yaml')) as f: - assert f.read().count('numpy x.x') == 2 - with pytest.raises(DependencyNeedsBuildingError): - api.build('msumastro') + api.skeletonize( + packages="fasttext", + repo="pypi", + version="0.9.2", + config=testing_config, + pin_numpy=True, + output_dir=tmp_path, + ) + assert (tmp_path / "fasttext" / "meta.yaml").read_text().count("numpy x.x") == 2 -def test_pypi_version_sorting(testing_workdir, testing_config): +def test_pypi_version_sorting(tmp_path: Path, testing_config: Config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. - api.skeletonize(packages='impyla', repo='pypi', config=testing_config) - m = api.render('impyla')[0][0] - assert parse_version(m.version()) >= parse_version("0.13.8") + api.skeletonize( + packages="fasttext", + repo="pypi", + config=testing_config, + output_dir=tmp_path, + ) + metadata = api.render(str(tmp_path / "fasttext"))[0][0] + assert parse_version(metadata.version()) >= parse_version("0.9.2") def test_list_skeletons(): skeletons = api.list_skeletons() - assert set(skeletons) == {'pypi', 'cran', 'cpan', 'luarocks', 'rpm'} + assert set(skeletons) == {"pypi", "cran", "cpan", "luarocks", "rpm"} -def test_pypi_with_entry_points(testing_workdir): - api.skeletonize('planemo', repo='pypi', python_version="3.7") - assert os.path.isdir('planemo') +def test_pypi_with_entry_points(tmp_path: Path): + api.skeletonize("planemo", repo="pypi", python_version="3.7", output_dir=tmp_path) + assert (tmp_path / "planemo").is_dir() -def test_pypi_with_version_arg(testing_workdir): +def test_pypi_with_version_arg(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1442 - api.skeletonize('PrettyTable', 'pypi', version='0.7.2') - m = api.render('prettytable')[0][0] - assert parse_version(m.version()) == parse_version("0.7.2") + api.skeletonize("PrettyTable", "pypi", version="0.7.2", output_dir=tmp_path) + metadata = api.render(str(tmp_path / "prettytable"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.7.2") @pytest.mark.slow -def test_pypi_with_extra_specs(testing_workdir, testing_config): +def test_pypi_with_extra_specs(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/1697 # For mpi4py: - testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') - extra_specs = ['cython', 'mpi4py'] + testing_config.channel_urls.append("https://repo.anaconda.com/pkgs/free") + extra_specs = ["cython", "mpi4py"] if not on_win: - extra_specs.append('nomkl') - api.skeletonize('bigfile', 'pypi', extra_specs=extra_specs, - version='0.1.24', python="3.6", config=testing_config) - m = api.render('bigfile')[0][0] - assert parse_version(m.version()) == parse_version("0.1.24") - assert any('cython' in req for req in m.meta['requirements']['host']) - assert any('mpi4py' in req for req in m.meta['requirements']['host']) + extra_specs.append("nomkl") + api.skeletonize( + "bigfile", + "pypi", + extra_specs=extra_specs, + version="0.1.24", + python="3.6", + config=testing_config, + output_dir=tmp_path, + ) + metadata = api.render(str(tmp_path / "bigfile"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.1.24") + assert any("cython" in req for req in metadata.meta["requirements"]["host"]) + assert any("mpi4py" in req for req in metadata.meta["requirements"]["host"]) 
@pytest.mark.slow -def test_pypi_with_version_inconsistency(testing_workdir, testing_config): +def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/189 # For mpi4py: - extra_specs = ['mpi4py'] + extra_specs = ["mpi4py"] if not on_win: - extra_specs.append('nomkl') - testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') - api.skeletonize('mpi4py_test', 'pypi', extra_specs=extra_specs, - version='0.0.10', python="3.6", config=testing_config) - m = api.render('mpi4py_test')[0][0] - assert parse_version(m.version()) == parse_version("0.0.10") + extra_specs.append("nomkl") + testing_config.channel_urls.append("https://repo.anaconda.com/pkgs/free") + api.skeletonize( + "mpi4py_test", + "pypi", + extra_specs=extra_specs, + version="0.0.10", + python="3.6", + config=testing_config, + output_dir=tmp_path, + ) + metadata = api.render(str(tmp_path / "mpi4py_test"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.0.10") -def test_pypi_with_basic_environment_markers(testing_workdir): +def test_pypi_with_basic_environment_markers(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1974 - api.skeletonize('coconut', 'pypi', version='1.2.2') - m = api.render('coconut')[0][0] + api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) + metadata = api.render(tmp_path / "coconut")[0][0] - build_reqs = str(m.meta['requirements']['host']) - run_reqs = str(m.meta['requirements']['run']) + build_reqs = str(metadata.meta["requirements"]["host"]) + run_reqs = str(metadata.meta["requirements"]["run"]) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs - if sys.version_info >= (2, 7): - assert "pygments" in build_reqs - assert "pygments" in run_reqs - else: - assert "pygments" not in build_reqs - assert "pygments" not in run_reqs + assert "pygments" in build_reqs + assert "pygments" in run_reqs -def test_setuptools_test_requirements(testing_workdir): - api.skeletonize(packages='hdf5storage', repo='pypi') - m = api.render('hdf5storage')[0][0] - assert m.meta['test']['requires'] == ['nose >=1.0'] +def test_setuptools_test_requirements(tmp_path: Path): + api.skeletonize(packages="hdf5storage", repo="pypi", output_dir=tmp_path) + metadata = api.render(str(tmp_path / "hdf5storage"))[0][0] + assert metadata.meta["test"]["requires"] == ["nose >=1.0"] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") -def test_pypi_section_order_preserved(testing_workdir): +def test_pypi_section_order_preserved(tmp_path: Path): """ Test whether sections have been written in the correct order. """ from conda_build.render import FIELDS - from conda_build.skeletons.pypi import (ABOUT_ORDER, - REQUIREMENTS_ORDER, - PYPI_META_STATIC) + from conda_build.skeletons.pypi import ( + ABOUT_ORDER, + PYPI_META_STATIC, + REQUIREMENTS_ORDER, + ) - api.skeletonize(packages='sympy', repo='pypi') + api.skeletonize(packages="sympy", repo="pypi", output_dir=tmp_path) # Since we want to check the order of items in the recipe (not whether # the metadata values themselves are sensible), read the file as (ordered) # yaml, and check the order. 
- with open('sympy/meta.yaml') as file: - lines = [ln for ln in file.readlines() if not ln.startswith("{%")] + lines = [ + line + for line in (tmp_path / "sympy" / "meta.yaml").read_text().splitlines() + if not line.startswith("{%") + ] # The loader below preserves the order of entries... - recipe = ruamel_yaml.load('\n'.join(lines), - Loader=ruamel_yaml.RoundTripLoader) + recipe = ruamel.yaml.load("\n".join(lines), Loader=ruamel.yaml.RoundTripLoader) major_sections = list(recipe.keys()) # Blank fields are omitted when skeletonizing, so prune any missing ones # before comparing. pruned_fields = [f for f in FIELDS if f in major_sections] assert major_sections == pruned_fields - assert list(recipe['about']) == ABOUT_ORDER - assert list(recipe['requirements']) == REQUIREMENTS_ORDER + assert list(recipe["about"]) == ABOUT_ORDER + assert list(recipe["requirements"]) == REQUIREMENTS_ORDER for k, v in PYPI_META_STATIC.items(): assert list(v.keys()) == list(recipe[k]) +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.slow @pytest.mark.flaky(rerun=5, reruns_delay=2) -@pytest.mark.skipif(not external.find_executable("shellcheck"), reason="requires shellcheck >=0.7.0") +@pytest.mark.skipif(on_win, reason="shellcheck is not available on Windows") @pytest.mark.parametrize( - "package, repo", [("r-rmarkdown", "cran"), ("Perl::Lint", "cpan"), ("screen", "rpm")] + "package, repo", + [ + ("r-rmarkdown", "cran"), + ("Perl::Lint", "cpan"), + ("screen", "rpm"), + ], ) -def test_build_sh_shellcheck_clean(package, repo, testing_workdir, testing_config): - api.skeletonize(packages=package, repo=repo, output_dir=testing_workdir, config=testing_config) - - matches = [] - for root, dirnames, filenames in os.walk(testing_workdir): - for filename in fnmatch.filter(filenames, "build.sh"): - matches.append(os.path.join(root, filename)) +def test_build_sh_shellcheck_clean( + package: str, repo: str, tmp_path: Path, testing_config +): + api.skeletonize( + packages=package, + repo=repo, + output_dir=tmp_path, + config=testing_config, + ) - build_sh = matches[0] + build_sh = next( + Path(root, filename) + for root, _, filenames in os.walk(tmp_path) + for filename in filenames + if filename == "build.sh" + ) cmd = [ "shellcheck", "--enable=all", @@ -467,7 +536,6 @@ def test_build_sh_shellcheck_clean(package, repo, testing_workdir, testing_confi ] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - sc_stdout, _ = p.communicate() - findings = sc_stdout.decode(sys.stdout.encoding).replace("\r\n", "\n").splitlines() - assert findings == [] + stdout, _ = p.communicate() + assert not stdout assert p.returncode == 0 diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py new file mode 100644 index 0000000000..5945158023 --- /dev/null +++ b/tests/test_api_skeleton_cpan.py @@ -0,0 +1,24 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Integrative tests of the CPAN skeleton that start from +conda_build.api.skeletonize and check the output files +""" + +import pytest + +from conda_build import api +from conda_build.jinja_context import compiler + + +@pytest.mark.slow +@pytest.mark.flaky(rerun=5, reruns_delay=2) +def test_xs_needs_c_compiler(testing_config): + """Perl packages with XS files need a C compiler""" + # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. 
+ api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) + metadata = api.render( + "perl-sub-identify/0.14", finalize=False, bypass_env_check=True + )[0][0] + build_requirements = metadata.get_value("requirements/build") + assert compiler("c", testing_config) in build_requirements diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 52babe2098..57e9d02550 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -1,103 +1,96 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Integrative tests of the CRAN skeleton that start from conda_build.api.skeletonize and check the output files -''' +""" +from pathlib import Path +from typing import Sequence -import os import pytest from conda_build import api from conda_build.skeletons.cran import CRAN_BUILD_SH_SOURCE, CRAN_META -from conda_build.utils import ensure_list - - -# CRAN packages to test license_file entry. -# (package, license_id, license_family, license_files) -cran_packages = [ - ("r-rmarkdown", "GPL-3", "GPL3", "GPL-3"), # cran: 'GPL-3' - ( - # cran: 'Artistic License 2.0' - "r-badger", - "Artistic-2.0", - "OTHER", - "Artistic-2.0", - ), - ("r-udpipe", "MPL-2.0", "OTHER", ""), # cran: 'MPL-2.0' - ("r-broom", "MIT", "MIT", ["MIT", "LICENSE"]), # cran: 'MIT + file LICENSE' - ( - # cran: 'BSD 2-clause License + file LICENSE' - "r-meanr", - "BSD_2_clause", - "BSD", - ["BSD_2_clause", "LICENSE"], - ), - ("r-zoo", "GPL-2 | GPL-3", "GPL3", ["GPL-2", "GPL-3"]), # cran: 'GPL-2 | GPL-3' - ("r-magree", "GPL-3 | GPL-2", "GPL3", ["GPL-3", "GPL-2"]), # cran: 'GPL-3 | GPL-2' - ("r-mglm", "GPL-2", "GPL2", "GPL-2"), # cran: 'GPL (>= 2)' -] +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.slow -@pytest.mark.parametrize("package, license_id, license_family, license_files", cran_packages) -@pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_license(package, license_id, license_family, license_files, testing_workdir, testing_config): - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config) - m = api.render(os.path.join(package, 'meta.yaml'))[0][0] - m_license_id = m.get_value('about/license') - assert m_license_id == license_id - m_license_family = m.get_value('about/license_family') - assert m_license_family == license_family - m_license_files = ensure_list(m.get_value('about/license_file', '')) - license_files = ensure_list(license_files) - for m_license_file in m_license_files: - assert os.path.basename(m_license_file) in license_files - - -# CRAN packages to test skip entry. 
-# (package, skip_text) -cran_os_type_pkgs = [ - ('bigReg', 'skip: True # [not unix]'), - ('blatr', 'skip: True # [not win]') - ] - - -@pytest.mark.parametrize("package, skip_text", cran_os_type_pkgs) +@pytest.mark.parametrize( + "package,license_id,license_family,license_files", + [ + ("r-rmarkdown", "GPL-3", "GPL3", {"GPL-3"}), + ("r-fastdigest", "Artistic-2.0", "OTHER", {"Artistic-2.0"}), + ("r-tokenizers.bpe", "MPL-2.0", "OTHER", set()), + ("r-broom", "MIT", "MIT", {"MIT", "LICENSE"}), + ("r-meanr", "BSD_2_clause", "BSD", {"BSD_2_clause", "LICENSE"}), + ("r-base64enc", "GPL-2 | GPL-3", "GPL3", {"GPL-2", "GPL-3"}), + ("r-magree", "GPL-3 | GPL-2", "GPL3", {"GPL-3", "GPL-2"}), + ("r-mglm", "GPL-2", "GPL2", {"GPL-2"}), + ], +) +# @pytest.mark.flaky(rerun=5, reruns_delay=2) +def test_cran_license( + package: str, + license_id: str, + license_family: str, + license_files: Sequence[str], + tmp_path: Path, + testing_config, +): + api.skeletonize( + packages=package, repo="cran", output_dir=tmp_path, config=testing_config + ) + metadata = api.render(str(tmp_path / package / "meta.yaml"))[0][0] + + assert metadata.get_value("about/license") == license_id + assert metadata.get_value("about/license_family") == license_family + assert { + Path(license).name for license in metadata.get_value("about/license_file", "") + } == set(license_files) + + +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") +@pytest.mark.parametrize( + "package,skip_text", + [ + ("bigReg", "skip: True # [not unix]"), + ("blatr", "skip: True # [not win]"), + ], +) @pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_os_type(package, skip_text, testing_workdir, testing_config): - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config) - fpath = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml') - with open(fpath) as f: - assert skip_text in f.read() +def test_cran_os_type(package: str, skip_text: str, tmp_path: Path, testing_config): + api.skeletonize( + packages=package, repo="cran", output_dir=tmp_path, config=testing_config + ) + assert skip_text in (tmp_path / f"r-{package.lower()}" / "meta.yaml").read_text() # Test cran skeleton argument --no-comments @pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_no_comments(testing_workdir, testing_config): +def test_cran_no_comments(tmp_path: Path, testing_config): package = "data.table" - meta_yaml_comment = ' # This is required to make R link correctly on Linux.' - build_sh_comment = '# Add more build steps here, if they are necessary.' - build_sh_shebang = '#!/bin/bash' + meta_yaml_comment = " # This is required to make R link correctly on Linux." + build_sh_comment = "# Add more build steps here, if they are necessary." 
+ build_sh_shebang = "#!/bin/bash" # Check that comments are part of the templates assert meta_yaml_comment in CRAN_META assert build_sh_comment in CRAN_BUILD_SH_SOURCE assert build_sh_shebang in CRAN_BUILD_SH_SOURCE - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config, no_comments=True) + api.skeletonize( + packages=package, + repo="cran", + output_dir=tmp_path, + config=testing_config, + no_comments=True, + ) # Check that comments got removed - meta_yaml = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml') - with open(meta_yaml) as f: - assert meta_yaml_comment not in f.read() + meta_yaml_text = (tmp_path / f"r-{package.lower()}" / "meta.yaml").read_text() + assert meta_yaml_comment not in meta_yaml_text - build_sh = os.path.join(testing_workdir, 'r-' + package.lower(), 'build.sh') - with open(build_sh) as f: - build_sh_text = f.read() - assert build_sh_comment not in build_sh_text - assert build_sh_shebang in build_sh_text + build_sh_text = (tmp_path / f"r-{package.lower()}" / "build.sh").read_text() + assert build_sh_comment not in build_sh_text + assert build_sh_shebang in build_sh_text diff --git a/tests/test_api_test.py b/tests/test_api_test.py index 4bb70a220c..2bb76838aa 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -9,53 +9,58 @@ import pytest from conda_build import api + from .utils import metadata_dir @pytest.mark.sanity -def test_recipe_test(testing_workdir, testing_config): +def test_recipe_test(testing_config): """Test calling conda build -t """ - recipe = os.path.join(metadata_dir, 'has_prefix_files') + recipe = os.path.join(metadata_dir, "has_prefix_files") metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, notest=True, anaconda_upload=False) api.test(recipe, config=metadata.config) @pytest.mark.sanity -def test_package_test(testing_workdir, testing_config): +def test_package_test(testing_config): """Test calling conda build -t - rather than """ - recipe = os.path.join(metadata_dir, 'has_prefix_files') + recipe = os.path.join(metadata_dir, "has_prefix_files") metadata = api.render(recipe, config=testing_config)[0][0] outputs = api.build(metadata, notest=True, anaconda_upload=False) api.test(outputs[0], config=metadata.config) -def test_package_test_without_recipe_in_package(testing_workdir, testing_metadata): +def test_package_test_without_recipe_in_package(testing_metadata): """Can't test packages after building if recipe is not included. Not enough info to go on.""" testing_metadata.config.include_recipe = False output = api.build(testing_metadata, notest=True, copy_test_source_files=True)[0] api.test(output, config=testing_metadata.config) -def test_package_with_jinja2_does_not_redownload_source(testing_workdir, testing_config, mocker): - recipe = os.path.join(metadata_dir, 'jinja2_build_str') +def test_package_with_jinja2_does_not_redownload_source( + testing_config, + mocker, + conda_build_test_recipe_envvar: str, +): + recipe = os.path.join(metadata_dir, "jinja2_build_str") metadata = api.render(recipe, config=testing_config, dirty=True)[0][0] outputs = api.build(metadata, notest=True, anaconda_upload=False) # this recipe uses jinja2, which should trigger source download, except that source download # will have already happened in the build stage. 
# https://github.com/conda/conda-build/issues/1451 - provide = mocker.patch('conda_build.source.provide') + provide = mocker.patch("conda_build.source.provide") api.test(outputs[0], config=metadata.config) assert not provide.called @pytest.mark.sanity def test_api_extra_dep(testing_metadata): - testing_metadata.meta['test']['imports'] = ['click'] + testing_metadata.meta["test"]["imports"] = ["click"] output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] # extra_deps will add it in - api.test(output, config=testing_metadata.config, extra_deps=['click']) + api.test(output, config=testing_metadata.config, extra_deps=["click"]) # missing click dep will fail tests with pytest.raises(SystemExit): diff --git a/tests/test_api_update_index.py b/tests/test_api_update_index.py deleted file mode 100644 index cf1f06c44d..0000000000 --- a/tests/test_api_update_index.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import os - -from conda_build import api - - -def test_update_index(testing_workdir): - api.update_index(testing_workdir) - files = "repodata.json", "repodata.json.bz2" - for f in files: - assert os.path.isfile(os.path.join(testing_workdir, 'noarch', f)) diff --git a/tests/test_build.py b/tests/test_build.py index e7e086f8e6..839cce4b9e 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -5,56 +5,38 @@ and is more unit-test oriented. """ +from __future__ import annotations + import json import os import sys +from contextlib import nullcontext +from pathlib import Path import pytest -from conda_build import build, api -from conda_build.utils import on_win - -from .utils import metadata_dir, get_noarch_python_meta - -prefix_tests = {"normal": os.path.sep} -if sys.platform == "win32": - prefix_tests.update({"double_backslash": "\\\\", - "forward_slash": "/"}) - - -def _write_prefix(filename, prefix, replacement): - with open(filename, "w") as f: - f.write(prefix.replace(os.path.sep, replacement)) - f.write("\n") +from conda_build import api, build +from .utils import get_noarch_python_meta, metadata_dir -def test_find_prefix_files(testing_workdir): - """ - Write test output that has the prefix to be found, then verify that the prefix finding - identified the correct number of files. 
- """ - # create text files to be replaced - files = [] - for slash_style in prefix_tests: - filename = os.path.join(testing_workdir, "%s.txt" % slash_style) - _write_prefix(filename, testing_workdir, prefix_tests[slash_style]) - files.append(filename) - assert len(list(build.have_prefix_files(files, testing_workdir))) == len(files) - - -def test_build_preserves_PATH(testing_workdir, testing_config): - m = api.render(os.path.join(metadata_dir, 'source_git'), config=testing_config)[0][0] - ref_path = os.environ['PATH'] - build.build(m, stats=None) - assert os.environ['PATH'] == ref_path +def test_build_preserves_PATH(testing_config): + metadata = api.render( + os.path.join(metadata_dir, "source_git"), config=testing_config + )[0][0] + ref_path = os.environ["PATH"] + build.build(metadata, stats=None) + assert os.environ["PATH"] == ref_path def test_sanitize_channel(): - test_url = 'https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel' - assert build.sanitize_channel(test_url) == 'https://conda.anaconda.org/somechannel' - test_url_auth = 'https://myuser:mypass@conda.anaconda.org/somechannel' - assert build.sanitize_channel(test_url_auth) == 'https://conda.anaconda.org/somechannel' + test_url = "https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel" + assert build.sanitize_channel(test_url) == "https://conda.anaconda.org/somechannel" + test_url_auth = "https://myuser:mypass@conda.anaconda.org/somechannel" + assert ( + build.sanitize_channel(test_url_auth) + == "https://conda.anaconda.org/somechannel" + ) def test_get_short_path(testing_metadata): @@ -69,9 +51,14 @@ def test_get_short_path(testing_metadata): def test_has_prefix(): - files_with_prefix = [("prefix/path", "text", "short/path/1"), - ("prefix/path", "text", "short/path/2")] - assert build.has_prefix("short/path/1", files_with_prefix) == ("prefix/path", "text") + files_with_prefix = [ + ("prefix/path", "text", "short/path/1"), + ("prefix/path", "text", "short/path/2"), + ] + assert build.has_prefix("short/path/1", files_with_prefix) == ( + "prefix/path", + "text", + ) assert build.has_prefix("short/path/nope", files_with_prefix) == (None, None) @@ -81,16 +68,14 @@ def test_is_no_link(): assert build.is_no_link(no_link, "path/nope") is None -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="os.link is not available so can't setup test") def test_sorted_inode_first_path(testing_workdir): - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_one_hardlink = os.path.join(testing_workdir, "one_hl") - open(path_one, "a").close() - open(path_two, "a").close() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_hardlink = Path(testing_workdir, "one_hl") - os.link(path_one, path_one_hardlink) + path_one.touch() + path_two.touch() + os.link(path_one, path_hardlink) files = ["one", "two", "one_hl"] assert build.get_inode_paths(files, "one", testing_workdir) == ["one", "one_hl"] @@ -99,54 +84,61 @@ def test_sorted_inode_first_path(testing_workdir): def test_create_info_files_json(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_foo = os.path.join(testing_workdir, "foo") - open(path_one, "a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + 
Path(testing_workdir, "one").touch() + Path(testing_workdir, "two").touch() + Path(testing_workdir, "foo").touch() + files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "foo"] + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) + + assert json.loads((info_dir / "paths.json").read_text()) == { + "paths": [ + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } + - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { - "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output - - -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="os.symlink is not available so can't setup test") def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_three = os.path.join(testing_workdir, "three") # do not make this one - path_foo = os.path.join(testing_workdir, "foo") - path_two_symlink = os.path.join(testing_workdir, "two_sl") - symlink_to_nowhere = os.path.join(testing_workdir, "nowhere_sl") - recursive_symlink = os.path.join(testing_workdir, "recursive_sl") - cycle1_symlink = os.path.join(testing_workdir, "cycle1_sl") - cycle2_symlink = os.path.join(testing_workdir, "cycle2_sl") - open(path_one, "a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_three = Path(testing_workdir, "three") # do not make this one + path_foo = Path(testing_workdir, "foo") + path_two_symlink = Path(testing_workdir, "two_sl") + symlink_to_nowhere = Path(testing_workdir, "nowhere_sl") + recursive_symlink = Path(testing_workdir, "recursive_sl") + cycle1_symlink = Path(testing_workdir, "cycle1_sl") + cycle2_symlink = Path(testing_workdir, "cycle2_sl") + + path_one.touch() + path_two.touch() + path_foo.touch() os.symlink(path_two, path_two_symlink) os.symlink(path_three, symlink_to_nowhere) @@ -156,88 +148,135 @@ def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): os.symlink(cycle2_symlink, cycle1_symlink) files_with_prefix = [("prefix/path", 
"text", "foo")] - files = ["one", "two", "foo", "two_sl", "nowhere_sl", "recursive_sl", "cycle1_sl", "cycle2_sl"] - - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { + files = [ + "one", + "two", + "foo", + "two_sl", + "nowhere_sl", + "recursive_sl", + "cycle1_sl", + "cycle2_sl", + ] + + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) + assert json.loads((info_dir / "paths.json").read_text()) == { "paths": [ - {"path_type": "softlink", "_path": "cycle1_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "cycle2_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "nowhere_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "recursive_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "two_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output - - -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="os.link is not available so can't setup test") + { + "path_type": "softlink", + "_path": "cycle1_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "cycle2_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "nowhere_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "recursive_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "two_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } + + def 
test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_foo = os.path.join(testing_workdir, "foo") - path_one_hardlink = os.path.join(testing_workdir, "one_hl") - open(path_one, "a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_foo = Path(testing_workdir, "foo") + path_one_hardlink = Path(testing_workdir, "one_hl") + + path_one.touch() + path_two.touch() + path_foo.touch() os.link(path_one, path_one_hardlink) + files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "one_hl", "foo"] - - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { - "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", "inode_paths": ["one", "one_hl"], - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one_hl", "inode_paths": ["one", "one_hl"], - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output - - -def test_rewrite_output(testing_workdir, testing_config, capsys): + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) + assert json.loads((info_dir / "paths.json").read_text()) == { + "paths": [ + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "inode_paths": ["one", "one_hl"], + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one_hl", + "inode_paths": ["one", "one_hl"], + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } + + +def test_rewrite_output(testing_config, capsys): api.build(os.path.join(metadata_dir, "_rewrite_env"), config=testing_config) captured = capsys.readouterr() stdout = captured.out - if sys.platform == 'win32': + if sys.platform == "win32": assert "PREFIX=%PREFIX%" in stdout assert "LIBDIR=%PREFIX%\\lib" in stdout assert "PWD=%SRC_DIR%" in stdout @@ -247,3 +286,41 @@ def test_rewrite_output(testing_workdir, testing_config, capsys): assert "LIBDIR=$PREFIX/lib" in stdout assert "PWD=$SRC_DIR" in stdout assert "BUILD_PREFIX=$BUILD_PREFIX" in stdout + + 
+@pytest.mark.parametrize(
+    "script,error,interpreter",
+    [
+        # known interpreter
+        ("foo.sh", None, build.INTERPRETER_BASH),
+        ("foo.bat", None, build.INTERPRETER_BAT),
+        ("foo.ps1", None, build.INTERPRETER_POWERSHELL),
+        ("foo.py", None, build.INTERPRETER_PYTHON),
+        ("foo.bar.sh", None, build.INTERPRETER_BASH),
+        ("foo.bar.bat", None, build.INTERPRETER_BAT),
+        ("foo.bar.ps1", None, build.INTERPRETER_POWERSHELL),
+        ("foo.bar.py", None, build.INTERPRETER_PYTHON),
+        # unknown interpreter
+        ("foo", NotImplementedError, None),
+        ("foo.unknown", NotImplementedError, None),
+        ("foo.zsh", NotImplementedError, None),
+        ("foo.csh", NotImplementedError, None),
+        ("foo.exe", NotImplementedError, None),
+        ("foo.exe", NotImplementedError, None),
+        ("foo.sh.other", NotImplementedError, None),
+        ("foo.bat.other", NotImplementedError, None),
+        ("foo.ps1.other", NotImplementedError, None),
+        ("foo.py.other", NotImplementedError, None),
+        ("foo.sh_what", NotImplementedError, None),
+        ("foo.bat_what", NotImplementedError, None),
+        ("foo.ps1_what", NotImplementedError, None),
+        ("foo.py_what", NotImplementedError, None),
+    ],
+)
+def test_guess_interpreter(
+    script: str,
+    error: type[Exception] | None,
+    interpreter: list[str],
+):
+    with pytest.raises(error) if error else nullcontext():
+        assert build.guess_interpreter(script) == interpreter
diff --git a/tests/test_check.py b/tests/test_check.py
index 8e5109c771..ee1f2bebc6 100644
--- a/tests/test_check.py
+++ b/tests/test_check.py
@@ -3,9 +3,10 @@
 import os
 
 from conda_build import api
+
 from .utils import metadata_dir
 
 
 def test_check_multiple_sources():
-    recipe = os.path.join(metadata_dir, 'multiple_sources')
+    recipe = os.path.join(metadata_dir, "multiple_sources")
     assert api.check(recipe)
diff --git a/tests/test_cli.py b/tests/test_cli.py
deleted file mode 100644
index 66d51fbbe3..0000000000
--- a/tests/test_cli.py
+++ /dev/null
@@ -1,751 +0,0 @@
-# Copyright (C) 2014 Anaconda, Inc
-# SPDX-License-Identifier: BSD-3-Clause
-# For the most part, all functionality should be tested with the api tests,
-# because they actually provide coverage.  These tests are here to make
-# sure that the CLI still works.
- -from glob import glob -import json -import os -import re -import sys -import yaml - -import pytest - -from conda_build.conda_interface import cc_conda_build, context, download, reset_context -from conda_build.tarcheck import TarCheck - -from conda_build import api -from conda_build.config import Config, zstd_compression_level_default -from conda_build.utils import get_site_packages, on_win, get_build_folders, package_has_file, tar_xf -from conda_build.conda_interface import TemporaryDirectory -from conda_build.exceptions import DependencyNeedsBuildingError -import conda_build -from .utils import metadata_dir - -import conda_build.cli.main_build as main_build -import conda_build.cli.main_render as main_render -import conda_build.cli.main_convert as main_convert -import conda_build.cli.main_develop as main_develop -import conda_build.cli.main_metapackage as main_metapackage -import conda_build.cli.main_skeleton as main_skeleton -import conda_build.cli.main_inspect as main_inspect -import conda_build.cli.main_index as main_index - - -def _reset_config(search_path=None): - reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, 'conda_build') else {} - ) - - -@pytest.mark.sanity -def test_build(): - args = ['--no-anaconda-upload', os.path.join(metadata_dir, "empty_sections"), '--no-activate', - '--no-anaconda-upload'] - main_build.execute(args) - - -@pytest.mark.serial -def test_build_add_channel(): - """This recipe requires the conda_build_test_requirement package, which is - only on the conda_build_test channel. This verifies that the -c argument - works.""" - - args = ['-c', 'conda_build_test', '--no-activate', '--no-anaconda-upload', - os.path.join(metadata_dir, "_recipe_requiring_external_channel")] - main_build.execute(args) - - -def test_build_without_channel_fails(testing_workdir): - # remove the conda forge channel from the arguments and make sure that we fail. If we don't, - # we probably have channels in condarc, and this is not a good test. - args = ['--no-anaconda-upload', '--no-activate', - os.path.join(metadata_dir, "_recipe_requiring_external_channel")] - with pytest.raises(DependencyNeedsBuildingError): - main_build.execute(args) - - -def test_render_add_channel(): - """This recipe requires the conda_build_test_requirement package, which is - only on the conda_build_test channel. 
This verifies that the -c argument - works for rendering.""" - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, 'out.yaml') - args = ['-c', 'conda_build_test', os.path.join(metadata_dir, - "_recipe_requiring_external_channel"), '--file', rendered_filename] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [pkg for pkg in rendered_meta['requirements']['build'] if - 'conda_build_test_requirement' in pkg][0] - required_package_details = required_package_string.split(' ') - assert len(required_package_details) > 1, ("Expected version number on successful " - "rendering, but got only {}".format(required_package_details)) - assert required_package_details[1] == '1.0', f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" - - -def test_render_without_channel_fails(): - # do make extra channel available, so the required package should not be found - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, 'out.yaml') - args = ['--override-channels', os.path.join(metadata_dir, "_recipe_requiring_external_channel"), '--file', rendered_filename] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [pkg for pkg in - rendered_meta.get('requirements', {}).get('build', []) - if 'conda_build_test_requirement' in pkg][0] - assert required_package_string == 'conda_build_test_requirement', \ - f"Expected to get only base package name because it should not be found, but got :{required_package_string}" - - -def test_no_filename_hash(testing_workdir, testing_metadata, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--output', testing_workdir, '--old-build-string'] - main_render.execute(args) - output, error = capfd.readouterr() - assert not re.search('h[0-9a-f]{%d}' % testing_metadata.config.hash_length, output) - - args = ['--no-anaconda-upload', '--no-activate', testing_workdir, '--old-build-string'] - main_build.execute(args) - output, error = capfd.readouterr() - assert not re.search('test_no_filename_hash.*h[0-9a-f]{%d}' % testing_metadata.config.hash_length, output) - assert not re.search('test_no_filename_hash.*h[0-9a-f]{%d}' % testing_metadata.config.hash_length, error) - - -def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--output', testing_workdir] - main_render.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_metadata.config.host_subdir, - "test_render_output_build_path-1.0-1.tar.bz2") - output, error = capfd.readouterr() - assert output.rstrip() == test_path, error - assert error == "" - - -def test_render_output_build_path_and_file(testing_workdir, testing_metadata, capfd, caplog): - api.output_yaml(testing_metadata, 'meta.yaml') - rendered_filename = 'out.yaml' - args = ['--output', '--file', rendered_filename, testing_workdir] - main_render.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_metadata.config.host_subdir, - "test_render_output_build_path_and_file-1.0-1.tar.bz2") - output, error = capfd.readouterr() - assert output.rstrip() == test_path, error - assert error == "" - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - assert rendered_meta['package']['name'] == 
'test_render_output_build_path_and_file' - - -def test_build_output_build_path(testing_workdir, testing_metadata, testing_config, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - testing_config.verbose = False - testing_config.debug = False - args = ['--output', testing_workdir] - main_build.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - "test_build_output_build_path-1.0-1.tar.bz2") - output, error = capfd.readouterr() - assert test_path == output.rstrip(), error - assert error == "" - - -def test_build_output_build_path_multiple_recipes(testing_workdir, testing_metadata, - testing_config, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - testing_config.verbose = False - skip_recipe = os.path.join(metadata_dir, "build_skip") - args = ['--output', testing_workdir, skip_recipe] - - main_build.execute(args) - - test_path = lambda pkg: os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, pkg) - test_paths = [test_path("test_build_output_build_path_multiple_recipes-1.0-1.tar.bz2"), ] - - output, error = capfd.readouterr() - # assert error == "" - assert output.rstrip().splitlines() == test_paths, error - - -def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config): - args = [os.path.join(metadata_dir, "has_prefix_files"), '--croot', testing_config.croot, - '--no-anaconda-upload'] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], 'binary-has-prefix', refresh_mode='forced') - assert data - if hasattr(data, 'decode'): - data = data.decode('UTF-8') - assert 'conda-build-test-has-prefix-files_1' in data - - -@pytest.mark.sanity -@pytest.mark.skipif(on_win, reason="prefix is always short on win.") -def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): - recipe_path = os.path.join(metadata_dir, '_test_long_test_prefix') - args = [recipe_path, '--no-anaconda-upload'] - main_build.execute(args) - - args.append('--no-long-test-prefix') - with pytest.raises(SystemExit): - main_build.execute(args) - - -def test_build_no_build_id(testing_workdir, testing_config): - args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id', - '--croot', testing_config.croot, '--no-activate', '--no-anaconda-upload'] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], 'binary-has-prefix', refresh_mode='forced') - assert data - if hasattr(data, 'decode'): - data = data.decode('UTF-8') - assert 'has_prefix_files_1' not in data - - -def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_config): - """Test that building two recipes in one CLI call separates the build environment for each""" - os.makedirs('recipe1') - os.makedirs('recipe2') - api.output_yaml(testing_metadata, 'recipe1/meta.yaml') - with open('recipe1/run_test.py', 'w') as f: - f.write("import os; assert 'test_build_multiple_recipes' in os.getenv('PREFIX')") - testing_metadata.meta['package']['name'] = 'package2' - api.output_yaml(testing_metadata, 'recipe2/meta.yaml') - with open('recipe2/run_test.py', 'w') as f: - f.write("import os; assert 'package2' in os.getenv('PREFIX')") - args = ['--no-anaconda-upload', 'recipe1', 'recipe2'] - main_build.execute(args) - - -def test_build_output_folder(testing_workdir, testing_metadata, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - with TemporaryDirectory() as tmp: - out = os.path.join(tmp, 'out') - args = [testing_workdir, '--no-build-id', - '--croot', tmp, '--no-activate', '--no-anaconda-upload', - 
'--output-folder', out] - output = main_build.execute(args)[0] - assert os.path.isfile(os.path.join(out, testing_metadata.config.host_subdir, - os.path.basename(output))) - - -def test_build_source(testing_workdir): - with TemporaryDirectory() as tmp: - args = [os.path.join(metadata_dir, '_pyyaml_find_header'), '--source', '--no-build-id', - '--croot', tmp, '--no-activate', '--no-anaconda-upload', ] - main_build.execute(args) - assert os.path.isfile(os.path.join(tmp, 'work', 'setup.py')) - - -def test_render_output_build_path_set_python(testing_workdir, testing_metadata, capfd): - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - api.output_yaml(testing_metadata, 'meta.yaml') - # build the other major thing, whatever it is - if sys.version_info.major == 3: - version = "2.7" - else: - version = "3.5" - - api.output_yaml(testing_metadata, 'meta.yaml') - metadata = api.render(testing_workdir, python=version)[0][0] - - args = ['--output', testing_workdir, '--python', version] - main_render.execute(args) - - _hash = metadata.hash_dependencies() - test_path = "test_render_output_build_path_set_python-1.0-py{}{}{}_1.tar.bz2".format( - version.split('.')[0], version.split('.')[1], _hash) - output, error = capfd.readouterr() - assert os.path.basename(output.rstrip()) == test_path, error - - -@pytest.mark.sanity -def test_skeleton_pypi(testing_workdir, testing_config): - args = ['pypi', 'peppercorn'] - main_skeleton.execute(args) - assert os.path.isdir('peppercorn') - - # ensure that recipe generated is buildable - main_build.execute(('peppercorn',)) - - -@pytest.mark.sanity -def test_skeleton_pypi_compatible_versions(testing_workdir, testing_config): - args = ['pypi', 'openshift'] - main_skeleton.execute(args) - assert os.path.isdir('openshift') - - -@pytest.mark.slow -def test_skeleton_pypi_arguments_work(testing_workdir): - """ - These checks whether skeleton executes without error when these - options are specified on the command line AND whether the underlying - functionality works as a regression test for: - - https://github.com/conda/conda-build/pull/1384 - """ - args = ['pypi', 'msumastro', '--version=1.1.6', '--pin-numpy'] - main_skeleton.execute(args) - assert os.path.isdir('msumastro') - - # Deliberately bypass metadata reading in conda build to get as - # close to the "ground truth" as possible. - with open(os.path.join('msumastro', 'meta.yaml')) as f: - assert f.read().count('numpy x.x') == 2 - - args = ['pypi', 'photutils', '--version=0.2.2', '--setup-options=--offline'] - main_skeleton.execute(args) - assert os.path.isdir('photutils') - # Check that the setup option occurs in bld.bat and build.sh. 
- - m = api.render('photutils')[0][0] - assert '--offline' in m.meta['build']['script'] - assert m.version() == '0.2.2' - - -def test_metapackage(testing_config, testing_workdir): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test', '1.0', '-d', 'bzip2', '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test-1.0-0.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_build_number(testing_config, testing_workdir): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test_build_number', '1.0', '-d', 'bzip2', '--build-number', '1', - '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test_build_number-1.0-1.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_build_string(testing_config, testing_workdir): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test_build_string', '1.0', '-d', 'bzip2', '--build-string', 'frank', - '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test_build_string-1.0-frank*.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_metadata(testing_config, testing_workdir): - args = ['metapackage_testing_metadata', '1.0', '-d', 'bzip2', "--home", "http://abc.com", - "--summary", "wee", "--license", "BSD", '--no-anaconda-upload'] - main_metapackage.execute(args) - - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_testing_metadata-1.0-0.tar.bz2'))[0] - assert os.path.isfile(test_path) - info = json.loads(package_has_file(test_path, 'info/index.json')) - assert info['license'] == 'BSD' - info = json.loads(package_has_file(test_path, 'info/about.json')) - assert info['home'] == 'http://abc.com' - assert info['summary'] == 'wee' - - -def testing_index(testing_workdir): - args = ['.'] - main_index.execute(args) - assert os.path.isfile('noarch/repodata.json') - - -def test_inspect_installable(testing_workdir): - args = ['channels', '--test-installable', 'conda-team'] - main_inspect.execute(args) - - -def test_inspect_linkages(testing_workdir, capfd): - # get a package that has known object output - args = ['linkages', 'python'] - if sys.platform == 'win32': - with pytest.raises(SystemExit) as exc: - main_inspect.execute(args) - assert 'conda inspect linkages is only implemented in Linux and OS X' in exc - else: - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'libncursesw' in output - - -def test_inspect_objects(testing_workdir, capfd): - # get a package that has known object output - args = ['objects', 'python'] - if sys.platform != 'darwin': - with pytest.raises(SystemExit) as exc: - main_inspect.execute(args) - assert 'conda inspect objects is only implemented in OS X' in exc - else: - main_inspect.execute(args) - output, error = capfd.readouterr() - assert re.search('rpath:.*@loader_path', output) - - -@pytest.mark.skipif(on_win, reason="Windows prefix length doesn't matter (yet?)") -def test_inspect_prefix_length(testing_workdir, capfd): - from conda_build import api - # build our own known-length package here - 
test_base = os.path.expanduser("~/cbtmp") - config = api.Config(croot=test_base, anaconda_upload=False, verbose=True) - recipe_path = os.path.join(metadata_dir, "has_prefix_files") - config.prefix_length = 80 - outputs = api.build(recipe_path, config=config, notest=True) - - args = ['prefix-lengths'] + outputs - with pytest.raises(SystemExit): - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'Packages with binary prefixes shorter than' in output - assert all(fn in output for fn in outputs) - - config.prefix_length = 255 - # reset the build id so that a new one is computed - config._build_id = "" - api.build(recipe_path, config=config, notest=True) - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'No packages found with binary prefixes shorter' in output - - -def test_inspect_hash_input(testing_metadata, testing_workdir, capfd): - testing_metadata.meta['requirements']['build'] = ['zlib'] - api.output_yaml(testing_metadata, 'meta.yaml') - output = api.build(testing_workdir, notest=True)[0] - with open(os.path.join(testing_workdir, 'conda_build_config.yaml'), 'w') as f: - yaml.dump({'zlib': ['1.2.11']}, f) - args = ['hash-inputs', output] - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'zlib' in output - - -def test_develop(testing_env): - f = "https://pypi.io/packages/source/c/conda_version_test/conda_version_test-0.1.0-1.tar.gz" - download(f, "conda_version_test.tar.gz") - tar_xf("conda_version_test.tar.gz", testing_env) - extract_folder = 'conda_version_test-0.1.0-1' - cwd = os.getcwd() - args = ['-p', testing_env, extract_folder] - main_develop.execute(args) - py_ver = '.'.join((str(sys.version_info.major), str(sys.version_info.minor))) - with open(os.path.join(get_site_packages(testing_env, py_ver), 'conda.pth')) as f_pth: - assert cwd in f_pth.read() - args = ['--uninstall', '-p', testing_env, extract_folder] - main_develop.execute(args) - with open(os.path.join(get_site_packages(testing_env, py_ver), 'conda.pth')) as f_pth: - assert cwd not in f_pth.read() - - -@pytest.mark.xfail(on_win, reason="This is a flaky test that doesn't seem to be working well on Windows.") -def test_convert(testing_workdir, testing_config): - # download a sample py2.7 package - f = 'https://repo.anaconda.com/pkgs/free/win-64/affine-2.0.0-py27_0.tar.bz2' - pkg_name = "affine-2.0.0-py27_0.tar.bz2" - download(f, pkg_name) - # convert it to all platforms - args = ['-o', 'converted', '--platform', 'all', pkg_name] - main_convert.execute(args) - platforms = ['osx-64', 'win-32', 'linux-64', 'linux-32'] - for platform in platforms: - dirname = os.path.join('converted', platform) - if platform != 'win-64': - assert os.path.isdir(dirname) - assert pkg_name in os.listdir(dirname) - testing_config.host_subdir = platform - with TarCheck(os.path.join(dirname, pkg_name), config=testing_config) as tar: - tar.correct_subdir() - else: - assert not os.path.isdir(dirname) - - -@pytest.mark.serial -def test_purge(testing_workdir, testing_metadata): - """ - purge clears out build folders - things like some_pkg_12048309850135 - - It does not clear out build packages from folders like osx-64 or linux-64. 
- """ - api.output_yaml(testing_metadata, 'meta.yaml') - outputs = api.build(testing_workdir, notest=True) - args = ['purge'] - main_build.execute(args) - dirs = get_build_folders(testing_metadata.config.croot) - assert not dirs - # make sure artifacts are kept - only temporary folders get nuked - assert all(os.path.isfile(fn) for fn in outputs) - - -@pytest.mark.serial -def test_purge_all(testing_workdir, testing_metadata): - """ - purge-all clears out build folders as well as build packages in the osx-64 folders and such - """ - api.output_yaml(testing_metadata, 'meta.yaml') - with TemporaryDirectory() as tmpdir: - testing_metadata.config.croot = tmpdir - outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) - args = ['purge-all', '--croot', tmpdir] - main_build.execute(args) - assert not get_build_folders(testing_metadata.config.croot) - assert not any(os.path.isfile(fn) for fn in outputs) - - -@pytest.mark.serial -def test_no_force_upload(mocker, testing_workdir, testing_metadata, request): - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - f.write('anaconda_upload: True\n') - f.write('conda_build:\n') - f.write(' force_upload: False\n') - del testing_metadata.meta['test'] - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--no-force-upload', testing_workdir] - call = mocker.patch.object(conda_build.build.subprocess, 'call') - request.addfinalizer(_reset_config) - _reset_config([os.path.join(testing_workdir, '.condarc')]) - main_build.execute(args) - pkg = api.get_output_file_path(testing_metadata) - assert call.called_once_with(['anaconda', 'upload', pkg]) - args = [testing_workdir] - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - f.write('anaconda_upload: True\n') - main_build.execute(args) - assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) - - -@pytest.mark.slow -def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): - monkeypatch.setenv('CONDA_PY', '36') - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - api.output_yaml(testing_metadata, 'meta.yaml') - outputs = api.build(testing_workdir, notest=True) - assert any('py36' in output for output in outputs) - - -def test_build_skip_existing(testing_workdir, capfd, mocker): - # build the recipe first - empty_sections = os.path.join(metadata_dir, "empty_sections") - args = ['--no-anaconda-upload', empty_sections] - main_build.execute(args) - args.insert(0, '--skip-existing') - import conda_build.source - provide = mocker.patch.object(conda_build.source, 'provide') - main_build.execute(args) - provide.assert_not_called() - output, error = capfd.readouterr() - assert ("are already built" in output or "are already built" in error) - - -def test_build_skip_existing_croot(testing_workdir, capfd): - # build the recipe first - empty_sections = os.path.join(metadata_dir, "empty_sections") - args = ['--no-anaconda-upload', '--croot', testing_workdir, empty_sections] - main_build.execute(args) - args.insert(0, '--skip-existing') - main_build.execute(args) - output, error = capfd.readouterr() - assert "are already built" in output - - -@pytest.mark.sanity -def test_package_test(testing_workdir, testing_metadata): - """Test calling conda build -t - rather than """ - api.output_yaml(testing_metadata, 'recipe/meta.yaml') - output = api.build(testing_workdir, config=testing_metadata.config, notest=True)[0] - args = ['-t', output] - main_build.execute(args) - - -def 
test_activate_scripts_not_included(testing_workdir): - recipe = os.path.join(metadata_dir, '_activate_scripts_not_included') - args = ['--no-anaconda-upload', '--croot', testing_workdir, recipe] - main_build.execute(args) - out = api.get_output_file_paths(recipe, croot=testing_workdir)[0] - for f in ('bin/activate', 'bin/deactivate', 'bin/conda', - 'Scripts/activate.bat', 'Scripts/deactivate.bat', 'Scripts/conda.bat', - 'Scripts/activate.exe', 'Scripts/deactivate.exe', 'Scripts/conda.exe', - 'Scripts/activate', 'Scripts/deactivate', 'Scripts/conda'): - assert not package_has_file(out, f) - - -def test_relative_path_croot(): - # this tries to build a package while specifying the croot with a relative path: - # conda-build --no-test --croot ./relative/path - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - args = ['--no-anaconda-upload', '--croot', croot_rel, empty_sections] - outputfile = main_build.execute(args) - - assert len(outputfile) == 1 - assert os.path.isfile(outputfile[0]) - - -def test_relative_path_test_artifact(): - # this test builds a package into (cwd)/relative/path and then calls: - # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) - - # build the package - args = ['--no-anaconda-upload', '--no-test', '--croot', croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 - - output_file_rel = os.path.join(croot_rel, os.path.relpath(output_file_abs[0], croot_abs)) - - # run the test stage with relative path - args = ['--no-anaconda-upload', '--test', output_file_rel] - main_build.execute(args) - - -def test_relative_path_test_recipe(): - # this test builds a package into (cwd)/relative/path and then calls: - # conda-build --test --croot ./relative/path/ /abs/path/to/recipe - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) - - # build the package - args = ['--no-anaconda-upload', '--no-test', '--croot', croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 - - # run the test stage with relative croot - args = ['--no-anaconda-upload', '--test', '--croot', croot_rel, empty_sections] - main_build.execute(args) - - -@pytest.mark.slow -def test_render_with_python_arg_reduces_subspace(capfd): - recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") - # build the package - args = [recipe, '--python=2.7', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 2 - - args = [recipe, '--python=3.9', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 1 - - # should raise an error, because python 3.6 is not in the matrix, so we don't know which vc - # to associate with - args = [recipe, '--python=3.6', '--output'] - with pytest.raises(ValueError): - main_render.execute(args) - - -def test_render_with_python_arg_CLI_reduces_subspace(capfd): - recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") - # build the package - args = [recipe, '--variants', '{python: [2.7, 3.9]}', '--output'] - main_render.execute(args) - out, err 
= capfd.readouterr() - assert len(out.splitlines()) == 3 - - args = [recipe, '--variants', '{python: 2.7}', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 2 - - args = [recipe, '--variants', '{python: 3.9}', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 1 - - -def test_test_extra_dep(testing_metadata): - testing_metadata.meta['test']['imports'] = ['imagesize'] - api.output_yaml(testing_metadata, 'meta.yaml') - output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] - - # tests version constraints. CLI would quote this - "click <6.7" - args = [output, '-t', '--extra-deps', 'imagesize <1.0'] - # extra_deps will add it in - main_build.execute(args) - - # missing click dep will fail tests - with pytest.raises(SystemExit): - args = [output, '-t'] - # extra_deps will add it in - main_build.execute(args) - - -@pytest.mark.parametrize( - 'additional_args, is_long_test_prefix', - [ - ([], True), - (['--long-test-prefix'], True), - (['--no-long-test-prefix'], False) - ], -) -def test_long_test_prefix(additional_args, is_long_test_prefix): - args = ['non_existing_recipe'] + additional_args - parser, args = main_build.parse_args(args) - config = Config(**args.__dict__) - assert config.long_test_prefix is is_long_test_prefix - - -@pytest.mark.serial -@pytest.mark.parametrize( - 'zstd_level_condarc, zstd_level_cli', - [ - (None, None), - (1, None), - (1, 2), - ], -) -def test_zstd_compression_level(testing_workdir, request, zstd_level_condarc, zstd_level_cli): - assert zstd_compression_level_default not in {zstd_level_condarc, zstd_level_cli} - if zstd_level_condarc: - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - print( - 'conda_build:', - f' zstd_compression_level: {zstd_level_condarc}', - sep='\n', - file=f, - ) - request.addfinalizer(_reset_config) - _reset_config([os.path.join(testing_workdir, '.condarc')]) - args = ['non_existing_recipe'] - if zstd_level_cli: - args.append(f'--zstd-compression-level={zstd_level_cli}') - parser, args = main_build.parse_args(args) - config = Config(**args.__dict__) - if zstd_level_cli: - assert config.zstd_compression_level == zstd_level_cli - elif zstd_level_condarc: - assert config.zstd_compression_level == zstd_level_condarc - else: - assert config.zstd_compression_level == zstd_compression_level_default - - -def test_user_warning(tmpdir, recwarn): - dir_recipe_path = tmpdir.mkdir("recipe-path") - recipe = dir_recipe_path.join("meta.yaml") - recipe.write("") - - main_build.parse_args([str(recipe)]) - assert ( - f"RECIPE_PATH received is a file ({recipe}).\n" - "It should be a path to a folder.\n" - "Forcing conda-build to use the recipe file." 
-    ) == str(recwarn.pop(UserWarning).message)
-
-    main_build.parse_args([str(dir_recipe_path)])
-    assert not recwarn.list
diff --git a/tests/test_codesigned.py b/tests/test_codesigned.py
new file mode 100644
index 0000000000..3ed13086da
--- /dev/null
+++ b/tests/test_codesigned.py
@@ -0,0 +1,97 @@
+# Copyright (C) 2014 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
+import os
+from functools import lru_cache
+from pathlib import Path
+from shutil import which
+from subprocess import CalledProcessError, check_output, run
+
+import pytest
+
+from conda_build.utils import on_win
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+REPO_ROOT = (Path(HERE) / "..").resolve().absolute()
+STUB_FOLDER = REPO_ROOT / "conda_build"
+
+
+@lru_cache(maxsize=None)
+def find_signtool() -> str | None:
+    """Tries to find signtool
+
+    Prefers signtool on PATH otherwise searches system.
+    Ref:
+      - https://learn.microsoft.com/en-us/dotnet/framework/tools/signtool-exe
+      - https://learn.microsoft.com/en-us/windows/win32/seccrypto/signtool
+      - https://learn.microsoft.com/en-us/windows/win32/seccrypto/using-signtool-to-verify-a-file-signature
+    """
+    signtool_path = which("signtool")
+    if signtool_path:
+        return signtool_path
+
+    # Common installation directories where signtool might be located
+    common_paths = [
+        "C:\\Program Files (x86)\\Windows Kits\\10\\bin",
+        "C:\\Program Files\\Windows Kits\\10\\bin",
+        "C:\\Windows\\System32",
+    ]
+
+    signtool_path = None
+    # Search for signtool in common paths
+    for path in common_paths:
+        if signtool_path:
+            # We found one already
+            return signtool_path
+        if not os.path.exists(path):
+            continue
+        signtool_path = os.path.join(path, "signtool.exe")
+        if os.path.exists(signtool_path):
+            return signtool_path
+        elif "Windows Kits" in path:
+            signtool_path = None
+            max_version = 0
+            for dirname in os.listdir(path):
+                # Use most recent signtool version
+                if not dirname.endswith(".0"):
+                    continue  # next dirname
+                if int(dirname.replace(".", "")) < max_version:
+                    continue  # next dirname
+
+                maybe_signtool_path = os.path.join(path, dirname, "x64", "signtool.exe")
+                if os.path.exists(maybe_signtool_path):
+                    signtool_path = maybe_signtool_path
+    return signtool_path
+
+
+@lru_cache(maxsize=None)
+def signtool_unsupported_because() -> str:
+    reason = ""
+    if not on_win:
+        reason = "Only verifying signatures of stub exe's on windows"
+        return reason
+    signtool = find_signtool()
+    if not signtool:
+        reason = "signtool: unable to locate signtool.exe"
+    try:
+        check_output([signtool, "verify", "/?"])
+    except CalledProcessError as exc:
+        reason = f"signtool: something went wrong while running 'signtool verify /?', output:\n{exc.output}\n"
+    return reason
+
+
+def signtool_unsupported() -> bool:
+    return bool(signtool_unsupported_because())
+
+
+@pytest.mark.skipif(signtool_unsupported(), reason=signtool_unsupported_because())
+@pytest.mark.parametrize(
+    "stub_file_name", ["cli-32.exe", "cli-64.exe", "gui-32.exe", "gui-64.exe"]
+)
+def test_stub_exe_signatures(stub_file_name: str) -> None:
+    """Verify that signtool verifies the signature of the stub exes"""
+    stub_file = STUB_FOLDER / stub_file_name
+    signtool_exe = find_signtool()
+    completed_process = run([signtool_exe, "verify", "/pa", "/v", stub_file])
+    assert completed_process.returncode == 0
diff --git a/tests/test_conda_interface.py b/tests/test_conda_interface.py
deleted file mode 100644
index ea56b3062c..0000000000
--- a/tests/test_conda_interface.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (C) 2014 Anaconda, Inc
-# SPDX-License-Identifier: BSD-3-Clause
-from conda_build import conda_interface as ci
-
-
-def test_get_installed_version():
-    versions = ci.get_installed_version(ci.root_dir, 'conda')
-    assert versions.get('conda')
-    assert ci.VersionOrder(versions.get('conda'))
diff --git a/tests/test_config.py b/tests/test_config.py
index 4dce1e63a2..fa362a0b4f 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -2,22 +2,22 @@
 # SPDX-License-Identifier: BSD-3-Clause
 import os
 import sys
+from pathlib import Path
 
 import pytest
 
 from conda_build.config import Config, get_or_merge_config
-from conda_build.conda_interface import TemporaryDirectory
 from conda_build.utils import on_win
 
 
 @pytest.fixture
-def config():
+def config() -> Config:
     """a tiny bit of a fixture to save us from manually creating a new Config each test"""
     return Config()
 
 
 @pytest.fixture
-def build_id():
+def build_id() -> str:
     """Small support fixture for setting build id's in multiple builds which may need them"""
     return "test123"
 
 
@@ -25,31 +25,39 @@ def build_id():
 def test_set_build_id(config, build_id):
     config.build_id = build_id
     # windows always uses the short prefix due to its limitation of 260 char paths
-    if sys.platform == 'win32':
+    if sys.platform == "win32":
         assert config.host_prefix == os.path.join(config.croot, build_id, "_h_env")
     else:
-        long_prefix = os.path.join(config.croot, build_id,
-                                   "_h_env" + "_placehold" * 25)[:config.prefix_length]
+        long_prefix = os.path.join(
+            config.croot, build_id, "_h_env" + "_placehold" * 25
+        )[: config.prefix_length]
         assert config.host_prefix == long_prefix
 
 
-def test_keep_old_work(config, build_id):
+def test_keep_old_work(config: Config, build_id: str, tmp_path: Path):
     config.keep_old_work = True
-    with TemporaryDirectory() as temp_dir:
-        config.croot = temp_dir
-        config.build_id = build_id
-        work_path = os.path.join(temp_dir, build_id, "work")
-        os.makedirs(work_path)
-        # assert False
-        assert len(os.listdir(config.work_dir)) == 0
-        with open(os.path.join(work_path, 'a_touched_file.magic'), 'w') as _:
-            # Touch a random file so the "work_dir" is not empty
-            pass
-        assert len(os.listdir(config.work_dir)) > 0
-        config.compute_build_id("a_new_name", reset=True)
-        assert config.work_dir != work_path
-        assert not os.path.exists(work_path)
-        assert len(os.listdir(config.work_dir)) > 0
+    config.croot = tmp_path
+    config.build_id = build_id
+
+    magic = "a_touched_file.magic"
+
+    # empty working directory
+    orig_dir = Path(config.work_dir)
+    assert orig_dir.exists()
+    assert not len(os.listdir(config.work_dir))
+
+    # touch a file so working directory is not empty
+    (orig_dir / magic).touch()
+    assert orig_dir.exists()
+    assert len(os.listdir(config.work_dir)) == 1
+    assert Path(config.work_dir, magic).exists()
+
+    config.compute_build_id("a_new_name", reset=True)
+
+    # working directory should still exist (in new location) and have the touched file
+    assert not orig_dir.exists()
+    assert len(os.listdir(config.work_dir)) == 1
+    assert Path(config.work_dir, magic).exists()
 
 
 @pytest.mark.skipif(on_win, reason="Windows uses only the short prefix")
@@ -65,7 +73,7 @@ def test_long_build_prefix_length(config):
 def test_long_test_prefix_length(config):
     # defaults to True in conda-build 3.0+
     assert config.long_test_prefix
-    assert '_plac' in config.test_prefix
+    assert "_plac" in config.test_prefix
     config.long_test_prefix = True
     # The length of the testing prefix is reduced by 2 characters to check if the null
     # byte padding causes issues
@@ -81,32 +89,32
@@ def test_build_id_at_end_of_long_build_prefix(config, build_id): def test_create_config_with_subdir(): - config = Config(host_subdir='steve-128') - assert config.host_platform == 'steve' - assert config.host_subdir == 'steve-128' + config = Config(host_subdir="steve-128") + assert config.host_platform == "steve" + assert config.host_subdir == "steve-128" def test_set_platform(config): - config.host_platform = 'steve' + config.host_platform = "steve" arch = config.arch - assert config.host_subdir == 'steve-' + str(arch) + assert config.host_subdir == "steve-" + str(arch) def test_set_subdir(config): - config.host_subdir = 'steve' + config.host_subdir = "steve" arch = config.arch - assert config.host_subdir == 'steve-' + str(arch) - assert config.host_platform == 'steve' + assert config.host_subdir == "steve-" + str(arch) + assert config.host_platform == "steve" - config.host_subdir = 'steve-128' - assert config.host_subdir == 'steve-128' - assert config.host_platform == 'steve' - assert config.host_arch == '128' + config.host_subdir = "steve-128" + assert config.host_subdir == "steve-128" + assert config.host_platform == "steve" + assert config.host_arch == "128" def test_set_bits(config): config.host_arch = 128 - assert config.host_subdir == config.platform + '-' + str(128) + assert config.host_subdir == config.platform + "-" + str(128) assert config.host_arch == 128 diff --git a/tests/test_cpan_skeleton.py b/tests/test_cpan_skeleton.py new file mode 100644 index 0000000000..1b02331f4b --- /dev/null +++ b/tests/test_cpan_skeleton.py @@ -0,0 +1,27 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Unit tests of the CPAN skeleton utility functions +""" + +from pathlib import Path + +import pytest + +from conda_build.skeletons.cpan import get_core_modules_for_this_perl_version +from conda_build.variants import get_default_variant + + +@pytest.mark.slow +def test_core_modules(testing_config): + """ + Check expected core modules are recognized + (excluding known removed ones, e.g., Module::Build) + """ + cache_dir = Path(testing_config.src_cache_root, ".conda-build", "pickled.cb") + perl_version = testing_config.variant.get( + "perl", get_default_variant(testing_config)["perl"] + ) + core_modules = get_core_modules_for_this_perl_version(perl_version, str(cache_dir)) + assert "Config" in core_modules + assert "Module::Build" not in core_modules diff --git a/tests/test_cran_skeleton.py b/tests/test_cran_skeleton.py index 2116fff0f7..0db839a8f4 100644 --- a/tests/test_cran_skeleton.py +++ b/tests/test_cran_skeleton.py @@ -1,42 +1,118 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Unit tests of the CRAN skeleton utility functions -''' - +""" import os + import pytest +from conda.auxlib.ish import dals from conda_build.license_family import allowed_license_families -from conda_build.skeletons.cran import (get_license_info, - read_description_contents, - remove_comments) - - -thisdir = os.path.dirname(os.path.realpath(__file__)) - +from conda_build.skeletons.cran import ( + get_license_info, + read_description_contents, + remove_comments, +) -# (license_string, license_id, license_family, license_files) -cran_licenses = [('GPL-3', 'GPL-3', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\''), - ('Artistic License 2.0', 'Artistic-2.0', 'OTHER', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/Artistic-2.0\''), - ('MPL-2.0', 'MPL-2.0', 'OTHER', ''), - ('MIT + file LICENSE', 
'MIT', 'MIT', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT\'\n - LICENSE'), - ('BSD 2-clause License + file LICENSE', 'BSD_2_clause', 'BSD', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_2_clause\'\n - LICENSE'), - ('GPL-2 | GPL-3', 'GPL-2 | GPL-3', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\'\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\''), - ('GPL-3 | GPL-2', 'GPL-3 | GPL-2', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\'\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\''), - ('GPL (>= 2)', 'GPL-2', 'GPL2', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\''), - ] +from .utils import cran_dir -@pytest.mark.parametrize("license_string, license_id, license_family, license_files", cran_licenses) +@pytest.mark.parametrize( + "license_string, license_id, license_family, license_files", + [ + pytest.param( + "GPL-3", + "GPL-3", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + """ + ), + id="GPL-3", + ), + pytest.param( + "Artistic License 2.0", + "Artistic-2.0", + "OTHER", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/Artistic-2.0' + """ + ), + id="Artistic-2.0", + ), + pytest.param("MPL-2.0", "MPL-2.0", "OTHER", "", id="MPL-2.0"), + pytest.param( + "MIT + file LICENSE", + "MIT", + "MIT", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT' + - LICENSE + """ + ), + id="MIT", + ), + pytest.param( + "BSD 2-clause License + file LICENSE", + "BSD_2_clause", + "BSD", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_2_clause' + - LICENSE + """ + ), + id="BSD_2_clause", + ), + pytest.param( + "GPL-2 | GPL-3", + "GPL-2 | GPL-3", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + """ + ), + id="GPL-2 | GPL-3", + ), + pytest.param( + "GPL-3 | GPL-2", + "GPL-3 | GPL-2", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + """ + ), + id="GPL-3 | GPL-2", + ), + pytest.param( + "GPL (>= 2)", + "GPL-2", + "GPL2", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + """ + ), + id="GPL-2", + ), + ], +) def test_get_license_info(license_string, license_id, license_family, license_files): observed = get_license_info(license_string, allowed_license_families) assert observed[0] == license_id @@ -45,29 +121,35 @@ def test_get_license_info(license_string, license_id, license_family, license_fi def test_read_description_contents(): - description = os.path.join(thisdir, 'test-cran-skeleton', 'rpart', 'DESCRIPTION') - with open(description, 'rb') as fp: + description = os.path.join(cran_dir, "rpart", "DESCRIPTION") + with open(description, "rb") as fp: contents = read_description_contents(fp) - assert contents['Package'] == 'rpart' - assert contents['Priority'] == 'recommended' - assert contents['Title'] == 'Recursive Partitioning and Regression Trees' - assert contents['Depends'] == 'R (>= 2.15.0), graphics, stats, grDevices' - assert contents['License'] == 'GPL-2 | GPL-3' - assert contents['URL'] == 'https://github.com/bethatkinson/rpart, https://cran.r-project.org/package=rpart' + assert contents["Package"] == "rpart" + assert contents["Priority"] == 
"recommended" + assert contents["Title"] == "Recursive Partitioning and Regression Trees" + assert contents["Depends"] == "R (>= 2.15.0), graphics, stats, grDevices" + assert contents["License"] == "GPL-2 | GPL-3" + assert ( + contents["URL"] + == "https://github.com/bethatkinson/rpart, https://cran.r-project.org/package=rpart" + ) def test_remove_comments(): - example = ''' -#!keep -# remove - # remove -keep -keep # keep -''' - expected = ''' -#!keep -keep -keep # keep -''' - observed = remove_comments(example) - assert observed == expected + with_comments = dals( + """ + #!keep + # remove + # remove + keep + keep # keep + """ + ) + without_comments = dals( + """ + #!keep + keep + keep # keep + """ + ) + assert remove_comments(with_comments) == without_comments diff --git a/tests/test_create_test.py b/tests/test_create_test.py index 69877a72e8..c7ea321cc1 100644 --- a/tests/test_create_test.py +++ b/tests/test_create_test.py @@ -1,126 +1,151 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os - -from conda_build import create_test as ct - - -def test_create_py_files_with_py_imports(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['time', 'datetime'] - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - - -def test_create_py_files_in_other_language(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'python', 'imports': ['time', 'datetime']}] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - - -def test_create_py_files_in_other_language_multiple_python_dicts(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'python', 'imports': ['time', 'datetime']}] - testing_metadata.meta['test']['imports'].append({'lang': 'python', - 'imports': ['bokeh', 'holoviews']}) - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - assert 'import bokeh\n' in data - assert 'import holoviews\n' in data - - -def test_create_r_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['r-base', 'r-matrix'] - testing_metadata.meta['package']['name'] = 'r-conda-test' - ct.create_r_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.r') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'library(r-base)\n' in data - assert 'library(r-matrix)\n' in data - - -def test_create_r_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'r', 'imports': ['r-base', 'r-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-r' - ct.create_r_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 
'run_test.r') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'library(r-base)\n' in data - assert 'library(r-matrix)\n' in data - - -def test_create_pl_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['perl-base', 'perl-matrix'] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_pl_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.pl') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'use perl-base;\n' in data - assert 'use perl-matrix;\n' in data - - -def test_non_py_does_not_create_py_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['perl-base', 'perl-matrix'] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - py_test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert not os.path.isfile(py_test_file), "non-python package should not create run_test.py" - - -def test_create_pl_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'perl', 'imports': ['perl-base', - 'perl-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-perl' - ct.create_pl_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.pl') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'use perl-base;\n' in data - assert 'use perl-matrix;\n' in data - - -def test_create_lua_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['lua-base', 'lua-matrix'] - testing_metadata.meta['package']['name'] = 'lua-conda-test' - ct.create_lua_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.lua') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'require "lua-base"\n' in data - assert 'require "lua-matrix"\n' in data - - -def test_create_lua_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'lua', 'imports': ['lua-base', - 'lua-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-lua' - ct.create_lua_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.lua') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'require "lua-base"\n' in data - assert 'require "lua-matrix"\n' in data +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest + +from conda_build.create_test import ( + create_lua_files, + create_pl_files, + create_py_files, + create_r_files, +) + +if TYPE_CHECKING: + from typing import Any + + +@pytest.mark.parametrize( + "name,imports,expected,unexpected", + [ + pytest.param( + "name", + ["time", "datetime"], + {".py": {"import time", "import datetime"}}, + {".r", ".pl", ".lua"}, + id="implicit Python imports", + ), + pytest.param( + "r-name", + [{"lang": "python", "imports": ["time", "datetime"]}], + {".r": set(), ".py": {"import time", "import datetime"}}, + {".pl", ".lua"}, + id="explicit Python imports", + ), + pytest.param( + "r-name", + [ + {"lang": "python", "imports": ["time"]}, + {"lang": "python", "imports": ["datetime"]}, + ], + {".r": set(), ".py": {"import time", "import datetime"}}, + {".pl", ".lua"}, + 
id="multiple explicit Python imports", + ), + pytest.param( + "r-name", + ["r-time", "r-datetime"], + {".r": {"library(r-time)", "library(r-datetime)"}}, + {".py", ".pl", ".lua"}, + id="implicit R imports", + ), + pytest.param( + "perl-name", + [{"lang": "r", "imports": ["r-time", "r-datetime"]}], + {".pl": set(), ".r": {"library(r-time)", "library(r-datetime)"}}, + {".py", ".lua"}, + id="explicit R imports", + ), + # unsupported syntax, why? + # pytest.param( + # "perl-name", + # [ + # {"lang": "r", "imports": ["r-time"]}, + # {"lang": "r", "imports": ["r-datetime"]}, + # ], + # {".r": {"library(r-time)", "library(r-datetime)"}}, + # {".py", ".pl", ".lua"}, + # id="multiple explicit R imports", + # ), + pytest.param( + "perl-name", + ["perl-time", "perl-datetime"], + {".pl": {"use perl-time;", "use perl-datetime;"}}, + {".py", ".r", ".lua"}, + id="implicit Perl imports", + ), + pytest.param( + "lua-name", + [{"lang": "perl", "imports": ["perl-time", "perl-datetime"]}], + {".lua": set(), ".pl": {"use perl-time;", "use perl-datetime;"}}, + {".py", ".r"}, + id="explicit Perl imports", + ), + # unsupported syntax, why? + # pytest.param( + # "lua-name", + # [ + # {"lang": "perl", "imports": ["perl-time"]}, + # {"lang": "perl", "imports": ["perl-datetime"]}, + # ], + # {".pl": {"use perl-time;", "use perl-datetime;"}}, + # {".py", ".r", ".lua"}, + # id="multiple explicit Perl imports", + # ), + pytest.param( + "lua-name", + ["lua-time", "lua-datetime"], + {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + {".py", ".r", ".pl"}, + id="implicit Lua imports", + ), + # why is this test different from the other explicit imports? + pytest.param( + "name", + [{"lang": "lua", "imports": ["lua-time", "lua-datetime"]}], + {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + {".py", ".r", ".pl"}, + id="explicit Lua imports", + ), + # unsupported syntax, why? 
+ # pytest.param( + # "name", + # [ + # {"lang": "lua", "imports": ["lua-time"]}, + # {"lang": "lua", "imports": ["lua-datetime"]}, + # ], + # {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + # {".py", ".r", ".pl"}, + # id="multiple explicit Lua imports", + # ), + ], +) +def test_create_run_test( + name: str, + imports: Any, + expected: dict[str, set[str]], + unexpected: set[str], + testing_metadata, +): + testing_metadata.meta["package"]["name"] = name + testing_metadata.meta["test"]["imports"] = imports + create_py_files(testing_metadata, testing_metadata.config.test_dir) + create_r_files(testing_metadata, testing_metadata.config.test_dir) + create_pl_files(testing_metadata, testing_metadata.config.test_dir) + create_lua_files(testing_metadata, testing_metadata.config.test_dir) + + # assert expected test file exists + for ext, tests in expected.items(): + test_file = Path(testing_metadata.config.test_dir, "run_test").with_suffix(ext) + assert test_file.is_file() + + # ensure all tests (for this language/ext) are present in the test file + assert tests <= set(filter(None, test_file.read_text().split("\n"))) + + # assert unexpected test files do not exist + for ext in unexpected: + test_file = Path(testing_metadata.config.test_dir, "run_test").with_suffix(ext) + assert not test_file.exists() diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py new file mode 100644 index 0000000000..35383913fb --- /dev/null +++ b/tests/test_deprecations.py @@ -0,0 +1,200 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import sys +from argparse import ArgumentParser, _StoreTrueAction +from contextlib import nullcontext +from typing import TYPE_CHECKING + +import pytest + +from conda_build.deprecations import DeprecatedError, DeprecationHandler + +if TYPE_CHECKING: + from packaging.version import Version + + from conda_build.deprecations import DevDeprecationType, UserDeprecationType + +PENDING = pytest.param( + DeprecationHandler("1.0"), # deprecated + PendingDeprecationWarning, # warning + "pending deprecation", # message + id="pending", +) +FUTURE = pytest.param( + DeprecationHandler("2.0"), # deprecated + FutureWarning, # warning + "deprecated", # message + id="future", +) +DEPRECATED = pytest.param( + DeprecationHandler("2.0"), # deprecated + DeprecationWarning, # warning + "deprecated", # message + id="deprecated", +) +REMOVE = pytest.param( + DeprecationHandler("3.0"), # deprecated + None, # warning + None, # message + id="remove", +) + +parametrize_user = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, FUTURE, REMOVE], +) +parametrize_dev = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, DEPRECATED, REMOVE], +) + + +@parametrize_dev +def test_function( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated function displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + @deprecated("2.0", "3.0") + def foo(): + return True + + with pytest.warns(warning, match=message): + assert foo() + + +@parametrize_dev +def test_method( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated method displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + class Bar: + @deprecated("2.0", "3.0") + def 
foo(self): + return True + + with pytest.warns(warning, match=message): + assert Bar().foo() + + +@parametrize_dev +def test_class( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated class displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + @deprecated("2.0", "3.0") + class Foo: + pass + + with pytest.warns(warning, match=message): + assert Foo() + + +@parametrize_dev +def test_arguments( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argument displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + @deprecated.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.warns(warning, match=message): + assert foo(1, 2, three=3) + + # normal usage not needing deprecation + assert foo(1, 2) + + +@parametrize_user +def test_action( + deprecated: DeprecationHandler, + warning: UserDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argparse.Action displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + parser = ArgumentParser() + parser.add_argument( + "--foo", + action=deprecated.action("2.0", "3.0", _StoreTrueAction), + ) + + with pytest.warns(warning, match=message): + parser.parse_args(["--foo"]) + + +@parametrize_dev +def test_module( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Importing a deprecated module displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.module("2.0", "3.0") + + +@parametrize_dev +def test_constant( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Using a deprecated constant displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + deprecated.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + with pytest.warns(warning, match=message): + module.SOME_CONSTANT + + +@parametrize_dev +def test_topic( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Reaching a deprecated topic displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.topic("2.0", "3.0", topic="Some special topic") + + +def test_version_fallback() -> None: + """Test that conda can run even if deprecations can't parse the version.""" + deprecated = DeprecationHandler(None) # type: ignore[arg-type] + assert deprecated._version_less_than("0") + assert deprecated._version_tuple is None + version: Version = deprecated._version_object # type: ignore[assignment] + assert version.major == version.minor == version.micro == 0 diff --git a/tests/test_develop.py b/tests/test_develop.py index f10d19e9a1..d72bb247d3 100644 --- a/tests/test_develop.py +++ b/tests/test_develop.py @@ -1,113 +1,101 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Simple 
tests for testing functions in develop module - lower level than going through API. -''' -import os -from os.path import dirname, join, exists +""" + +from pathlib import Path +from typing import Generator + +import pytest from conda_build.develop import _uninstall, write_to_conda_pth from conda_build.utils import rm_rf -import pytest +from .utils import thisdir @pytest.fixture(scope="session") -def sp_dir(request): - ''' +def site_packages() -> Generator[Path, None, None]: + """ create site-packges/ directory in same place where test is located. This is where tests look conda.pth file. It is a session scoped fixture and it has a finalizer function invoked in the end to remove site-packages/ directory - ''' - base_dir = dirname(__file__) - sp = join(base_dir, 'site-packages') - if exists(sp): - rm_rf(sp) + """ + site_packages = Path(thisdir, "site-packages") + if site_packages.exists(): + rm_rf(str(site_packages)) - os.mkdir(sp) + site_packages.mkdir(exist_ok=True) - def cleanup(): - # session scoped cleanup is called at end of the session - rm_rf(sp) + yield site_packages - request.addfinalizer(cleanup) - - return sp + rm_rf(str(site_packages)) @pytest.fixture(scope="function") -def conda_pth(sp_dir): - ''' +def conda_pth(site_packages: Path) -> Generator[Path, None, None]: + """ Returns the path to conda.pth - though we don't expect name to change from conda.pth, better to keep this in one place Removes 'conda.pth' if it exists so each test starts without a conda.pth file - ''' - pth = join(sp_dir, 'conda.pth') - if exists(pth): - os.remove(pth) + """ + path = site_packages / "conda.pth" + if path.exists(): + path.unlink() - return pth + yield path + if path.exists(): + path.unlink() -# Note: following list is data used for testing - do not change it -_path_in_dev_mode = ['/Users/jsandhu/Documents/projects/CythonExample', - '/Users/jsandhu/Documents/projects/TestOne', - '/Users/jsandhu/Documents/projects/TestOne', - '/Users/jsandhu/Documents/projects/TestTwo'] -# following list of tuples contains the path and the number of lines -# added/remaining after invoking develop/uninstall. 
-# These are used to make assertions -_toadd_and_num_after_install = zip(_path_in_dev_mode, (1, 2, 2, 3)) -_torm_and_num_after_uninstall = zip(_path_in_dev_mode, (2, 1, 1, 0)) +DEVELOP_PATHS = ("/path/to/one", "/path/to/two", "/path/to/three") -def test_write_to_conda_pth(sp_dir, conda_pth): - ''' +def test_write_to_conda_pth(site_packages: Path, conda_pth: Path): + """ `conda develop pkg_path` invokes write_to_conda_pth() to write/append to - conda.pth - this is a basic unit test for write_to_conda_pth - - :param str sp_dir: path to site-packages directory returned by fixture - :param str conda_pth: path to conda.pth returned by fixture - ''' - assert not exists(conda_pth) - - for pth, exp_num_pths in _toadd_and_num_after_install: - write_to_conda_pth(sp_dir, pth) - assert exists(conda_pth) - # write to path twice but ensure it only gets written to fine once - write_to_conda_pth(sp_dir, pth) - with open(conda_pth) as f: - lines = f.readlines() - assert (pth + '\n') in lines - assert len(lines) == exp_num_pths - - -def test_uninstall(sp_dir, conda_pth, request): - ''' + conda.pth + """ + assert not conda_pth.exists() + + for count, path in enumerate(DEVELOP_PATHS, start=1): + # adding path + write_to_conda_pth(site_packages, path) + assert conda_pth.exists() + + develop_paths = list(filter(None, conda_pth.read_text().split("\n"))) + assert path in develop_paths + assert len(develop_paths) == count + + # adding path a second time has no effect + write_to_conda_pth(site_packages, path) + + assert list(filter(None, conda_pth.read_text().split("\n"))) == develop_paths + + +def test_uninstall(site_packages: Path, conda_pth: Path): + """ `conda develop --uninstall pkg_path` invokes uninstall() to remove path - from conda.pth - this is a unit test for uninstall - - It also includes a cleanup function that deletes the conda.pth file - - :param str sp_dir: path to site-packages directory returned by fixture - :param str conda_pth: path to conda.pth returned by fixture - ''' - # first write data in conda.pth if it doesn't yet exist - # if all tests are invoked, then conda.pth exists - if not exists(conda_pth): - for pth in _path_in_dev_mode: - write_to_conda_pth(sp_dir, pth) - - for to_rm, exp_num_pths in _torm_and_num_after_uninstall: - # here's where the testing begins - _uninstall(sp_dir, to_rm) - assert exists(conda_pth) - - with open(conda_pth) as f: - lines = f.readlines() - assert to_rm + '\n' not in lines - assert len(lines) == exp_num_pths + from conda.pth + """ + for path in DEVELOP_PATHS: + write_to_conda_pth(site_packages, path) + + for count, path in enumerate(DEVELOP_PATHS, start=1): + # removing path + _uninstall(site_packages, path) + assert conda_pth.exists() + + develop_paths = list(filter(None, conda_pth.read_text().split("\n"))) + assert path not in develop_paths + assert len(develop_paths) == len(DEVELOP_PATHS) - count + + # removing path a second time has no effect + _uninstall(site_packages, path) + + assert list(filter(None, conda_pth.read_text().split("\n"))) == develop_paths diff --git a/tests/test_environ.py b/tests/test_environ.py index fed4cf0a17..f446420feb 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -2,11 +2,16 @@ # SPDX-License-Identifier: BSD-3-Clause import os -from conda_build import environ +from conda_build.environ import create_env def test_environment_creation_preserves_PATH(testing_workdir, testing_config): - ref_path = os.environ['PATH'] - environ.create_env(testing_workdir, ['python'], env='host', config=testing_config, - 
subdir=testing_config.build_subdir) - assert os.environ['PATH'] == ref_path + ref_path = os.environ["PATH"] + create_env( + testing_workdir, + ["python"], + env="host", + config=testing_config, + subdir=testing_config.build_subdir, + ) + assert os.environ["PATH"] == ref_path diff --git a/tests/test_index.py b/tests/test_index.py deleted file mode 100644 index 628717df77..0000000000 --- a/tests/test_index.py +++ /dev/null @@ -1,1206 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -See also https://github.com/conda-incubator/conda-index -""" -import json -import os -import shutil -import tarfile -from logging import getLogger -from os.path import dirname, isdir, isfile, join - -import conda_package_handling.api -import pytest -from unittest import mock -from conda_build.conda_interface import conda_47, context -from conda_build.utils import copy_into, rm_rf - -import conda_build.api -import conda_build.index - -from .utils import archive_dir - -log = getLogger(__name__) - -here = os.path.dirname(__file__) - -# NOTE: The recipes for test packages used in this module are at https://github.com/kalefranz/conda-test-packages - -# match ./index_hotfix_pkgs/ -TEST_SUBDIR = "osx-64" - - -def download(url, local_path): - # NOTE: The tests in this module used to download packages from the - # conda-test channel. These packages are small and are now included. - if not isdir(dirname(local_path)): - os.makedirs(dirname(local_path)) - - archive_path = join(here, "archives", url.rsplit("/", 1)[-1]) - - shutil.copy(archive_path, local_path) - return local_path - - -def test_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { 
- "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - } - }, - "subdirs": ["noarch", "osx-64"], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - 
"https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - updated_packages = expected_repodata_json.get("packages") - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - }, - }, - "subdirs": ["noarch", "osx-64"], - } - - assert actual_channeldata_json == expected_channeldata_json - - -def test_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # 
####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for noarch subdir - # ####################################### - assert isfile(join(testing_workdir, "noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "noarch", - }, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. 
This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520039, - "keywords": None, - "recipe_origin": None, - } - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - # test threads=1 flow - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", threads=1 - ) - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for noarch subdir - # ####################################### - assert isfile(join(testing_workdir, "noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": {"subdir": "noarch"}, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - } - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages per subdir here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - 
"https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "noarch", "flask-0.11.1-py_0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/noarch/flask-0.11.1-py_0.tar.bz2" - ) - download(test_package_url, test_package_path) - - # only tell index to index one of them and then assert that it was added - p = os.path.join(testing_workdir, "index_file") - with open(p, "a+") as fh: - fh.write("noarch/flask-0.11.1-py_0.tar.bz2\n") - fh.write("osx/fly-2.5.2-0.tar.bz2\n") - - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", index_file=p - ) - - updated_packages = expected_repodata_json.get("packages", {}) - updated_packages["flask-0.11.1-py_0.tar.bz2"] = { - "build": "py_0", - "build_number": 0, - "depends": [ - "click >=2.0", - "itsdangerous >=0.21", - "jinja2 >=2.4", - "python", - "werkzeug >=0.7", - ], - "license": "BSD", - "md5": "f85925da2dc4f3cc2771be01fd644023", - "name": "flask", - "noarch": "python", - "sha256": "096466b5ff6c243fccbafe75951dc9b1456569f31235882ff29f30064219339c", - "size": 30720, - "subdir": "noarch", - "version": "0.11.1", - } - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.load(fh) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. 
This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520204, - "keywords": None, - "recipe_origin": None, - }, - "flask": { - "activate.d": False, - "binary_prefix": False, - "deactivate.d": False, - "description": "Flask is a microframework for Python based on Werkzeug and Jinja2. " - "It's intended for getting started very quickly and was developed with best intentions in mind.", - "dev_url": "https://github.com/mitsuhiko/flask", - "doc_source_url": None, - "doc_url": "http://flask.pocoo.org/docs/0.10/", - "home": "http://flask.pocoo.org/", - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "license": "BSD", - "post_link": False, - "pre_link": False, - "pre_unlink": False, - "recipe_origin": None, - "run_exports": {}, - "source_git_url": None, - "source_url": None, - "subdirs": ["noarch"], - "summary": "A microframework based on Werkzeug, Jinja2 and good intentions", - "tags": None, - "text_prefix": False, - "timestamp": 0, - "version": "0.11.1", - }, - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def _build_test_index(workdir): - """ - Copy repodata.json, packages to workdir for testing. - """ - - # Python 3.7 workaround "no dirs_exist_ok flag" - index_hotfix_pkgs = join(here, "index_hotfix_pkgs") - for path in os.scandir(index_hotfix_pkgs): - if path.is_dir(): - shutil.copytree( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - elif path.is_file(): - shutil.copyfile( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - - with open(os.path.join(workdir, TEST_SUBDIR, "repodata.json")) as f: - original_metadata = json.load(f) - - pkg_list = original_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -# SLOW -def test_gen_patch_py(testing_workdir): - """ - This is a channel-wide file that applies to many subdirs. It must have a function with this signature: - - def _patch_repodata(repodata, subdir): - - That function must return a dictionary of patch instructions, of the form: - - { - "patch_instructions_version": 1, - "packages": defaultdict(dict), - "revoke": [], - "remove": [], - } - - revoke and remove are lists of filenames. remove makes the file not show up - in the index (it may still be downloadable with a direct URL to the file). - revoke makes packages uninstallable by adding an unsatisfiable dependency. - This can be made installable by including a channel that has that package - (to be created by @jjhelmus). 
- - packages is a dictionary, where keys are package filenames. Values are - dictionaries similar to the contents of each package in repodata.json. Any - values in provided in packages here overwrite the values in repodata.json. - Any value set to None is removed. - """ - _build_test_index(testing_workdir) - - func = """ -def _patch_repodata(repodata, subdir): - pkgs = repodata["packages"] - import fnmatch - replacement_dict = {} - if "track_features_test-1.0-0.tar.bz2" in pkgs: - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - if "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkgs: - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": pkgs["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] + ["dummy"], - "features": None} - revoke_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "revoke_test*")] - remove_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "remove_test*")] - return { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": revoke_list, - "remove": remove_list, - } -""" - patch_file = os.path.join(testing_workdir, "repodata_patch.py") - with open(patch_file, "w") as f: - f.write(func) - - # indexing a second time with the same patchset should keep the removals - for i in (1, 2): - conda_build.index.update_index( - testing_workdir, - patch_generator=patch_file, - verbose=True, - ) - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - print("pass %s track features ok" % i) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - print("pass %s hotfix ok" % i) - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - print("pass %s revoke ok" % i) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - assert "remove_test-1.0-0.tar.bz2" in patched_metadata["removed"], ( - "removed list not populated in run %d" % i - ) - print("pass %s remove ok" % i) - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_metadata = json.load(f) - - pkg_list = pkg_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert ( - pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - ) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert ( - pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - ) - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - -def test_channel_patch_instructions_json(testing_workdir): - _build_test_index(testing_workdir) - - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - 
replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "patch_instructions.json"), "w" - ) as f: - json.dump(patch, f) - - conda_build.index.update_index(testing_workdir) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - formats = (("packages", ".tar.bz2"), ("packages.conda", ".conda")) - - for key, ext in formats: - pkg_list = patched_metadata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0" + ext] - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert pkg_list["track_features_test-1.0-0" + ext]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext in pkg_list - - -def test_patch_from_tarball(testing_workdir): - """This is how we expect external communities to provide patches to us. - We can't let them just give us Python files for us to run, because of the - security risk of arbitrary code execution.""" - _build_test_index(testing_workdir) - - # our hotfix metadata can be generated any way you want. Hard-code this here, but in general, - # people will use some python file to generate this. 
- - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - with open("patch_instructions.json", "w") as f: - json.dump(patch, f) - - with tarfile.open("patch_archive.tar.bz2", "w:bz2") as archive: - archive.add( - "patch_instructions.json", "%s/patch_instructions.json" % TEST_SUBDIR - ) - - conda_build.index.update_index( - testing_workdir, patch_generator="patch_archive.tar.bz2" - ) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -def test_index_of_removed_pkg(testing_metadata): - - archive_name = "test_index_of_removed_pkg-1.0-1.tar.bz2" - archive_destination = os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, archive_name - ) - - # copy the package - os.makedirs(os.path.join(testing_metadata.config.croot, TEST_SUBDIR)) - shutil.copy(os.path.join(here, "archives", archive_name), archive_destination) - - conda_build.api.update_index(testing_metadata.config.croot) - - # repodata.json should exist here - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert repodata["packages"] - - for f in [archive_destination]: - os.remove(f) - - # repodata.json should be empty here - conda_build.api.update_index(testing_metadata.config.croot) - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert not repodata["packages"] - with open( - os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, "repodata_from_packages.json" - ) - ) as f: - repodata = json.load(f) - assert not 
repodata["packages"] - - -def test_patch_instructions_with_missing_subdir(testing_workdir): - os.makedirs("linux-64") - os.makedirs("zos-z") - conda_build.api.update_index(".") # what is the current working directory? - # we use conda-forge's patch instructions because they don't have zos-z data, and that triggers an error - pkg = "conda-forge-repodata-patches" - url = "https://anaconda.org/conda-forge/{0}/20180828/download/noarch/{0}-20180828-0.tar.bz2".format( - pkg - ) - patch_instructions = download(url, os.path.join(os.getcwd(), "patches.tar.bz2")) - conda_build.api.update_index(".", patch_generator=patch_instructions) - - -def test_stat_cache_used(testing_workdir, mocker): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - cph_extract.assert_not_called() - - -def test_new_pkg_format_preferred(testing_workdir, mocker): - """Test that in one pass, the .conda file is extracted before the .tar.bz2, and the .tar.bz2 uses the cache""" - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - exts = (".tar.bz2", ".conda") - for ext in exts: - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ext), - test_package_path + ext, - ) - # mock the extract function, so that we can assert that it is not called - # with the .tar.bz2, because the .conda should be preferred - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - # extract should get called once by default. Within a channel, we assume that a .tar.bz2 and .conda have the same contents. - cph_extract.assert_called_once_with(test_package_path + ".conda", mock.ANY, "info") - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - # if we clear the stat cache, we force a re-examination. This re-examination will load files - # from the cache. 
This has been a source of bugs in the past, where the wrong cached file - # being loaded resulted in incorrect hashes/sizes for either the .tar.bz2 or .conda, depending - # on which of those 2 existed in the cache. - rm_rf(os.path.join(testing_workdir, "osx-64", "stat.json")) - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", verbose=True, debug=True - ) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - assert actual_repodata_json == expected_repodata_json - - -def test_new_pkg_format_stat_cache_used(testing_workdir, mocker): - # if we have old .tar.bz2 index cache stuff, assert that we pick up correct md5, sha26 and size for .conda - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".tar.bz2"), - test_package_path + ".tar.bz2", - ) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # mock the extract function, so that we can assert that it is not called, because the stat cache should exist - # if this doesn't work, something about the stat cache is confused. It's a little convoluted, because - # the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available - # because extracting them is much, much faster. - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".conda"), - test_package_path + ".conda", - ) - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - cph_extract.assert_not_called() - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - -@pytest.mark.skipif( - not hasattr(context, "use_only_tar_bz2") or getattr(context, "use_only_tar_bz2"), - reason="conda is set to auto-disable .conda for old conda-build.", -) -def test_current_index_reduces_space(): - repodata = os.path.join( - os.path.dirname(__file__), "index_data", "time_cut", "repodata.json" - ) - with open(repodata) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 7 - assert len(repodata["packages.conda"]) == 3 - trimmed_repodata = conda_build.index._build_current_repodata( - "linux-64", repodata, None - ) - - tar_bz2_keys = { - "two-because-satisfiability-1.2.11-h7b6447c_3.tar.bz2", 
- "two-because-satisfiability-1.2.10-h7b6447c_3.tar.bz2", - "depends-on-older-1.2.10-h7b6447c_3.tar.bz2", - "ancient-package-1.2.10-h7b6447c_3.tar.bz2", - "one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2", - } - # conda 4.7 removes .tar.bz2 files in favor of .conda files - if conda_47: - tar_bz2_keys.remove("one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2") - - # .conda files will replace .tar.bz2 files. Older packages that are necessary for satisfiability will remain - assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys - if conda_47: - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.3.10-h7b6447c_3.conda" - } - - # we can keep more than one version series using a collection of keys - trimmed_repodata = conda_build.index._build_current_repodata( - "linux-64", repodata, {"one-gets-filtered": ["1.2", "1.3"]} - ) - if conda_47: - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.2.11-h7b6447c_3.conda", - "one-gets-filtered-1.3.10-h7b6447c_3.conda", - } - else: - assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys | { - "one-gets-filtered-1.2.11-h7b6447c_3.tar.bz2" - } - - -def test_current_index_version_keys_keep_older_packages(testing_workdir): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - - # pass no version file - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - # only the newest version is kept - assert len(repodata["packages"]) == 1 - assert list(repodata["packages"].values())[0]["version"] == "2.0" - - # pass version file - conda_build.api.update_index( - pkg_dir, current_index_versions=os.path.join(pkg_dir, "versions.yml") - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 2 - - # pass dict that is equivalent to version file - conda_build.api.update_index( - pkg_dir, current_index_versions={"dummy-package": ["1.0"]} - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert list(repodata["packages"].values())[0]["version"] == "1.0" - - -def test_channeldata_picks_up_all_versions_of_run_exports(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - run_exports = repodata["packages"]["run_exports_versions"]["run_exports"] - assert len(run_exports) == 2 - assert "1.0" in run_exports - assert "2.0" in run_exports - - -def test_index_invalid_packages(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "corrupt") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 0 diff --git a/tests/test_inspect.py b/tests/test_inspect.py index d2c4d812fd..cd90ba98ae 100644 --- a/tests/test_inspect.py +++ b/tests/test_inspect.py @@ -9,35 +9,25 @@ def test_inspect_linkages(): - if sys.platform == 'win32': + if sys.platform == "win32": with pytest.raises(SystemExit) as exc: out_string = api.inspect_linkages("python") - assert 'conda inspect linkages is only implemented in Linux and OS X' in exc + assert "conda inspect linkages is only implemented in Linux and OS X" in exc else: out_string = api.inspect_linkages("python") - assert 'libncursesw' in out_string + assert 
"libncursesw" in out_string def test_inspect_objects(): - if sys.platform != 'darwin': + if sys.platform != "darwin": with pytest.raises(SystemExit) as exc: out_string = api.inspect_objects("python") - assert 'conda inspect objects is only implemented in OS X' in exc + assert "conda inspect objects is only implemented in OS X" in exc else: out_string = api.inspect_objects("python") - assert re.search('rpath:.*@loader_path', out_string) + assert re.search("rpath:.*@loader_path", out_string) def test_channel_installable(): # make sure the default channel is installable as a reference - assert api.test_installable('conda-team') - -# # create a channel that is not installable to validate function - -# platform = os.path.join(testing_workdir, subdir) -# output_file = os.path.join(platform, "empty_sections-0.0-0.tar.bz2") - -# # create the index so conda can find the file -# api.update_index(platform) - -# assert not api.test_installable(channel=to_url(testing_workdir)) + assert api.test_installable("conda-team") diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py new file mode 100644 index 0000000000..dae6d7f6ca --- /dev/null +++ b/tests/test_inspect_pkg.py @@ -0,0 +1,273 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json +import os +from pathlib import Path +from uuid import uuid4 + +import pytest +from conda.core.prefix_data import PrefixData + +from conda_build.inspect_pkg import which_package +from conda_build.utils import on_win + + +def test_which_package(tmp_path: Path): + # create a dummy environment + (tmp_path / "conda-meta").mkdir() + (tmp_path / "conda-meta" / "history").touch() + + # dummy files + (tmp_path / "hardlinkA").touch() # packageA + (tmp_path / "shared").touch() # packageA & packageB + (tmp_path / "internal").symlink_to(tmp_path / "hardlinkA") # packageA + (tmp_path / "external").symlink_to(tmp_path / "hardlinkB") # packageA + (tmp_path / "hardlinkB").touch() # packageB + # Files might be deleted from the prefix during the build, but they should + # still be recognized since they will be present in the run environment. 
+ (tmp_path / "deleted").unlink(missing_ok=True) # packageA + (tmp_path / "deleted_shared").unlink(missing_ok=True) # packageA & packageB + + # a dummy package with a hardlink file, shared file, internal softlink, + # external softlink, deleted file, and deleted shared file + (tmp_path / "conda-meta" / "packageA-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageA-channel", + "files": [ + "hardlinkA", + "shared", + "internal", + "external", + "deleted", + "deleted_shared", + ], + "name": "packageA", + "paths_data": { + "paths": [ + { + "_path": "hardlinkA", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "internal", + "path_type": "softlink", + "size_in_bytes": 0, + }, + { + "_path": "external", + "path_type": "softlink", + "size_in_bytes": 0, + }, + { + "_path": "deleted", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "deleted_shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + # a dummy package with a hardlink file, shared file, and deleted shared file + (tmp_path / "conda-meta" / "packageB-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageB-channel", + "files": ["hardlinkB", "shared", "deleted_shared"], + "name": "packageB", + "paths_data": { + "paths": [ + { + "_path": "hardlinkB", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "deleted_shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # fetch package records + pd = PrefixData(tmp_path) + precA = pd.get("packageA") + precB = pd.get("packageB") + + # test returned package records given a path + precs_missing = list(which_package(tmp_path / "missing", tmp_path)) + assert not precs_missing + + precs_Hardlinka = list(which_package(tmp_path / "Hardlinka", tmp_path)) + if on_win: + # On Windows, be lenient and allow case-insensitive path comparisons. 
+ assert len(precs_Hardlinka) == 1 + assert set(precs_Hardlinka) == {precA} + else: + assert not precs_Hardlinka + + precs_hardlinkA = list(which_package(tmp_path / "hardlinkA", tmp_path)) + assert len(precs_hardlinkA) == 1 + assert set(precs_hardlinkA) == {precA} + + precs_shared = list(which_package(tmp_path / "shared", tmp_path)) + assert len(precs_shared) == 2 + assert set(precs_shared) == {precA, precB} + + precs_internal = list(which_package(tmp_path / "internal", tmp_path)) + assert len(precs_internal) == 1 + assert set(precs_internal) == {precA} + + precs_external = list(which_package(tmp_path / "external", tmp_path)) + assert len(precs_external) == 1 + assert set(precs_external) == {precA} + + precs_hardlinkB = list(which_package(tmp_path / "hardlinkB", tmp_path)) + assert len(precs_hardlinkB) == 1 + assert set(precs_hardlinkB) == {precB} + + precs_deleted = list(which_package(tmp_path / "deleted", tmp_path)) + assert len(precs_deleted) == 1 + assert set(precs_deleted) == {precA} + + precs_deleted_shared = list(which_package(tmp_path / "deleted_shared", tmp_path)) + assert len(precs_deleted_shared) == 2 + assert set(precs_deleted_shared) == {precA, precB} + + # reuse environment, regression test for #5136 + (tmp_path / "conda-meta" / "packageA-1-0.json").unlink() + (tmp_path / "conda-meta" / "packageB-1-0.json").unlink() + + # a dummy package with a hardlink file + (tmp_path / "conda-meta" / "packageC-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageC-channel", + "files": ["hardlinkA"], + "name": "packageC", + "paths_data": { + "paths": [ + { + "_path": "hardlinkA", + "path_type": "hardlink", + "size_in_bytes": 0, + } + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # fetch package records + PrefixData._cache_.clear() + pd = PrefixData(tmp_path) + precC = pd.get("packageC") + + # test returned package records given a path + precs_reused = list(which_package(tmp_path / "hardlinkA", tmp_path)) + assert len(precs_reused) == 1 + assert set(precs_reused) == {precC} + + +@pytest.mark.benchmark +def test_which_package_battery(tmp_path: Path): + # regression: https://github.com/conda/conda-build/issues/5126 + + # NOTE: CodSpeed on Python 3.12+ activates the stack profiler trampoline backend + # and thus runs the test twice (once without profiling and once with profiling), + # unfortunately this means that on the second iteration tmp_path is no longer empty + # so we create a randomized unique directory to compensate + tmp_path = tmp_path / uuid4().hex + tmp_path.mkdir() + + # create a dummy environment + (tmp_path / "conda-meta").mkdir() + (tmp_path / "conda-meta" / "history").touch() + (tmp_path / "lib").mkdir() + + # dummy packages with files + removed = [] + for _ in range(10): + name = f"package_{uuid4().hex}" + + # mock a package with 100 files + files = [f"lib/{uuid4().hex}" for _ in range(100)] + for file in files: + (tmp_path / file).touch() + + # mock a removed file + remove = f"lib/{uuid4().hex}" + files.append(remove) + removed.append(remove) + + (tmp_path / "conda-meta" / f"{name}-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": f"{name}-channel", + "files": files, + "name": name, + "paths_data": { + "paths": [ + {"_path": file, "path_type": "hardlink", "size_in_bytes": 0} + for file in files + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # every path should return exactly one package + for subdir, _, files in os.walk(tmp_path / "lib"): + for file in 
files: + path = Path(subdir, file) + + assert len(list(which_package(path, tmp_path))) == 1 + + # removed files should still return a package + # this occurs when, e.g., a build script removes files installed by another package + # (post-install scripts removing files from the run environment is less + # likely and not covered) + for file in removed: + assert len(list(which_package(tmp_path / file, tmp_path))) == 1 + + # missing files should return no packages + assert not len(list(which_package(tmp_path / "missing", tmp_path))) diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index a6e6eedefa..f19ea31997 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -1,167 +1,171 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from typing import TYPE_CHECKING + import pytest +from frozendict import deepfreeze from conda_build import jinja_context -from conda_build.utils import HashableDict + +if TYPE_CHECKING: + from pathlib import Path + from typing import Any def test_pin_default(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test') - assert pin == 'test >=1.2.3,<2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test") + assert pin == "test >=1.2.3,<2.0a0" def test_pin_compatible_exact(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3 abc_0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', exact=True) - assert pin == 'test 1.2.3 abc_0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3 abc_0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", exact=True) + assert pin == "test 1.2.3 abc_0" def test_pin_jpeg_style_default(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['jpeg 9d 0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'jpeg') - assert pin == 'jpeg >=9d,<10a' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["jpeg 9d 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "jpeg") + assert pin == "jpeg >=9d,<10a" def test_pin_jpeg_style_minor(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['jpeg 9d 0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'jpeg', max_pin='x.x') - assert pin == 'jpeg >=9d,<9e' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["jpeg 9d 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "jpeg", max_pin="x.x") + assert pin == "jpeg >=9d,<9e" def test_pin_openssl_style_bugfix(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['openssl 1.0.2j 0'], [], None - pin = 
jinja_context.pin_compatible(testing_metadata, 'openssl', max_pin='x.x.x') - assert pin == 'openssl >=1.0.2j,<1.0.3a' - pin = jinja_context.pin_compatible(testing_metadata, 'openssl', max_pin='x.x.x.x') - assert pin == 'openssl >=1.0.2j,<1.0.2k' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["openssl 1.0.2j 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "openssl", max_pin="x.x.x") + assert pin == "openssl >=1.0.2j,<1.0.3a" + pin = jinja_context.pin_compatible(testing_metadata, "openssl", max_pin="x.x.x.x") + assert pin == "openssl >=1.0.2j,<1.0.2k" def test_pin_major_minor(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin='x.x') - assert pin == 'test >=1.2.3,<1.3.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin="x.x") + assert pin == "test >=1.2.3,<1.3.0a0" def test_pin_excessive_max_pin(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin='x.x.x.x.x.x') - assert pin == 'test >=1.2.3,<1.2.4.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin="x.x.x.x.x.x") + assert pin == "test >=1.2.3,<1.2.4.0a0" def test_pin_upper_bound(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', upper_bound="3.0") - assert pin == 'test >=1.2.3,<3.0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", upper_bound="3.0") + assert pin == "test >=1.2.3,<3.0" def test_pin_lower_bound(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', lower_bound=1.0) - assert pin == 'test >=1.0,<2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", lower_bound=1.0) + assert pin == "test >=1.0,<2.0a0" def test_pin_none_min(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', min_pin=None) - assert pin == 'test <2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", min_pin=None) + assert pin == "test 
<2.0a0" def test_pin_none_max(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin=None) - assert pin == 'test >=1.2.3' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin=None) + assert pin == "test >=1.2.3" def test_pin_subpackage_exact(testing_metadata): name = testing_metadata.name() - output_dict = {'name': name} - testing_metadata.meta['outputs'] = [output_dict] + output_dict = {"name": name} + testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) - testing_metadata.other_outputs = {(name, HashableDict(testing_metadata.config.variant)): - (output_dict, fm)} + testing_metadata.other_outputs = { + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) + } pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True) assert len(pin.split()) == 3 def test_pin_subpackage_expression(testing_metadata): name = testing_metadata.name() - output_dict = {'name': name} - testing_metadata.meta['outputs'] = [output_dict] + output_dict = {"name": name} + testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) - testing_metadata.other_outputs = {(name, HashableDict(testing_metadata.config.variant)): - (output_dict, fm)} + testing_metadata.other_outputs = { + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) + } pin = jinja_context.pin_subpackage(testing_metadata, name) assert len(pin.split()) == 2 def test_resolved_packages(testing_metadata): - testing_metadata.meta['requirements']['build'] = ['numpy'] - packages = jinja_context.resolved_packages(testing_metadata, 'build') + testing_metadata.meta["requirements"]["build"] = ["numpy"] + packages = jinja_context.resolved_packages(testing_metadata, "build") assert all(len(pkg.split()) == 3 for pkg in packages) - assert any('numpy' == pkg.split()[0] for pkg in packages) - assert any('python' == pkg.split()[0] for pkg in packages) - - -try: - try: - # Recommended for setuptools 61.0.0+ - # (though may disappear in the future) - from setuptools.config.setupcfg import read_configuration - except ImportError: - from setuptools.config import read_configuration - del read_configuration -except ImportError: - _has_read_configuration = False -else: - _has_read_configuration = True - - -@pytest.mark.skipif(not _has_read_configuration, - reason="setuptools <30.3.0 cannot read metadata / options from 'setup.cfg'") -def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmpdir): - setup_py = tmpdir.join('setup.py') - setup_cfg = tmpdir.join('setup.cfg') - setup_py.write( - 'from setuptools import setup\n' - 'setup(name="name_from_setup_py")\n' + assert any("numpy" == pkg.split()[0] for pkg in packages) + assert any("python" == pkg.split()[0] for pkg in packages) + + +def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmp_path: Path): + setup_py = tmp_path / "setup.py" + setup_cfg = tmp_path / "setup.cfg" + setup_py.write_text( + 'from setuptools import setup\nsetup(name="name_from_setup_py")\n' ) - setup_cfg.write( - '[metadata]\n' - 'name = name_from_setup_cfg\n' - 'version = version_from_setup_cfg\n' - '[options.extras_require]\n' - 'extra = 
extra_package\n' + setup_cfg.write_text( + "[metadata]\n" + "name = name_from_setup_cfg\n" + "version = version_from_setup_cfg\n" + "[options.extras_require]\n" + "extra = extra_package\n" ) - setup_file = str(setup_py) - setuptools_data = jinja_context.load_setup_py_data(testing_metadata, setup_file) + setuptools_data = jinja_context.load_setup_py_data(testing_metadata, str(setup_py)) # ensure that setup.cfg has priority over setup.py - assert setuptools_data['name'] == 'name_from_setup_cfg' - assert setuptools_data['version'] == 'version_from_setup_cfg' - assert setuptools_data['extras_require'] == {'extra': ['extra_package']} - - -@pytest.mark.parametrize("filename,fmt,data,expected", [ - ("file.json", None, '{"a": 1}', {"a": 1}), - ("json_file", "json", '{"a": 1}', {"a": 1}), - ("file.toml", None, '[tbl]\na = 1', {"tbl": {"a": 1}}), - ("toml_file", "toml", '[tbl]\na = 1', {"tbl": {"a": 1}}), - ("file.yaml", None, 'a: 1\nb:\n - c: 2', {"a": 1, "b": [{"c": 2}]}), -]) -def test_load_file_data(tmpdir, filename, fmt, data, expected, testing_metadata): - f = tmpdir.join(filename) - f.write(data) - fn = str(f) - assert jinja_context.load_file_data(fn, fmt, config=testing_metadata.config) == expected + assert setuptools_data["name"] == "name_from_setup_cfg" + assert setuptools_data["version"] == "version_from_setup_cfg" + assert setuptools_data["extras_require"] == {"extra": ["extra_package"]} + + +@pytest.mark.parametrize( + "filename,fmt,data,expected", + [ + ("file.json", None, '{"a": 1}', {"a": 1}), + ("json_file", "json", '{"a": 1}', {"a": 1}), + ("file.toml", None, "[tbl]\na = 1", {"tbl": {"a": 1}}), + ("toml_file", "toml", "[tbl]\na = 1", {"tbl": {"a": 1}}), + ("file.yaml", None, "a: 1\nb:\n - c: 2", {"a": 1, "b": [{"c": 2}]}), + ], +) +def test_load_file_data( + tmp_path: Path, + filename: str, + fmt: str | None, + data: str, + expected: Any, + testing_metadata, +): + path = tmp_path / filename + path.write_text(data) + assert ( + jinja_context.load_file_data(str(path), fmt, config=testing_metadata.config) + == expected + ) diff --git a/tests/test_license_family.py b/tests/test_license_family.py index 775e5898a8..fc0882f7cc 100644 --- a/tests/test_license_family.py +++ b/tests/test_license_family.py @@ -1,147 +1,74 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from conda_build.license_family import guess_license_family, allowed_license_families, ensure_valid_license_family import pytest - -def test_new_vs_previous_guesses_match(): +from conda_build.license_family import ensure_valid_license_family, guess_license_family + +LICENSE_FAMILY = { + # AGPL + "Affero GPL": "AGPL", + # APACHE + "Apache License (== 2.0)": "APACHE", + "Apache License 2.0": "APACHE", + # BSD + "BSD License": "BSD", + "BSD_2_clause + file LICENSE": "BSD", + "BSD_3_clause + file LICENSE": "BSD", + # CC + "CC0": "CC", + # GPL + "GPL": "GPL", # previously, GPL3 was incorrectly preferred + # GPL2 + "GNU General Public License v2 or later (GPLv2+)": "GPL2", + "GPL-2 | file LICENSE": "GPL2", + "GPL-2": "GPL2", + # GPL3 + "GNU General Public License some stuff then a 3 then stuff": "GPL3", + "GPL (>= 2) | file LICENSE": "GPL3", + "GPL (>= 2)": "GPL3", + "GPL (>= 3) | file LICENCE": "GPL3", + "GPL (>= 3)": "GPL3", + "GPL 3": "GPL3", + "GPL-2 | GPL-3 | file LICENSE": "GPL3", # previously, Public-Domain was incorrectly preferred + "GPL-2 | GPL-3": "GPL3", + "GPL-3 | file LICENSE": "GPL3", + "GPL-3": "GPL3", + # LGPL + "BSD License and GNU Library or Lesser General Public License (LGPL)": 
"LGPL", + "GNU Lesser General Public License (LGPL)": "LGPL", + "GNU Lesser General Public License": "LGPL", + "LGPL (>= 2)": "LGPL", + "LGPL-2": "LGPL", + "LGPL-2.1": "LGPL", + "LGPL-3": "LGPL", + # MIT + "MIT + file LICENSE | Unlimited": "MIT", + "MIT + file LICENSE": "MIT", + "MIT License": "MIT", + "Old MIT": "MIT", + "Unlimited": "MIT", # unfortunate corner case + # NONE + None: "NONE", + # OTHER + "BSL-1.0": "OTHER", + "Custom free software license": "OTHER", + "file LICENSE (FOSS)": "OTHER", + "Free software (X11 License)": "OTHER", + "Lucent Public License": "OTHER", + "Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)": "OTHER", + "zlib (http://zlib.net/zlib_license.html)": "OTHER", +} + + +@pytest.mark.parametrize("license,family", LICENSE_FAMILY.items()) +def test_guess_license_family(license, family): """Test cases where new and deprecated functions match""" - - cens = "GPL (>= 3)" - fam = guess_license_family(cens) - assert fam == 'GPL3' - - cens = 'GNU Lesser General Public License' - fam = guess_license_family(cens) - assert fam == 'LGPL', f'guess_license_family({cens}) is {fam}' - - cens = 'GNU General Public License some stuff then a 3 then stuff' - fam = guess_license_family(cens) - assert fam == 'GPL3', f'guess_license_family({cens}) is {fam}' - - cens = 'Affero GPL' - fam = guess_license_family(cens) - assert fam == 'AGPL', f'guess_license_family({cens}) is {fam}' - - -def test_new_vs_previous_guess_differ_gpl(): - """Test cases where new and deprecated functions differ - - license = 'GPL' - New guess is GPL, which is an allowed family, hence the most accurate. - Previously, GPL3 was chosen over GPL - """ - cens = "GPL" - fam = guess_license_family(cens) - assert fam == 'GPL' - - -def test_new_vs_previous_guess_differ_multiple_gpl(): - """Test cases where new and deprecated functions differ - - license = 'GPL-2 | GPL-3 | file LICENSE' - New guess is GPL-3, which is the most accurate. - Previously, somehow Public-Domain is closer than GPL2 or GPL3! - """ - cens = 'GPL-2 | GPL-3 | file LICENSE' - fam = guess_license_family(cens) - assert fam == 'GPL3', f'guess_license_family_from_index({cens}) is {fam}' - - -def test_old_warnings_no_longer_fail(): - # the following previously threw warnings. 
Came from r/linux-64 - warnings = {'MIT License', 'GNU Lesser General Public License (LGPL)', - 'GPL-2 | GPL-3 | file LICENSE', 'GPL (>= 3) | file LICENCE', - 'BSL-1.0', 'GPL (>= 2)', 'file LICENSE (FOSS)', - 'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', - 'MIT + file LICENSE', 'GPL-2 | GPL-3', 'GPL (>= 2) | file LICENSE', - 'Unlimited', 'GPL-3 | file LICENSE', - 'GNU General Public License v2 or later (GPLv2+)', 'LGPL-2.1', - 'LGPL-2', 'LGPL-3', 'GPL', - 'zlib (http://zlib.net/zlib_license.html)', - 'Free software (X11 License)', 'Custom free software license', - 'Old MIT', 'GPL 3', 'Apache License (== 2.0)', 'GPL (>= 3)', None, - 'LGPL (>= 2)', 'BSD_2_clause + file LICENSE', 'GPL-3', 'GPL-2', - 'BSD License and GNU Library or Lesser General Public License (LGPL)', - 'GPL-2 | file LICENSE', 'BSD_3_clause + file LICENSE', 'CC0', - 'MIT + file LICENSE | Unlimited', 'Apache License 2.0', - 'BSD License', 'Lucent Public License'} - - for cens in warnings: - fam = guess_license_family(cens) - print(f'{cens}:{fam}') - assert fam in allowed_license_families - - -def test_gpl2(): - licenses = {'GPL-2', 'GPL-2 | file LICENSE', - 'GNU General Public License v2 or later (GPLv2+)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'GPL2' - - -def test_not_gpl2(): - licenses = {'GPL (>= 2)', 'LGPL (>= 2)', 'GPL', - 'LGPL-3', 'GPL 3', 'GPL (>= 3)', - 'Apache License (== 2.0)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam != 'GPL2' - - -def test_gpl3(): - licenses = {'GPL 3', 'GPL-3', 'GPL-3 | file LICENSE', - 'GPL-2 | GPL-3 | file LICENSE', 'GPL (>= 3) | file LICENCE', - 'GPL (>= 2)', 'GPL-2 | GPL-3', 'GPL (>= 2) | file LICENSE'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'GPL3' - - -def test_lgpl(): - licenses = {'GNU Lesser General Public License (LGPL)', 'LGPL-2.1', - 'LGPL-2', 'LGPL-3', 'LGPL (>= 2)', - 'BSD License and GNU Library or Lesser General Public License (LGPL)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'LGPL' - - -def test_mit(): - licenses = {'MIT License', 'MIT + file LICENSE', 'Old MIT'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'MIT' - - -def test_unlimited(): - """The following is an unfortunate case where MIT is in UNLIMITED - - We could add words to filter out, but it would be hard to keep track of... 
- """ - cens = 'Unlimited' - assert guess_license_family(cens) == 'MIT' - - -def test_cc(): - fam = guess_license_family('CC0') - assert fam == 'CC' - - -def test_other(): - licenses = {'file LICENSE (FOSS)', - 'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', - 'zlib (http://zlib.net/zlib_license.html)', - 'Free software (X11 License)', 'Custom free software license'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'OTHER' + assert guess_license_family(license) == family def test_ensure_valid_family(testing_metadata): - testing_metadata.meta['about']['license_family'] = 'public-domain' + testing_metadata.meta["about"]["license_family"] = "public-domain" ensure_valid_license_family(testing_metadata.meta) with pytest.raises(RuntimeError): - testing_metadata.meta['about']['license_family'] = 'local H' + testing_metadata.meta["about"]["license_family"] = "local H" ensure_valid_license_family(testing_metadata.meta) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 2bf9b0c045..1b9fc34258 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -1,136 +1,227 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os import subprocess +import sys +from itertools import product +from typing import TYPE_CHECKING import pytest +from conda import __version__ as conda_version +from conda.base.context import context +from packaging.version import Version + +from conda_build import api +from conda_build.config import Config +from conda_build.metadata import ( + FIELDS, + OPTIONALLY_ITERABLE_FIELDS, + MetaData, + _hash_dependencies, + get_selectors, + select_lines, + yamlize, +) +from conda_build.utils import DEFAULT_SUBDIRS +from conda_build.variants import DEFAULT_VARIANTS -from conda_build.metadata import select_lines, MetaData -from conda_build import api, conda_interface -from .utils import thisdir, metadata_dir +from .utils import metadata_dir, metadata_path, thisdir -from conda_build.utils import DEFAULT_SUBDIRS -from conda_build.metadata import _hash_dependencies +if TYPE_CHECKING: + from pytest import MonkeyPatch def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): - testing_metadata._meta_path = os.path.join(testing_workdir, 'meta.yaml') - testing_metadata._meta_name = 'meta.yaml' - with open(testing_metadata.meta_path, 'w') as f: - f.write('http://hg.something.com') + testing_metadata._meta_path = os.path.join(testing_workdir, "meta.yaml") + testing_metadata._meta_name = "meta.yaml" + with open(testing_metadata.meta_path, "w") as f: + f.write("http://hg.something.com") assert not testing_metadata.uses_vcs_in_meta assert not testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('hg something something') + with open(testing_metadata.meta_path, "w") as f: + f.write("hg something something") assert not testing_metadata.uses_vcs_in_meta assert testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('hg.exe something something') + with open(testing_metadata.meta_path, "w") as f: + f.write("hg.exe something something") assert not testing_metadata.uses_vcs_in_meta assert testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('HG_WEEEEE') + with open(testing_metadata.meta_path, "w") as f: + f.write("HG_WEEEEE") assert testing_metadata.uses_vcs_in_meta assert not testing_metadata.uses_vcs_in_build def test_select_lines(): - lines 
= """ -test -test [abc] no -test [abc] # no - -test [abc] - 'quoted # [abc] ' - "quoted # [abc] yes " -test # stuff [abc] yes -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} # stuff [abc] yes -test {{ JINJA_VAR[:2] }} # stuff yes [abc] -test {{ JINJA_VAR[:2] }} # [abc] stuff yes -{{ environ["test"] }} # [abc] -""" - - assert select_lines(lines, {'abc': True}, variants_in_place=True) == """ -test -test [abc] no -test [abc] # no - -test - 'quoted' - "quoted" -test -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -{{ environ["test"] }} -""" - assert select_lines(lines, {'abc': False}, variants_in_place=True) == """ -test -test [abc] no -test [abc] # no - -test {{ JINJA_VAR[:2] }} -""" + lines = "\n".join( + ( + "", # preserve leading newline + "test", + "test [abc] no", + "test [abc] # no", + " ' test ' ", + ' " test " ', + "", # preserve newline + "# comment line", # preserve comment line (but not the comment) + "test [abc]", + " 'quoted # [abc] '", + ' "quoted # [abc] yes "', + "test # stuff [abc] yes", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }} # stuff [abc] yes", + "test {{ JINJA_VAR[:2] }} # stuff yes [abc]", + "test {{ JINJA_VAR[:2] }} # [abc] stuff yes", + '{{ environ["test"] }} # [abc]', + "", # preserve trailing newline + ) + ) + + assert select_lines(lines, {"abc": True}, variants_in_place=True) == "\n".join( + ( + "", # preserve leading newline + "test", + "test [abc] no", + "test [abc] # no", + " ' test '", + ' " test "', + "", # preserve newline + "", # preserve comment line (but not the comment) + "test", + " 'quoted'", + ' "quoted"', + "test", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + '{{ environ["test"] }}', + "", # preserve trailing newline + ) + ) + assert select_lines(lines, {"abc": False}, variants_in_place=True) == "\n".join( + ( + "", # preserve leading newline + "test", + "test [abc] no", + "test [abc] # no", + " ' test '", + ' " test "', + "", # preserve newline + "", # preserve comment line (but not the comment) + "test {{ JINJA_VAR[:2] }}", + "", # preserve trailing newline + ) + ) + + +@pytest.mark.benchmark +def test_select_lines_battery(): + test_foo = "test [foo]" + test_bar = "test [bar]" + test_baz = "test [baz]" + test_foo_and_bar = "test [foo and bar]" + test_foo_and_baz = "test [foo and baz]" + test_foo_or_bar = "test [foo or bar]" + test_foo_or_baz = "test [foo or baz]" + + lines = "\n".join( + ( + test_foo, + test_bar, + test_baz, + test_foo_and_bar, + test_foo_and_baz, + test_foo_or_bar, + test_foo_or_baz, + ) + * 10 + ) + + for _ in range(10): + for foo, bar, baz in product((True, False), repeat=3): + namespace = {"foo": foo, "bar": bar, "baz": baz} + selection = ( + ["test"] + * ( + foo + + bar + + baz + + (foo and bar) + + (foo and baz) + + (foo or bar) + + (foo or baz) + ) + * 10 + ) + selection = "\n".join(selection) + "\n" # trailing newline + assert select_lines(lines, namespace, variants_in_place=True) == selection def test_disallow_leading_period_in_version(testing_metadata): - testing_metadata.meta['package']['version'] = '.ste.ve' + testing_metadata.meta["package"]["version"] = ".ste.ve" testing_metadata.final = True with pytest.raises(ValueError): testing_metadata.version() def test_disallow_dash_in_features(testing_metadata): - testing_metadata.meta['build']['features'] = ['abc'] + testing_metadata.meta["build"]["features"] = ["abc"] testing_metadata.parse_again() with 
pytest.raises(ValueError): - testing_metadata.meta['build']['features'] = ['ab-c'] + testing_metadata.meta["build"]["features"] = ["ab-c"] testing_metadata.parse_again() def test_append_section_data(testing_metadata): testing_metadata.final = False testing_metadata.parse_again() - requirements_len = len(testing_metadata.meta['requirements'].get('build', [])) - testing_metadata.config.append_sections_file = os.path.join(thisdir, 'test-append.yaml') + requirements_len = len(testing_metadata.meta["requirements"].get("build", [])) + testing_metadata.config.append_sections_file = os.path.join( + thisdir, "test-append.yaml" + ) testing_metadata.final = False testing_metadata.parse_again() - assert len(testing_metadata.meta['requirements']['build']) == requirements_len + 1 - assert 'frank' in testing_metadata.meta['requirements']['build'] + assert len(testing_metadata.meta["requirements"]["build"]) == requirements_len + 1 + assert "frank" in testing_metadata.meta["requirements"]["build"] def test_clobber_section_data(testing_metadata): - testing_metadata.config.clobber_sections_file = os.path.join(thisdir, 'test-clobber.yaml') + testing_metadata.config.clobber_sections_file = os.path.join( + thisdir, "test-clobber.yaml" + ) testing_metadata.final = False testing_metadata.parse_again() # a field that should be clobbered - testing_metadata.meta['about']['summary'] = 'yep' + testing_metadata.meta["about"]["summary"] = "yep" # a field that should stay the same - testing_metadata.meta['about']['home'] = 'sweet home' + testing_metadata.meta["about"]["home"] = "sweet home" @pytest.mark.serial def test_build_bootstrap_env_by_name(testing_metadata): - assert not any("git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])), \ - testing_metadata.meta["requirements"].get("build", []) + assert not any( + "git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", []) + ), testing_metadata.meta["requirements"].get("build", []) try: cmd = "conda create -y -n conda_build_bootstrap_test git" subprocess.check_call(cmd.split()) testing_metadata.config.bootstrap = "conda_build_bootstrap_test" testing_metadata.final = False testing_metadata.parse_again() - assert any("git" in pkg for pkg in testing_metadata.meta["requirements"]["build"]), \ - testing_metadata.meta["requirements"]["build"] + assert any( + "git" in pkg for pkg in testing_metadata.meta["requirements"]["build"] + ), testing_metadata.meta["requirements"]["build"] finally: cmd = "conda remove -y -n conda_build_bootstrap_test --all" subprocess.check_call(cmd.split()) def test_build_bootstrap_env_by_path(testing_metadata): - assert not any("git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])), \ - testing_metadata.meta["requirements"].get("build", []) + assert not any( + "git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", []) + ), testing_metadata.meta["requirements"].get("build", []) path = os.path.join(thisdir, "conda_build_bootstrap_test") try: cmd = f"conda create -y -p {path} git" @@ -138,91 +229,136 @@ def test_build_bootstrap_env_by_path(testing_metadata): testing_metadata.config.bootstrap = path testing_metadata.final = False testing_metadata.parse_again() - assert any("git" in pkg for pkg in testing_metadata.meta["requirements"]["build"]), \ - testing_metadata.meta["requirements"]["build"] + assert any( + "git" in pkg for pkg in testing_metadata.meta["requirements"]["build"] + ), testing_metadata.meta["requirements"]["build"] finally: cmd = f"conda remove -y -p 
{path} --all" subprocess.check_call(cmd.split()) -@pytest.mark.parametrize('py_ver', [('2.7', 'vs2008_win-x86_64'), - ('3.4', 'vs2010_win-x86_64'), - ('3.7', 'vs2017_win-x86_64'), ]) -def test_native_compiler_metadata_win(testing_config, py_ver, mocker): - testing_config.platform = 'win' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), config=testing_config, - variants={'target_platform': 'win-x86_64'}, - permit_unsatisfiable_variants=True, finalize=False, - bypass_env_check=True, python=py_ver[0])[0][0] - # see parameterization - py_ver[1] is the compiler package name - assert any(dep.startswith(py_ver[1]) for dep in metadata.meta['requirements']['build']) - - -def test_native_compiler_metadata_linux(testing_config, mocker): - testing_config.platform = 'linux' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), - config=testing_config, permit_unsatisfiable_variants=True, - finalize=False, bypass_env_check=True)[0][0] - _64 = '64' if conda_interface.bits == 64 else '32' - assert any(dep.startswith('gcc_linux-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gxx_linux-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gfortran_linux-' + _64) for dep in metadata.meta['requirements']['build']) - - -def test_native_compiler_metadata_osx(testing_config, mocker): - testing_config.platform = 'osx' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), - config=testing_config, permit_unsatisfiable_variants=True, - finalize=False, bypass_env_check=True)[0][0] - _64 = '64' if conda_interface.bits == 64 else '32' - assert any(dep.startswith('clang_osx-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('clangxx_osx-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gfortran_osx-' + _64) for dep in metadata.meta['requirements']['build']) +@pytest.mark.parametrize( + "platform,arch,python,compilers", + [ + ("win", "x86_64", "2.7", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.1", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.2", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.3", {"vs2010_win-x86_64"}), + ("win", "x86_64", "3.4", {"vs2010_win-x86_64"}), + ("win", "x86_64", "3.5", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.6", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.7", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.8", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.9", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.10", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.11", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.12", {"vs2017_win-x86_64"}), + ("linux", "32", "3.12", {"gcc_linux-32", "gxx_linux-32"}), + ("linux", "64", "3.12", {"gcc_linux-64", "gxx_linux-64"}), + ("osx", "32", "3.12", {"clang_osx-32", "clangxx_osx-32"}), + ("osx", "64", "3.12", {"clang_osx-64", "clangxx_osx-64"}), + ], +) +def test_native_compiler_metadata( + platform: str, arch: str, python: str, compilers: set[str], testing_config +): + testing_config.platform = platform + metadata = api.render( + os.path.join(metadata_dir, "_compiler_jinja2"), + config=testing_config, + variants={"target_platform": f"{platform}-{arch}"}, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + python=python, + )[0][0] + assert compilers <= set(metadata.meta["requirements"]["build"]) def test_compiler_metadata_cross_compiler(): - variant = {'c_compiler': 'c-compiler-linux', - 'cxx_compiler': 
'cxx-compiler-linux', - 'fortran_compiler': 'fortran-compiler-linux', - 'target_platform': 'osx-109-x86_64'} - metadata = MetaData(os.path.join(metadata_dir, '_compiler_jinja2'), variant=variant) - assert 'c-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] - assert 'cxx-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] - assert 'fortran-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] + variant = { + "c_compiler": "c-compiler-linux", + "cxx_compiler": "cxx-compiler-linux", + "fortran_compiler": "fortran-compiler-linux", + "target_platform": "osx-109-x86_64", + } + metadata = MetaData(os.path.join(metadata_dir, "_compiler_jinja2"), variant=variant) + assert "c-compiler-linux_osx-109-x86_64" in metadata.meta["requirements"]["build"] + assert "cxx-compiler-linux_osx-109-x86_64" in metadata.meta["requirements"]["build"] + assert ( + "fortran-compiler-linux_osx-109-x86_64" + in metadata.meta["requirements"]["build"] + ) + + +@pytest.mark.parametrize( + "platform,arch,stdlib,stdlib_version", + [ + ("linux", "64", "sysroot", "2.12"), + ("linux", "aarch64", "sysroot", "2.17"), + ("osx", "64", "macosx_deployment_target", "10.13"), + ("osx", "arm64", "macosx_deployment_target", "11.0"), + ], +) +def test_native_stdlib_metadata( + platform: str, arch: str, stdlib: str, stdlib_version: str, testing_config +): + testing_config.platform = platform + metadata = api.render( + os.path.join(metadata_dir, "_stdlib_jinja2"), + config=testing_config, + variants={"target_platform": f"{platform}-{arch}"}, + platform=platform, + arch=arch, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + python="3.11", # irrelevant + )[0][0] + stdlib_req = f"{stdlib}_{platform}-{arch} {stdlib_version}.*" + assert stdlib_req in metadata.meta["requirements"]["host"] + assert {"c_stdlib", "c_stdlib_version"} <= metadata.get_used_vars() + hash_contents = metadata.get_hash_contents() + assert stdlib == hash_contents["c_stdlib"] + assert stdlib_version == hash_contents["c_stdlib_version"] def test_hash_build_id(testing_metadata): - testing_metadata.config.variant['zlib'] = '1.2' - testing_metadata.meta['requirements']['host'] = ['zlib'] + testing_metadata.config.variant["zlib"] = "1.2" + testing_metadata.meta["requirements"]["host"] = ["zlib"] testing_metadata.final = True hash_contents = testing_metadata.get_hash_contents() - assert hash_contents['zlib'] == '1.2' + assert hash_contents["zlib"] == "1.2" hdeps = testing_metadata.hash_dependencies() hash_contents_tp = hash_contents.copy() found = False for subdir in DEFAULT_SUBDIRS: - hash_contents_tp['target_platform'] = subdir - hdeps_tp = _hash_dependencies(hash_contents_tp, testing_metadata.config.hash_length) + hash_contents_tp["target_platform"] = subdir + hdeps_tp = _hash_dependencies( + hash_contents_tp, testing_metadata.config.hash_length + ) if hdeps_tp == hdeps: found = True break - assert found, f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS" - assert testing_metadata.build_id() == hdeps + '_1' + assert ( + found + ), f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS" + assert testing_metadata.build_id() == hdeps + "_1" def test_hash_build_id_key_order(testing_metadata): - deps = testing_metadata.meta['requirements'].get('build', [])[:] + deps = testing_metadata.meta["requirements"].get("build", [])[:] # first, prepend newdeps = deps[:] - newdeps.insert(0, 'steve') - testing_metadata.meta['requirements']['build'] 
= newdeps + newdeps.insert(0, "steve") + testing_metadata.meta["requirements"]["build"] = newdeps hash_pre = testing_metadata.hash_dependencies() # next, append newdeps = deps[:] - newdeps.append('steve') - testing_metadata.meta['requirements']['build'] = newdeps + newdeps.append("steve") + testing_metadata.meta["requirements"]["build"] = newdeps hash_post = testing_metadata.hash_dependencies() # make sure they match @@ -230,7 +366,186 @@ def test_hash_build_id_key_order(testing_metadata): def test_config_member_decoupling(testing_metadata): - testing_metadata.config.some_member = 'abc' + testing_metadata.config.some_member = "abc" b = testing_metadata.copy() - b.config.some_member = '123' + b.config.some_member = "123" assert b.config.some_member != testing_metadata.config.some_member + + +# ensure that numbers are not interpreted as ints or floats, doing so trips up versions +# with trailing zeros +def test_yamlize_zero(): + yml = yamlize( + """ + - 0 + - 0. + - 0.0 + - .0 + """ + ) + + assert yml == ["0", "0.", "0.0", ".0"] + + +def test_yamlize_positive(): + yml = yamlize( + """ + - +1 + - +1. + - +1.2 + - +.2 + """ + ) + + assert yml == ["+1", "+1.", "+1.2", "+.2"] + + +def test_yamlize_negative(): + yml = yamlize( + """ + - -1 + - -1. + - -1.2 + - -.2 + """ + ) + + assert yml == ["-1", "-1.", "-1.2", "-.2"] + + +def test_yamlize_numbers(): + yml = yamlize( + """ + - 1 + - 1.2 + """ + ) + + assert yml == ["1", "1.2"] + + +def test_yamlize_versions(): + yml = yamlize( + """ + - 1.2.3 + - 1.2.3.4 + """ + ) + + assert yml == ["1.2.3", "1.2.3.4"] + + +OS_ARCH: tuple[str, ...] = ( + "aarch64", + "arm", + "arm64", + "armv6l", + "armv7l", + "linux", + "linux32", + "linux64", + "osx", + "ppc64", + "ppc64le", + "s390x", + "unix", + "win", + "win32", + "win64", + "x86", + "x86_64", + "z", + "zos", +) + +if Version(conda_version) >= Version("23.3"): + OS_ARCH = (*OS_ARCH, "riscv64") + +if Version(conda_version) >= Version("23.7"): + OS_ARCH = (*OS_ARCH, "freebsd") + +if Version(conda_version) >= Version("23.9"): + OS_ARCH = (*OS_ARCH, "emscripten", "wasi", "wasm32") + + +@pytest.mark.parametrize( + ( + "subdir", # defined in conda.base.constants.KNOWN_SUBDIRS + "expected", # OS_ARCH keys expected to be True + ), + [ + ("emscripten-wasm32", {"unix", "emscripten", "wasm32"}), + ("wasi-wasm32", {"wasi", "wasm32"}), + ("freebsd-64", {"freebsd", "x86", "x86_64"}), + ("linux-32", {"unix", "linux", "linux32", "x86"}), + ("linux-64", {"unix", "linux", "linux64", "x86", "x86_64"}), + ("linux-aarch64", {"unix", "linux", "aarch64"}), + ("linux-armv6l", {"unix", "linux", "arm", "armv6l"}), + ("linux-armv7l", {"unix", "linux", "arm", "armv7l"}), + ("linux-ppc64", {"unix", "linux", "ppc64"}), + ("linux-ppc64le", {"unix", "linux", "ppc64le"}), + ("linux-riscv64", {"unix", "linux", "riscv64"}), + ("linux-s390x", {"unix", "linux", "s390x"}), + ("osx-64", {"unix", "osx", "x86", "x86_64"}), + ("osx-arm64", {"unix", "osx", "arm64"}), + ("win-32", {"win", "win32", "x86"}), + ("win-64", {"win", "win64", "x86", "x86_64"}), + ("win-arm64", {"win", "arm64"}), + ("zos-z", {"zos", "z"}), + ], +) +@pytest.mark.parametrize("nomkl", [0, 1]) +def test_get_selectors( + monkeypatch: MonkeyPatch, + subdir: str, + expected: set[str], + nomkl: int, +): + monkeypatch.setenv("FEATURE_NOMKL", str(nomkl)) + + config = Config(host_subdir=subdir) + assert get_selectors(config) == { + # defaults + "build_platform": context.subdir, + "lua": DEFAULT_VARIANTS["lua"], + "luajit": DEFAULT_VARIANTS["lua"] == 2, + "np": 
int(float(DEFAULT_VARIANTS["numpy"]) * 100), + "os": os, + "pl": DEFAULT_VARIANTS["perl"], + "py": int(f"{sys.version_info.major}{sys.version_info.minor}"), + "py26": sys.version_info[:2] == (2, 6), + "py27": sys.version_info[:2] == (2, 7), + "py2k": sys.version_info.major == 2, + "py33": sys.version_info[:2] == (3, 3), + "py34": sys.version_info[:2] == (3, 4), + "py35": sys.version_info[:2] == (3, 5), + "py36": sys.version_info[:2] == (3, 6), + "py3k": sys.version_info.major == 3, + "nomkl": bool(nomkl), + # default OS/arch values + **{key: False for key in OS_ARCH}, + # environment variables + "environ": os.environ, + **os.environ, + # override with True values + **{key: True for key in expected}, + } + + +def test_fromstring(): + MetaData.fromstring((metadata_path / "multiple_sources" / "meta.yaml").read_text()) + + +def test_fromdict(): + MetaData.fromdict( + yamlize((metadata_path / "multiple_sources" / "meta.yaml").read_text()) + ) + + +def test_get_section(testing_metadata: MetaData): + for name in FIELDS: + section = testing_metadata.get_section(name) + if name in OPTIONALLY_ITERABLE_FIELDS: + assert isinstance(section, list) + else: + assert isinstance(section, dict) diff --git a/tests/test_misc.py b/tests/test_misc.py index fc9a415243..4a5bb0d95c 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -1,66 +1,42 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from os.path import join +from pathlib import Path import pytest -from conda_build.utils import on_win -import conda_build._link as _link -from conda_build.conda_interface import PathType, EntityEncoder, CrossPlatformStLink +from conda.auxlib.entity import EntityEncoder +from conda.models.enums import PathType +from conda_build._link import pyc_f -def test_pyc_f_2(): - assert _link.pyc_f('sp/utils.py', (2, 7, 9)) == 'sp/utils.pyc' - -def test_pyc_f_3(): - for f, r in [ - ('sp/utils.py', - 'sp/__pycache__/utils.cpython-34.pyc'), - ('sp/foo/utils.py', - 'sp/foo/__pycache__/utils.cpython-34.pyc'), - ]: - assert _link.pyc_f(f, (3, 4, 2)) == r +@pytest.mark.parametrize( + "source,python,compiled", + [ + ("path/utils.py", (2, 7), "path/utils.pyc"), + ("pa/th/utils.py", (2, 7), "pa/th/utils.pyc"), + ("path/utils.py", (3, 10), "path/__pycache__/utils.cpython-310.pyc"), + ("pa/th/utils.py", (3, 10), "pa/th/__pycache__/utils.cpython-310.pyc"), + ], +) +def test_pyc_f(source, python, compiled): + assert Path(pyc_f(source, python)) == Path(compiled) def test_pathtype(): hardlink = PathType("hardlink") assert str(hardlink) == "hardlink" - assert hardlink.__json__() == 'hardlink' + assert hardlink.__json__() == "hardlink" softlink = PathType("softlink") assert str(softlink) == "softlink" assert softlink.__json__() == "softlink" -def test_entity_encoder(tmpdir): - test_file = join(str(tmpdir), "test-file") +def test_entity_encoder(tmp_path): + test_file = tmp_path / "test-file" test_json = {"a": PathType("hardlink"), "b": 1} - with open(test_file, "w") as f: - json.dump(test_json, f, cls=EntityEncoder) + test_file.write_text(json.dumps(test_json, cls=EntityEncoder)) - with open(test_file) as f: - json_file = json.load(f) + json_file = json.loads(test_file.read_text()) assert json_file == {"a": "hardlink", "b": 1} - - -@pytest.mark.skipif(on_win, reason="link not available on win/py2.7") -def test_crossplatform_st_link(tmpdir): - from os import link - test_file = join(str(tmpdir), "test-file") - test_file_linked = join(str(tmpdir), "test-file-linked") - test_file_link = join(str(tmpdir), 
"test-file-link") - - open(test_file, "a").close() - open(test_file_link, "a").close() - link(test_file_link, test_file_linked) - assert 1 == CrossPlatformStLink.st_nlink(test_file) - assert 2 == CrossPlatformStLink.st_nlink(test_file_link) - assert 2 == CrossPlatformStLink.st_nlink(test_file_linked) - - -@pytest.mark.skipif(not on_win, reason="already tested") -def test_crossplatform_st_link_on_win(tmpdir): - test_file = join(str(tmpdir), "test-file") - open(test_file, "a").close() - assert 1 == CrossPlatformStLink.st_nlink(test_file) diff --git a/tests/test_os_utils_external.py b/tests/test_os_utils_external.py index caf8fd49bd..a4f88d913a 100644 --- a/tests/test_os_utils_external.py +++ b/tests/test_os_utils_external.py @@ -1,57 +1,43 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import sys import os -import os.path +from pathlib import Path + +from conda.common.compat import on_win from conda_build.os_utils.external import find_executable def test_find_executable(testing_workdir, monkeypatch): - if sys.platform != "win32": - import stat - - path_components = [] - - def create_file(unix_path, put_on_path, executable): - localized_path = os.path.join(testing_workdir, *unix_path.split('/')) - # empty prefix by default - extra bit at beginning of file - if sys.platform == "win32": - localized_path = localized_path + ".bat" - - dirname = os.path.split(localized_path)[0] - if not os.path.isdir(dirname): - os.makedirs(dirname) - - if sys.platform == "win32": - prefix = "@echo off\n" - else: - prefix = "#!/bin/bash\nexec 1>&2\n" - with open(localized_path, 'w') as f: - f.write(prefix + """ - echo ******* You have reached the dummy {}. It is likely there is a bug in - echo ******* conda that makes it not add the _build/bin directory onto the - echo ******* PATH before running the source checkout tool - exit -1 - """.format(localized_path)) - - if put_on_path: - path_components.append(dirname) - - if executable: - st = os.stat(localized_path) - os.chmod(localized_path, st.st_mode | stat.S_IEXEC) - - return localized_path - - create_file('executable/not/on/path/with/target_name', put_on_path=False, executable=True) - create_file('non_executable/on/path/with/target_name', put_on_path=True, executable=False) - create_file('executable/on/path/with/non_target_name', put_on_path=True, executable=True) - target_path = create_file('executable/on/path/with/target_name', put_on_path=True, executable=True) - create_file('another/executable/later/on/path/with/target_name', put_on_path=True, executable=True) - - monkeypatch.setenv('PATH', os.pathsep.join(path_components)) - - find = find_executable('target_name') - - assert find == target_path, f"Expected to find 'target_name' in '{target_path}', but found it in '{find}'" + search_path = [] + + def touch(target, searchable=True, executable=True, alternative=False): + path = Path( + testing_workdir, + "alt" if alternative else "not", + "exec" if executable else "not", + "search" if searchable else "not", + target, + ) + if on_win: + path = path.with_suffix(".bat") + path.parent.mkdir(parents=True, exist_ok=True) + + path.touch(0o100 if executable else 0o666) + + if searchable: + search_path.append(str(path.parent)) + + return str(path) + + touch("target", searchable=False) + # Windows doesn't have an execute bit so this is the path found + win_expected = touch("target", executable=False) + touch("not_target") + nix_expected = touch("target") + touch("target", alternative=True) + expected = win_expected if on_win else 
nix_expected
+
+    monkeypatch.setenv("PATH", os.pathsep.join(search_path))
+
+    assert find_executable("target") == expected
diff --git a/tests/test_patch.py b/tests/test_patch.py
index 6ca2702f24..7b1764ce58 100644
--- a/tests/test_patch.py
+++ b/tests/test_patch.py
@@ -1,38 +1,79 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-import os
+from __future__ import annotations
+
+from pathlib import Path
+from subprocess import CalledProcessError
 from textwrap import dedent
 from types import SimpleNamespace
-from subprocess import CalledProcessError
 
 import pytest
 
 from conda_build.source import (
-    _ensure_LF,
     _ensure_CRLF,
+    _ensure_LF,
     _guess_patch_strip_level,
     apply_patch,
 )
 
 
-def test_patch_strip_level(testing_workdir, monkeypatch):
-    patchfiles = {'some/common/prefix/one.txt',
-                  'some/common/prefix/two.txt',
-                  'some/common/prefix/three.txt'}
-    folders = ('some', 'common', 'prefix')
-    files = ('one.txt', 'two.txt', 'three.txt')
-    os.makedirs(os.path.join(*folders))
-    for file in files:
-        with open(os.path.join(os.path.join(*folders), file), 'w') as f:
-            f.write('hello\n')
-    assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (0, False)
-    monkeypatch.chdir(folders[0])
-    assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (1, False)
-    monkeypatch.chdir(folders[1])
-    assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (2, False)
-    monkeypatch.chdir(folders[2])
-    assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (3, False)
-    monkeypatch.chdir(testing_workdir)
+@pytest.mark.parametrize(
+    "patches,results",
+    [
+        pytest.param(
+            [
+                Path("one.txt"),
+                Path("some", "common", "prefix", "two.txt"),
+                Path("some", "common", "prefix", "three.txt"),
+            ],
+            [(0, False), (0, False), (0, False), (0, False)],
+            id="strip level 0",
+        ),
+        pytest.param(
+            [
+                Path("some", "one.txt"),
+                Path("some", "common", "prefix", "two.txt"),
+                Path("some", "common", "prefix", "three.txt"),
+            ],
+            [(0, False), (1, False), (0, True), (0, True)],
+            id="strip level 1",
+        ),
+        pytest.param(
+            [
+                Path("some", "common", "one.txt"),
+                Path("some", "common", "prefix", "two.txt"),
+                Path("some", "common", "prefix", "three.txt"),
+            ],
+            [(0, False), (1, False), (2, False), (0, True)],
+            id="strip level 2",
+        ),
+        pytest.param(
+            [
+                Path("some", "common", "prefix", "one.txt"),
+                Path("some", "common", "prefix", "two.txt"),
+                Path("some", "common", "prefix", "three.txt"),
+            ],
+            [(0, False), (1, False), (2, False), (3, False)],
+            id="strip level 3",
+        ),
+    ],
+)
+def test_patch_strip_level(
+    patches: list[Path], results: list[tuple[int, bool]], tmp_path: Path
+):
+    # generate dummy files
+    for patch in patches:
+        (tmp_path / patch).parent.mkdir(parents=True, exist_ok=True)
+        (tmp_path / patch).touch()
+
+    src_dir = tmp_path
+    assert _guess_patch_strip_level(patches, src_dir) == results[0]
+    src_dir = src_dir / "some"
+    assert _guess_patch_strip_level(patches, src_dir) == results[1]
+    src_dir = src_dir / "common"
+    assert _guess_patch_strip_level(patches, src_dir) == results[2]
+    src_dir = src_dir / "prefix"
+    assert _guess_patch_strip_level(patches, src_dir) == results[3]
 
 
 @pytest.fixture
diff --git a/tests/test_post.py b/tests/test_post.py
index a008c6656a..97ef1448fc 100644
--- a/tests/test_post.py
+++ b/tests/test_post.py
@@ -1,82 +1,172 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+import json
+import logging
 import os
 import shutil
 import sys
+from pathlib import Path
 
 import pytest
 
-from conda_build import post, api
-from 
conda_build.utils import on_win, package_has_file, get_site_packages +from conda_build import api, post +from conda_build.utils import ( + get_site_packages, + on_linux, + on_mac, + on_win, + package_has_file, +) from .utils import add_mangling, metadata_dir +@pytest.mark.skipif( + sys.version_info >= (3, 10), + reason="Python 3.10+, py_compile terminates once it finds an invalid file", +) def test_compile_missing_pyc(testing_workdir): - good_files = ['f1.py', 'f3.py'] - bad_file = 'f2_bad.py' - tmp = os.path.join(testing_workdir, 'tmp') - shutil.copytree(os.path.join(os.path.dirname(__file__), 'test-recipes', - 'metadata', '_compile-test'), tmp) - post.compile_missing_pyc(os.listdir(tmp), cwd=tmp, - python_exe=sys.executable) + good_files = ["f1.py", "f3.py"] + bad_file = "f2_bad.py" + tmp = os.path.join(testing_workdir, "tmp") + shutil.copytree( + os.path.join( + os.path.dirname(__file__), "test-recipes", "metadata", "_compile-test" + ), + tmp, + ) + post.compile_missing_pyc(os.listdir(tmp), cwd=tmp, python_exe=sys.executable) for f in good_files: assert os.path.isfile(os.path.join(tmp, add_mangling(f))) assert not os.path.isfile(os.path.join(tmp, add_mangling(bad_file))) -@pytest.mark.skipif(on_win, reason="no linking on win") -def test_hardlinks_to_copies(testing_workdir): - with open('test1', 'w') as f: +def test_hardlinks_to_copies(): + with open("test1", "w") as f: f.write("\n") - os.link('test1', 'test2') - assert os.lstat('test1').st_nlink == 2 - assert os.lstat('test2').st_nlink == 2 + os.link("test1", "test2") + assert os.lstat("test1").st_nlink == 2 + assert os.lstat("test2").st_nlink == 2 - post.make_hardlink_copy('test1', os.getcwd()) - post.make_hardlink_copy('test2', os.getcwd()) + post.make_hardlink_copy("test1", os.getcwd()) + post.make_hardlink_copy("test2", os.getcwd()) - assert os.lstat('test1').st_nlink == 1 - assert os.lstat('test2').st_nlink == 1 + assert os.lstat("test1").st_nlink == 1 + assert os.lstat("test2").st_nlink == 1 -def test_postbuild_files_raise(testing_metadata, testing_workdir): - fn = 'buildstr', 'buildnum', 'version' +def test_postbuild_files_raise(testing_metadata): + fn = "buildstr", "buildnum", "version" for f in fn: - with open(os.path.join(testing_metadata.config.work_dir, - f'__conda_{f}__.txt'), 'w') as fh: - fh.write('123') + with open( + os.path.join(testing_metadata.config.work_dir, f"__conda_{f}__.txt"), "w" + ) as fh: + fh.write("123") with pytest.raises(ValueError, match=f): post.get_build_metadata(testing_metadata) @pytest.mark.skipif(on_win, reason="fix_shebang is not executed on win32") -def test_fix_shebang(testing_config): - fname = 'test1' - with open(fname, 'w') as f: +def test_fix_shebang(): + fname = "test1" + with open(fname, "w") as f: f.write("\n") os.chmod(fname, 0o000) - post.fix_shebang(fname, '.', '/test/python') + post.fix_shebang(fname, ".", "/test/python") assert os.stat(fname).st_mode == 33277 # file with permissions 0o775 def test_postlink_script_in_output_explicit(testing_config): - recipe = os.path.join(metadata_dir, '_post_link_in_output') + recipe = os.path.join(metadata_dir, "_post_link_in_output") pkg = api.build(recipe, config=testing_config, notest=True)[0] - assert (package_has_file(pkg, 'bin/.out1-post-link.sh') or - package_has_file(pkg, 'Scripts/.out1-post-link.bat')) + assert package_has_file(pkg, "bin/.out1-post-link.sh") or package_has_file( + pkg, "Scripts/.out1-post-link.bat" + ) def test_postlink_script_in_output_implicit(testing_config): - recipe = os.path.join(metadata_dir, 
'_post_link_in_output_implicit') + recipe = os.path.join(metadata_dir, "_post_link_in_output_implicit") pkg = api.build(recipe, config=testing_config, notest=True)[0] - assert (package_has_file(pkg, 'bin/.out1-post-link.sh') or - package_has_file(pkg, 'Scripts/.out1-post-link.bat')) + assert package_has_file(pkg, "bin/.out1-post-link.sh") or package_has_file( + pkg, "Scripts/.out1-post-link.bat" + ) def test_pypi_installer_metadata(testing_config): - recipe = os.path.join(metadata_dir, '_pypi_installer_metadata') + recipe = os.path.join(metadata_dir, "_pypi_installer_metadata") pkg = api.build(recipe, config=testing_config, notest=True)[0] - expected_installer = '{}/imagesize-1.1.0.dist-info/INSTALLER'.format(get_site_packages('', '3.9')) - assert 'conda' == (package_has_file(pkg, expected_installer, refresh_mode='forced')) + expected_installer = "{}/imagesize-1.1.0.dist-info/INSTALLER".format( + get_site_packages("", "3.9") + ) + assert "conda" == (package_has_file(pkg, expected_installer, refresh_mode="forced")) + + +def test_menuinst_validation_ok(testing_config, caplog, tmp_path): + "1st check - validation passes with recipe as is" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + + with caplog.at_level(logging.INFO): + pkg = api.build(str(recipe_tmp), config=testing_config, notest=True)[0] + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text + assert "not a valid menuinst JSON file" not in captured_text + assert "is a valid menuinst JSON document" in captured_text + assert package_has_file(pkg, "Menu/menu_json_validation.json") + + +def test_menuinst_validation_fails_bad_schema(testing_config, caplog, tmp_path): + "2nd check - valid JSON but invalid content fails validation" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + menu_json = recipe_tmp / "menu.json" + menu_json_contents = menu_json.read_text() + + bad_data = json.loads(menu_json_contents) + bad_data["menu_items"][0]["osx"] = ["bad", "schema"] + menu_json.write_text(json.dumps(bad_data, indent=2)) + with caplog.at_level(logging.WARNING): + api.build(str(recipe_tmp), config=testing_config, notest=True) + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text + assert "not a valid menuinst JSON document" in captured_text + assert "ValidationError" in captured_text + + +def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path): + "3rd check - non-parsable JSON fails validation" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + menu_json = recipe_tmp / "menu.json" + menu_json_contents = menu_json.read_text() + menu_json.write_text(menu_json_contents + "Make this an invalid JSON") + + with caplog.at_level(logging.WARNING): + api.build(str(recipe_tmp), config=testing_config, notest=True) + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text + assert "not a valid menuinst JSON document" in captured_text + assert "JSONDecodeError" in captured_text + + +@pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.") +def test_rpath_symlink(mocker, testing_config): + if on_linux: + mk_relative = mocker.spy(post, "mk_relative_linux") + elif on_mac: + 
mk_relative = mocker.spy(post, "mk_relative_osx") + api.build( + os.path.join(metadata_dir, "_rpath_symlink"), + config=testing_config, + variants={"rpaths_patcher": ["patchelf", "LIEF"]}, + activate=True, + ) + # Should only be called on the actual binary, not its symlinks. (once per variant) + assert mk_relative.call_count == 2 diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index 2d76a08e6c..95b2efc301 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -1,37 +1,40 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os +import sys +from pathlib import Path import pytest +from conda.testing.integration import BIN_DIRECTORY -import sys - -from conda_build import api +from conda_build.api import build from conda_build.utils import check_call_env -from .utils import metadata_dir, is_valid_dir -published_examples = os.path.join(os.path.dirname(metadata_dir), 'published_code') +from .utils import get_valid_recipes, published_path @pytest.mark.sanity -def test_skeleton_pypi(testing_workdir): +def test_skeleton_pypi(): """published in docs at https://docs.conda.io/projects/conda-build/en/latest/user-guide/tutorials/build-pkgs-skeleton.html""" - conda_path = os.path.join(sys.prefix, 'Scripts' if sys.platform == 'win32' else 'bin', 'conda') - cmd = conda_path + ' skeleton pypi click' - check_call_env(cmd.split()) - cmd = conda_path + ' build click' - check_call_env(cmd.split()) - + conda_path = os.path.join(sys.prefix, BIN_DIRECTORY, "conda") -@pytest.fixture(params=[dirname for dirname in os.listdir(published_examples) - if is_valid_dir(published_examples, dirname)]) -def recipe(request): - return os.path.join(published_examples, request.param) + check_call_env([conda_path, "skeleton", "pypi", "click"]) + check_call_env([conda_path, "build", "click"]) -# This tests any of the folders in the test-recipes/published_code folder that don't start with _ @pytest.mark.sanity -def test_recipe_builds(recipe, testing_config, testing_workdir): +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(published_path) + ], +) +def test_recipe_builds( + recipe: Path, + testing_config, + conda_build_test_recipe_envvar: str, +): # These variables are defined solely for testing purposes, # so they can be checked within build scripts - api.build(recipe, config=testing_config) + build(str(recipe), config=testing_config) diff --git a/tests/test_pypi_skeleton.py b/tests/test_pypi_skeleton.py index d86a81dbeb..20581ef14d 100644 --- a/tests/test_pypi_skeleton.py +++ b/tests/test_pypi_skeleton.py @@ -1,92 +1,88 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict +import pytest +from conda.auxlib.ish import dals from conda_build.skeletons import pypi -from conda_build.skeletons.pypi import _print_dict, _formating_value +from conda_build.skeletons.pypi import _formating_value, _print_dict -def test_version_compare(): - short_version = '2.2' - long_version = '1.4.5' - post_version = '2.2.post3' - pre_version = '2.2.pre3' - alpha_version = '1.4.5a4' - beta_version = '1.4.5b4' - rc_version = '1.4.5rc4' - padding_version_short = '2.2.0' - padding_version_long = '1.4.5.0' +@pytest.mark.parametrize( + "version,version_range", + [ + ("2.2", " >=2.2,<3"), + ("1.4.5", " >=1.4.5,<1.5"), + ("2.2.post3", " >=2.2.post3,<3"), + ("2.2.pre3", " >=2.2.pre3,<3"), + ("1.4.5a4", " >=1.4.5a4,<1.5"), + ("1.4.5b4", " 
>=1.4.5b4,<1.5"), + ("1.4.5rc4", " >=1.4.5rc4,<1.5"), + ("2.2.0", " >=2.2.0,<2.3"), + ("1.4.5.0", " >=1.4.5.0,<1.4.6"), + ], +) +def test_version_compare(version, version_range): + assert pypi.convert_version(version) == version_range - assert pypi.convert_version(short_version) == ' >=2.2,<3' - assert pypi.convert_version(long_version) == ' >=1.4.5,<1.5' - assert pypi.convert_version(post_version) == ' >=2.2.post3,<3' - assert pypi.convert_version(pre_version) == ' >=2.2.pre3,<3' - assert pypi.convert_version(alpha_version) == ' >=1.4.5a4,<1.5' - assert pypi.convert_version(beta_version) == ' >=1.4.5b4,<1.5' - assert pypi.convert_version(rc_version) == ' >=1.4.5rc4,<1.5' - assert pypi.convert_version(padding_version_short) == ' >=2.2.0,<2.3' - assert pypi.convert_version(padding_version_long) == ' >=1.4.5.0,<1.4.6' - -def test_formating_value(): - assert _formating_value("summary", "SUMMARY SUMMARY") == " \"SUMMARY SUMMARY\"\n" - assert _formating_value("description", "DESCRIPTION DESCRIPTION") == " \"DESCRIPTION DESCRIPTION\"\n" - assert _formating_value("script", "SCRIPT VALUE") == " \"SCRIPT VALUE\"\n" - assert _formating_value("name", "{{name|lower}}") == " \"{{name|lower}}\"\n" - assert _formating_value("name", "NORMAL NAME") == " NORMAL NAME\n" +@pytest.mark.parametrize( + "name,value,result", + [ + ("summary", "SUMMARY SUMMARY", ' "SUMMARY SUMMARY"\n'), + ("description", "DESCRIPTION DESCRIPTION", ' "DESCRIPTION DESCRIPTION"\n'), + ("script", "SCRIPT VALUE", ' "SCRIPT VALUE"\n'), + ("name", "{{name|lower}}", ' "{{name|lower}}"\n'), + ("name", "NORMAL NAME", " NORMAL NAME\n"), + ], +) +def test_formating_value(name, value, result): + assert _formating_value(name, value) == result def test_print_dict(): recipe_metadata = { - "about": OrderedDict( - [ - ("home", "https://conda.io"), - ("license", "MIT"), - ("license_family", "MIT"), - ("summary", "SUMMARY SUMMARY SUMMARY"), - ("description", "DESCRIPTION DESCRIPTION DESCRIPTION"), - ] - ), - "source": OrderedDict( - [ - ("sha256", "4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732"), - ("url", "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz"), - ] - ), - "package": OrderedDict( - [("name", "{{ name|lower }}"), ("version", "{{ version }}")] - ), - "build": OrderedDict( - [ - ("number", 0), - ("script", "{{ PYTHON }} -m pip install . -vv"), - ] - ), + "about": { + "home": "https://conda.io", + "license": "MIT", + "license_family": "MIT", + "summary": "SUMMARY SUMMARY SUMMARY", + "description": "DESCRIPTION DESCRIPTION DESCRIPTION", + }, + "source": { + "sha256": "4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732", + "url": "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz", + }, + "package": { + "name": "{{ name|lower }}", + "version": "{{ version }}", + }, + "build": { + "number": 0, + "script": "{{ PYTHON }} -m pip install . 
-vv --no-deps --no-build-isolation", + }, } + recipe_order = ["package", "source", "build", "about"] + recipe_yaml = dals( + """ + package: + name: "{{ name|lower }}" + version: "{{ version }}" - assert ( - _print_dict( - recipe_metadata, - order=["package", "source", "build", "about"], - ) - == """package: - name: "{{ name|lower }}" - version: "{{ version }}" - -source: - sha256: 4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732 - url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz" + source: + sha256: 4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732 + url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz" -build: - number: 0 - script: "{{ PYTHON }} -m pip install . -vv" + build: + number: 0 + script: "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation" -about: - home: "https://conda.io" - license: MIT - license_family: MIT - summary: "SUMMARY SUMMARY SUMMARY" - description: "DESCRIPTION DESCRIPTION DESCRIPTION" + about: + home: "https://conda.io" + license: MIT + license_family: MIT + summary: "SUMMARY SUMMARY SUMMARY" + description: "DESCRIPTION DESCRIPTION DESCRIPTION" -""" + """ # yes, the trailing extra newline is necessary ) + assert _print_dict(recipe_metadata, order=recipe_order) == recipe_yaml diff --git a/tests/test_render.py b/tests/test_render.py index 140d75001f..940d090781 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -1,42 +1,136 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json import os +import re +from typing import TYPE_CHECKING +from uuid import uuid4 -from conda_build import api -from conda_build import render +import pytest +from conda_build.api import get_output_file_paths +from conda_build.render import ( + _simplify_to_exact_constraints, + find_pkg_dir_or_file_in_pkgs_dirs, + get_pin_from_build, + open_recipe, +) +from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1 -def test_output_with_noarch_says_noarch(testing_metadata): - testing_metadata.meta['build']['noarch'] = 'python' - output = api.get_output_file_path(testing_metadata) - assert os.path.sep + "noarch" + os.path.sep in output[0] +if TYPE_CHECKING: + from pathlib import Path + from conda_build.metadata import MetaData -def test_output_with_noarch_python_says_noarch(testing_metadata): - testing_metadata.meta['build']['noarch_python'] = True - output = api.get_output_file_path(testing_metadata) + +@pytest.mark.parametrize( + "build", + [ + pytest.param({"noarch": "python"}, id="noarch"), + pytest.param({"noarch_python": True}, id="noarch_python"), + ], +) +def test_noarch_output(build, testing_metadata): + testing_metadata.meta["build"].update(build) + output = get_output_file_paths(testing_metadata) assert os.path.sep + "noarch" + os.path.sep in output[0] def test_reduce_duplicate_specs(testing_metadata): - reqs = {"build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"], "host": ["exact", "exact 1.2.3 1"]} - testing_metadata.meta["requirements"] = reqs - render._simplify_to_exact_constraints(testing_metadata) - assert (testing_metadata.meta['requirements']['build'] == - testing_metadata.meta['requirements']['host']) - simplified_deps = testing_metadata.meta['requirements'] - assert len(simplified_deps['build']) == 1 - assert 'exact 1.2.3 1' in simplified_deps['build'] + testing_metadata.meta["requirements"] = { + "build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"], + "host": 
["exact", "exact 1.2.3 1"], + } + _simplify_to_exact_constraints(testing_metadata) + simplified = testing_metadata.meta["requirements"] + + assert simplified["build"] == simplified["host"] + assert len(simplified["build"]) == 1 + assert "exact 1.2.3 1" in simplified["build"] def test_pin_run_as_build_preserve_string(testing_metadata): m = testing_metadata - m.config.variant['pin_run_as_build']['pkg'] = { - 'max_pin': 'x.x' - } - dep = render.get_pin_from_build( - m, - 'pkg * somestring*', - {'pkg': '1.2.3 somestring_h1234'} + m.config.variant["pin_run_as_build"]["pkg"] = {"max_pin": "x.x"} + dep = get_pin_from_build(m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"}) + assert dep == "pkg >=1.2.3,<1.3.0a0 somestring*" + + +@pytest.mark.parametrize( + "create_package,subdir,is_file,files_only", + [ + pytest.param(False, None, None, None, id="not found"), + pytest.param(True, None, False, False, id="directory"), + pytest.param(True, None, False, True, id="on demand"), + pytest.param(True, "magic", False, True, id="on demand, different subdir"), + pytest.param(True, None, True, None, id="file"), + ], +) +def test_find_package( + testing_metadata: MetaData, + tmp_path: Path, + create_package: bool, + subdir: str | None, + is_file: bool, + files_only: bool, +): + """ + Testing our ability to find the package directory or archive. + + The find_pkg_dir_or_file_in_pkgs_dirs function will scan the various + locations where packages may exist locally and returns the full package path + if found. + """ + # setup + distribution = uuid4().hex[:20] + testing_metadata.config.croot = tmp_path + host_cache = tmp_path / testing_metadata.config.host_subdir + host_cache.mkdir() + subdir = subdir or testing_metadata.config.host_subdir + other_cache = tmp_path / subdir + other_cache.mkdir(exist_ok=True) + + # generate a dummy package as needed + package = None + if create_package: + # generate dummy package + if is_file: + (host_cache / (distribution + CONDA_PACKAGE_EXTENSION_V1)).touch() + else: + info = host_cache / distribution / "info" + info.mkdir(parents=True) + (info / "index.json").write_text(json.dumps({"subdir": subdir})) + + # expected package path + if is_file or files_only: + package = other_cache / (distribution + CONDA_PACKAGE_EXTENSION_V1) + else: + package = other_cache / distribution + + # attempt to find the package and check we found the expected path + found = find_pkg_dir_or_file_in_pkgs_dirs( + distribution, + testing_metadata, + files_only=files_only, ) - assert dep == 'pkg >=1.2.3,<1.3.0a0 somestring*' + assert package is found is None or package.samefile(found) + + +def test_open_recipe(tmp_path: Path): + path = tmp_path / "missing" + with pytest.raises( + SystemExit, + match=rf"Error: non-existent: {re.escape(str(path))}", + ): + with open_recipe(path): + pass + + (path := tmp_path / "bad.ext").touch() + with pytest.raises( + SystemExit, + match=rf"Error: non-recipe: {re.escape(str(path))}", + ): + with open_recipe(path): + pass diff --git a/tests/test_source.py b/tests/test_source.py index 6776a3815d..1cae2f9997 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,160 +5,198 @@ import tarfile import pytest +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import hashsum_file, TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator + from .utils import thisdir def 
test_alternative_url_no_fn(testing_metadata): - testing_metadata.meta['source'] = {'url': [ - os.path.join(thisdir, 'archives', 'a.tar.bz2'), - os.path.join(thisdir, 'archives', 'a.tar.bz2'), - ]} + testing_metadata.meta["source"] = { + "url": [ + os.path.join(thisdir, "archives", "a.tar.bz2"), + os.path.join(thisdir, "archives", "a.tar.bz2"), + ] + } source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'a')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "a")) def test_multiple_url_sources(testing_metadata): - - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f2', 'url': os.path.join(thisdir, 'archives', 'b.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f2", "url": os.path.join(thisdir, "archives", "b.tar.bz2")}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'b')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f2")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f2", "b")) def test_multiple_url_sources_into_same_folder(testing_metadata): - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'b.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f1", "url": os.path.join(thisdir, "archives", "b.tar.bz2")}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'b')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "b")) def test_extract_tarball_with_subfolders_moves_files(testing_metadata): """Ensure that tarballs that contain only a single folder get their contents hoisted up one level""" - testing_metadata.meta['source'] = { - 'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')} + testing_metadata.meta["source"] = { + "url": os.path.join(thisdir, "archives", "subfolder.tar.bz2") + } source.provide(testing_metadata) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'abc')) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder") + ) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "abc")) def test_extract_multiple_tarballs_with_subfolders_flattens_all(testing_metadata): """Ensure that tarballs that contain only a single folder get their contents hoisted up one 
level""" - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')}, - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder2.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "subfolder.tar.bz2")}, + { + "folder": "f1", + "url": os.path.join(thisdir, "archives", "subfolder2.tar.bz2"), + }, + ] source.provide(testing_metadata) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder')) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder2')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'abc')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'def')) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder") + ) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder2") + ) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "abc")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "def")) def test_multiple_different_sources(testing_metadata): - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f2', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f2", "git_url": "https://github.com/conda/conda_build_test_recipe"}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'README.md')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists( + os.path.join(testing_metadata.config.work_dir, "f2", "README.md") + ) # Test get_value() indexing syntax. - assert testing_metadata.get_value('source/url') == testing_metadata.meta['source'][0]['url'] - assert testing_metadata.get_value('source/0/url') == testing_metadata.meta['source'][0]['url'] - assert (testing_metadata.get_value('source/1/git_url') == - testing_metadata.meta['source'][1]['git_url']) + assert ( + testing_metadata.get_value("source/url") + == testing_metadata.meta["source"][0]["url"] + ) + assert ( + testing_metadata.get_value("source/0/url") + == testing_metadata.meta["source"][0]["url"] + ) + assert ( + testing_metadata.get_value("source/1/git_url") + == testing_metadata.meta["source"][1]["git_url"] + ) def test_git_into_existing_populated_folder_raises(testing_metadata): """Git will not clone into a non-empty folder. 
This should raise an exception.""" - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f1', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f1", "git_url": "https://github.com/conda/conda_build_test_recipe"}, + ] with pytest.raises(subprocess.CalledProcessError): source.provide(testing_metadata) def test_git_repo_with_single_subdir_does_not_enter_subdir(testing_metadata): - """Regression test for https://github.com/conda/conda-build/issues/1910 """ - testing_metadata.meta['source'] = { - 'git_url': 'https://github.com/conda/conda_build_single_folder_test'} + """Regression test for https://github.com/conda/conda-build/issues/1910""" + testing_metadata.meta["source"] = { + "git_url": "https://github.com/conda/conda_build_single_folder_test" + } source.provide(testing_metadata) - assert os.path.basename(testing_metadata.config.work_dir) != 'one_folder' + assert os.path.basename(testing_metadata.config.work_dir) != "one_folder" @pytest.mark.sanity -def test_source_user_expand(testing_workdir): - with TemporaryDirectory(dir=os.path.expanduser('~')) as tmp: +def test_source_user_expand(): + with TemporaryDirectory(dir=os.path.expanduser("~")) as tmp: with TemporaryDirectory() as tbz_srcdir: file_txt = os.path.join(tbz_srcdir, "file.txt") - with open(file_txt, 'w') as f: + with open(file_txt, "w") as f: f.write("hello") tbz_name = os.path.join(tmp, "cb-test.tar.bz2") with tarfile.open(tbz_name, "w:bz2") as tar: tar.add(tbz_srcdir, arcname=os.path.sep) - for prefix in ('~', 'file:///~'): - source_dict = {"url": os.path.join(prefix, os.path.basename(tmp), "cb-test.tar.bz2"), - "sha256": hashsum_file(tbz_name, 'sha256')} + for prefix in ("~", "file:///~"): + source_dict = { + "url": os.path.join( + prefix, os.path.basename(tmp), "cb-test.tar.bz2" + ), + "sha256": compute_sum(tbz_name, "sha256"), + } with TemporaryDirectory() as tmp2: - download_to_cache(tmp2, '', source_dict) + download_to_cache(tmp2, "", source_dict) def test_hoist_same_name(testing_workdir): - testdir = os.path.join(testing_workdir, 'test', 'test') - outer_dir = os.path.join(testing_workdir, 'test') + testdir = os.path.join(testing_workdir, "test", "test") + outer_dir = os.path.join(testing_workdir, "test") os.makedirs(testdir) - with open(os.path.join(testdir, 'somefile'), 'w') as f: - f.write('weeeee') + with open(os.path.join(testdir, "somefile"), "w") as f: + f.write("weeeee") source.hoist_single_extracted_folder(testdir) - assert os.path.isfile(os.path.join(outer_dir, 'somefile')) + assert os.path.isfile(os.path.join(outer_dir, "somefile")) assert not os.path.isdir(testdir) def test_hoist_different_name(testing_workdir): - testdir = os.path.join(testing_workdir, 'test') - nesteddir = os.path.join(testdir, 'test_name') + testdir = os.path.join(testing_workdir, "test") + nesteddir = os.path.join(testdir, "test_name") os.makedirs(nesteddir) - with open(os.path.join(nesteddir, 'somefile'), 'w') as f: - f.write('weeeee') + with open(os.path.join(nesteddir, "somefile"), "w") as f: + f.write("weeeee") source.hoist_single_extracted_folder(nesteddir) - assert os.path.isfile(os.path.join(testdir, 'somefile')) + assert os.path.isfile(os.path.join(testdir, "somefile")) assert not os.path.isdir(nesteddir) -def test_append_hash_to_fn(testing_metadata, caplog): - relative_zip = 'testfn.zip' - assert 
source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip' - relative_tar_gz = 'testfn.tar.gz' - assert source.append_hash_to_fn(relative_tar_gz, '123') == 'testfn_123.tar.gz' - absolute_zip = '/abc/testfn.zip' - assert source.append_hash_to_fn(absolute_zip, '123') == '/abc/testfn_123.zip' - absolute_tar_gz = '/abc/testfn.tar.gz' - assert source.append_hash_to_fn(absolute_tar_gz, '123') == '/abc/testfn_123.tar.gz' - absolute_win_zip = 'C:\\abc\\testfn.zip' - assert source.append_hash_to_fn(absolute_win_zip, '123') == 'C:\\abc\\testfn_123.zip' - absolute_win_tar_gz = 'C:\\abc\\testfn.tar.gz' - assert source.append_hash_to_fn(absolute_win_tar_gz, '123') == 'C:\\abc\\testfn_123.tar.gz' - relative_whl = 'setuptools-36.4.0-py2.py3-none-any.whl' - assert source.append_hash_to_fn(relative_whl, '123') == 'setuptools-36.4.0-py2.py3-none-any_123.whl' - - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}] +def test_append_hash_to_fn(testing_metadata): + relative_zip = "testfn.zip" + assert source.append_hash_to_fn(relative_zip, "123") == "testfn_123.zip" + relative_tar_gz = "testfn.tar.gz" + assert source.append_hash_to_fn(relative_tar_gz, "123") == "testfn_123.tar.gz" + absolute_zip = "/abc/testfn.zip" + assert source.append_hash_to_fn(absolute_zip, "123") == "/abc/testfn_123.zip" + absolute_tar_gz = "/abc/testfn.tar.gz" + assert source.append_hash_to_fn(absolute_tar_gz, "123") == "/abc/testfn_123.tar.gz" + absolute_win_zip = "C:\\abc\\testfn.zip" + assert ( + source.append_hash_to_fn(absolute_win_zip, "123") == "C:\\abc\\testfn_123.zip" + ) + absolute_win_tar_gz = "C:\\abc\\testfn.tar.gz" + assert ( + source.append_hash_to_fn(absolute_win_tar_gz, "123") + == "C:\\abc\\testfn_123.tar.gz" + ) + relative_whl = "setuptools-36.4.0-py2.py3-none-any.whl" + assert ( + source.append_hash_to_fn(relative_whl, "123") + == "setuptools-36.4.0-py2.py3-none-any_123.whl" + ) + + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")} + ] reset_deduplicator() source.provide(testing_metadata) - # TODO: Can't seem to get this to work. Test passes when run by itself, but fails when run in whole - # serial suite. Some residual state, somehow. I suspect the deduplicator logic with the logger, - # but attempts to reset it have not been successful. - # assert any("No hash (md5, sha1, sha256) provided." 
in rec.message for rec in caplog.records) diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index e2b0b59855..11e43383d0 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -1,60 +1,79 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from glob import glob import json import os -import pytest import re import sys +from glob import glob +from pathlib import Path -from conda_build.render import finalize_metadata -from conda_build.conda_interface import subdir -from conda_build import api, utils - -from .utils import subpackage_dir, is_valid_dir +import pytest +from conda.base.context import context +from conda_build import api, utils +from conda_build.metadata import MetaDataTuple +from conda_build.render import finalize_metadata -@pytest.fixture(params=[dirname for dirname in os.listdir(subpackage_dir) - if is_valid_dir(subpackage_dir, dirname)]) -def recipe(request): - return os.path.join(subpackage_dir, request.param) +from .utils import get_valid_recipes, subpackage_dir @pytest.mark.slow -def test_subpackage_recipes(recipe, testing_config): - api.build(recipe, config=testing_config) +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(subpackage_dir) + ], +) +def test_subpackage_recipes(recipe: Path, testing_config): + api.build(str(recipe), config=testing_config) @pytest.mark.sanity def test_autodetect_raises_on_invalid_extension(testing_config): with pytest.raises(NotImplementedError): - api.build(os.path.join(subpackage_dir, '_invalid_script_extension'), config=testing_config) + api.build( + os.path.join(subpackage_dir, "_invalid_script_extension"), + config=testing_config, + ) # regression test for https://github.com/conda/conda-build/issues/1661 -def test_rm_rf_does_not_remove_relative_source_package_files(testing_config, monkeypatch): - recipe_dir = os.path.join(subpackage_dir, '_rm_rf_stays_within_prefix') +def test_rm_rf_does_not_remove_relative_source_package_files( + testing_config, monkeypatch +): + recipe_dir = os.path.join(subpackage_dir, "_rm_rf_stays_within_prefix") monkeypatch.chdir(recipe_dir) - bin_file_that_disappears = os.path.join('bin', 'lsfm') + bin_file_that_disappears = os.path.join("bin", "lsfm") if not os.path.isfile(bin_file_that_disappears): - with open(bin_file_that_disappears, 'w') as f: - f.write('weee') + with open(bin_file_that_disappears, "w") as f: + f.write("weee") assert os.path.isfile(bin_file_that_disappears) - api.build('conda', config=testing_config) + api.build("conda", config=testing_config) assert os.path.isfile(bin_file_that_disappears) def test_output_pkg_path_shows_all_subpackages(testing_metadata): - testing_metadata.meta['outputs'] = [{'name': 'a'}, {'name': 'b'}] + testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = api.get_output_file_paths( + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] + ) assert len(outputs) == 2 def test_subpackage_version_provided(testing_metadata): - testing_metadata.meta['outputs'] = [{'name': 'a', 'version': '2.0'}] + testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = 
api.get_output_file_paths( + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] + ) assert len(outputs) == 1 assert "a-2.0-1" in outputs[0] @@ -62,28 +81,32 @@ def test_subpackage_version_provided(testing_metadata): def test_subpackage_independent_hash(testing_metadata): # this recipe is creating 2 outputs. One is the output here, a. The other is the top-level # output, implicitly created by adding the run requirement. - testing_metadata.meta['outputs'] = [{'name': 'a', 'requirements': 'bzip2'}] - testing_metadata.meta['requirements']['run'] = ['a'] + testing_metadata.meta["outputs"] = [{"name": "a", "requirements": "bzip2"}] + testing_metadata.meta["requirements"]["run"] = ["a"] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = api.get_output_file_paths( + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] + ) assert len(outputs) == 2 assert outputs[0][-15:] != outputs[1][-15:] def test_run_exports_in_subpackage(testing_metadata): p1 = testing_metadata.copy() - p1.meta['outputs'] = [{'name': 'has_run_exports', 'run_exports': 'bzip2 1.0'}] + p1.meta["outputs"] = [{"name": "has_run_exports", "run_exports": "bzip2 1.0"}] api.build(p1, config=testing_metadata.config)[0] - # api.update_index(os.path.dirname(output), config=testing_metadata.config) p2 = testing_metadata.copy() - p2.meta['requirements']['host'] = ['has_run_exports'] + p2.meta["requirements"]["host"] = ["has_run_exports"] p2_final = finalize_metadata(p2) - assert 'bzip2 1.0.*' in p2_final.meta['requirements']['run'] + assert "bzip2 1.0.*" in p2_final.meta["requirements"]["run"] def test_subpackage_variant_override(testing_config): - recipe = os.path.join(subpackage_dir, '_variant_override') + recipe = os.path.join(subpackage_dir, "_variant_override") outputs = api.build(recipe, config=testing_config) # Three total: # one subpackage with no deps - one output @@ -91,8 +114,8 @@ def test_subpackage_variant_override(testing_config): assert len(outputs) == 3 -def test_intradependencies(testing_workdir, testing_config): - recipe = os.path.join(subpackage_dir, '_intradependencies') +def test_intradependencies(testing_config): + recipe = os.path.join(subpackage_dir, "_intradependencies") outputs1 = api.get_output_file_paths(recipe, config=testing_config) outputs1_set = {os.path.basename(p) for p in outputs1} # 2 * abc + 1 foo + 2 * (2 * abc, 1 * lib, 1 * foo) @@ -100,121 +123,155 @@ def test_intradependencies(testing_workdir, testing_config): outputs2 = api.build(recipe, config=testing_config) assert len(outputs2) == 11 outputs2_set = {os.path.basename(p) for p in outputs2} - assert outputs1_set == outputs2_set, 'pkgs differ :: get_output_file_paths()={} but build()={}'.format(outputs1_set, - outputs2_set) + assert ( + outputs1_set == outputs2_set + ), f"pkgs differ :: get_output_file_paths()={outputs1_set} but build()={outputs2_set}" -def test_git_in_output_version(testing_config): - recipe = os.path.join(subpackage_dir, '_git_in_output_version') - outputs = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) - assert len(outputs) == 1 - assert outputs[0][0].version() == '1.21.11' +def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): + recipe = os.path.join(subpackage_dir, "_git_in_output_version") + metadata_tuples = api.render( + recipe, 
config=testing_config, finalize=False, bypass_env_check=True + ) + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].version() == "1.22.0" def test_intradep_with_templated_output_name(testing_config): - recipe = os.path.join(subpackage_dir, '_intradep_with_templated_output_name') - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 - expected_names = {'test_templated_subpackage_name', 'templated_subpackage_nameabc', - 'depends_on_templated'} - assert {m.name() for (m, _, _) in metadata} == expected_names + recipe = os.path.join(subpackage_dir, "_intradep_with_templated_output_name") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 + expected_names = { + "test_templated_subpackage_name", + "templated_subpackage_nameabc", + "depends_on_templated", + } + assert {metadata.name() for metadata, _, _ in metadata_tuples} == expected_names def test_output_specific_subdir(testing_config): - recipe = os.path.join(subpackage_dir, '_output_specific_subdir') - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 - for (m, _, _) in metadata: - if m.name() in ('default_subdir', 'default_subdir_2'): - assert m.config.target_subdir == subdir - elif m.name() == 'custom_subdir': - assert m.config.target_subdir == 'linux-aarch64' + recipe = os.path.join(subpackage_dir, "_output_specific_subdir") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("default_subdir", "default_subdir_2"): + assert metadata.config.target_subdir == context.subdir + elif metadata.name() == "custom_subdir": + assert metadata.config.target_subdir == "linux-aarch64" else: - raise AssertionError("Test for output_specific_subdir written incorrectly - " - "package name not recognized") + raise AssertionError( + "Test for output_specific_subdir written incorrectly - " + "package name not recognized" + ) def test_about_metadata(testing_config): - recipe = os.path.join(subpackage_dir, '_about_metadata') - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - for m, _, _ in metadata: - if m.name() == 'abc': - assert 'summary' in m.meta['about'] - assert m.meta['about']['summary'] == 'weee' - assert 'home' not in m.meta['about'] - elif m.name() == 'def': - assert 'home' in m.meta['about'] - assert 'summary' not in m.meta['about'] - assert m.meta['about']['home'] == 'http://not.a.url' + recipe = os.path.join(subpackage_dir, "_about_metadata") + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + for metadata, _, _ in metadata_tuples: + if metadata.name() == "abc": + assert "summary" in metadata.meta["about"] + assert metadata.meta["about"]["summary"] == "weee" + assert "home" not in metadata.meta["about"] + elif metadata.name() == "def": + assert "home" in metadata.meta["about"] + assert "summary" not in metadata.meta["about"] + assert metadata.meta["about"]["home"] == "http://not.a.url" outs = api.build(recipe, config=testing_config) for out in outs: - about_meta = utils.package_has_file(out, 'info/about.json') + about_meta = utils.package_has_file(out, "info/about.json") assert about_meta info = json.loads(about_meta) - if os.path.basename(out).startswith('abc'): - assert 'summary' in info - assert info['summary'] == 'weee' - assert 'home' not in info - elif os.path.basename(out).startswith('def'): - assert 'home' in info - assert 'summary' not 
in info - assert info['home'] == 'http://not.a.url' + if os.path.basename(out).startswith("abc"): + assert "summary" in info + assert info["summary"] == "weee" + assert "home" not in info + elif os.path.basename(out).startswith("def"): + assert "home" in info + assert "summary" not in info + assert info["home"] == "http://not.a.url" @pytest.mark.slow def test_toplevel_entry_points_do_not_apply_to_subpackages(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_entry_points') + recipe_dir = os.path.join(subpackage_dir, "_entry_points") outputs = api.build(recipe_dir, config=testing_config) if utils.on_win: - script_dir = 'Scripts' - ext = '.exe' + script_dir = "Scripts" + ext = ".exe" else: - script_dir = 'bin' - ext = '' + script_dir = "bin" + ext = "" for out in outputs: fn = os.path.basename(out) - if fn.startswith('split_package_entry_points1'): - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - elif fn.startswith('split_package_entry_points2'): - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - elif fn.startswith('test_split_package_entry_points'): + if fn.startswith("split_package_entry_points1"): + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + elif fn.startswith("split_package_entry_points2"): + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + elif fn.startswith("test_split_package_entry_points"): # python commands will make sure that these are available. - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) else: - raise ValueError(f"Didn't see any of the 3 expected filenames. Filename was {fn}") + raise ValueError( + f"Didn't see any of the 3 expected filenames. 
Filename was {fn}" + ) def test_subpackage_hash_inputs(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_hash_inputs') + recipe_dir = os.path.join(subpackage_dir, "_hash_inputs") outputs = api.build(recipe_dir, config=testing_config) assert len(outputs) == 2 for out in outputs: - if os.path.basename(out).startswith('test_subpackage'): - assert utils.package_has_file(out, 'info/recipe/install-script.sh') + if os.path.basename(out).startswith("test_subpackage"): + assert utils.package_has_file(out, "info/recipe/install-script.sh") # will have full parent recipe in nested folder - assert utils.package_has_file(out, 'info/recipe/parent/build.sh') - assert not utils.package_has_file(out, 'info/recipe/meta.yaml.template') - assert utils.package_has_file(out, 'info/recipe/meta.yaml') + assert utils.package_has_file(out, "info/recipe/parent/build.sh") + assert not utils.package_has_file(out, "info/recipe/meta.yaml.template") + assert utils.package_has_file(out, "info/recipe/meta.yaml") else: - assert utils.package_has_file(out, 'info/recipe/install-script.sh') - assert utils.package_has_file(out, 'info/recipe/build.sh') + assert utils.package_has_file(out, "info/recipe/install-script.sh") + assert utils.package_has_file(out, "info/recipe/build.sh") # will have full parent recipe in base recipe folder (this is an output for the top level) - assert utils.package_has_file(out, 'info/recipe/meta.yaml.template') - assert utils.package_has_file(out, 'info/recipe/meta.yaml') + assert utils.package_has_file(out, "info/recipe/meta.yaml.template") + assert utils.package_has_file(out, "info/recipe/meta.yaml") def test_overlapping_files(testing_config, caplog): - recipe_dir = os.path.join(subpackage_dir, '_overlapping_files') + recipe_dir = os.path.join(subpackage_dir, "_overlapping_files") utils.reset_deduplicator() outputs = api.build(recipe_dir, config=testing_config) assert len(outputs) == 3 @@ -223,7 +280,7 @@ def test_overlapping_files(testing_config, caplog): @pytest.mark.sanity def test_per_output_tests(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_per_output_tests') + recipe_dir = os.path.join(subpackage_dir, "_per_output_tests") api.build(recipe_dir, config=testing_config) # out, err = capfd.readouterr() # windows echoes commands, so we see the result and the command @@ -234,97 +291,133 @@ def test_per_output_tests(testing_config): @pytest.mark.sanity def test_per_output_tests_script(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_output_test_script') + recipe_dir = os.path.join(subpackage_dir, "_output_test_script") with pytest.raises(SystemExit): api.build(recipe_dir, config=testing_config) def test_pin_compatible_in_outputs(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_pin_compatible_in_output') - m = api.render(recipe_dir, config=testing_config)[0][0] - assert any(re.search(r'numpy\s*>=.*,<.*', req) for req in m.meta['requirements']['run']) + recipe_dir = os.path.join(subpackage_dir, "_pin_compatible_in_output") + metadata = api.render(recipe_dir, config=testing_config)[0][0] + assert any( + re.search(r"numpy\s*>=.*,<.*", req) + for req in metadata.meta["requirements"]["run"] + ) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_output_named_same_as_top_level') - ms = api.render(recipe_dir, config=testing_config) + recipe_dir = os.path.join(subpackage_dir, "_output_named_same_as_top_level") + metadata_tuples = api.render(recipe_dir, config=testing_config) # 
TODO: need to decide what best behavior is for saying whether the # top-level build reqs or the output reqs for the similarly named output # win. I think you could have both, but it means rendering a new, extra, # build-only metadata in addition to all the outputs - for m, _, _ in ms: - if m.name() == 'ipp': - for env in ('build', 'host', 'run'): - assert not m.meta.get('requirements', {}).get(env) + for metadata, _, _ in metadata_tuples: + if metadata.name() == "ipp": + for env in ("build", "host", "run"): + assert not metadata.meta.get("requirements", {}).get(env) def test_subpackage_order_natural(testing_config): - recipe = os.path.join(subpackage_dir, '_order') + recipe = os.path.join(subpackage_dir, "_order") outputs = api.build(recipe, config=testing_config) assert len(outputs) == 2 def test_subpackage_order_bad(testing_config): - recipe = os.path.join(subpackage_dir, '_order_bad') + recipe = os.path.join(subpackage_dir, "_order_bad") outputs = api.build(recipe, config=testing_config) assert len(outputs) == 2 @pytest.mark.sanity def test_subpackage_script_and_files(testing_config): - recipe = os.path.join(subpackage_dir, '_script_and_files') + recipe = os.path.join(subpackage_dir, "_script_and_files") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_build_script_and_script_env(testing_config): - recipe = os.path.join(subpackage_dir, '_build_script') - os.environ['TEST_FN'] = 'test' + recipe = os.path.join(subpackage_dir, "_build_script") + os.environ["TEST_FN"] = "test" api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'darwin', reason="only implemented for mac") +def test_build_script_and_script_env_warn_empty_script_env(testing_config): + recipe = os.path.join(subpackage_dir, "_build_script_missing_var") + with pytest.warns( + UserWarning, + match="The environment variable 'TEST_FN_DOESNT_EXIST' specified in script_env is undefined", + ): + api.build(recipe, config=testing_config) + + +@pytest.mark.sanity +@pytest.mark.skipif(sys.platform != "darwin", reason="only implemented for mac") def test_strong_run_exports_from_build_applies_to_host(testing_config): - recipe = os.path.join(subpackage_dir, '_strong_run_exports_applies_from_build_to_host') + recipe = os.path.join( + subpackage_dir, "_strong_run_exports_applies_from_build_to_host" + ) api.build(recipe, config=testing_config) -@pytest.mark.parametrize("recipe", ('_line_up_python_compiled_libs', - '_line_up_python_compiled_libs_top_level_same_name_output')) +@pytest.mark.parametrize( + "recipe", + ( + "_line_up_python_compiled_libs", + "_line_up_python_compiled_libs_top_level_same_name_output", + ), +) def test_python_line_up_with_compiled_lib(recipe, testing_config): recipe = os.path.join(subpackage_dir, recipe) # we use windows so that we have 2 libxyz results (VS2008, VS2015) - ms = api.render(recipe, config=testing_config, platform='win', arch='64') + metadata_tuples = api.render( + recipe, config=testing_config, platform="win", arch="64" + ) # 2 libxyz, 3 py-xyz, 3 xyz - assert len(ms) == 8 - for m, _, _ in ms: - if m.name() in ('py-xyz' or 'xyz'): - deps = m.meta['requirements']['run'] - assert any(dep.startswith('libxyz ') and len(dep.split()) == 3 for dep in deps), (m.name(), deps) - assert any(dep.startswith('python >') for dep in deps), (m.name(), deps) - assert any(dep.startswith('zlib >') for dep in deps), (m.name(), deps) - if m.name() == 'xyz': - deps = m.meta['requirements']['run'] - assert any(dep.startswith('py-xyz ') and len(dep.split()) == 3
for dep in deps), (m.name(), deps) - assert any(dep.startswith('python >') for dep in deps), (m.name(), deps) - - -@pytest.mark.xfail(sys.platform == "win32", - reason="Defaults channel has conflicting vc packages") + assert len(metadata_tuples) == 8 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("py-xyz" or "xyz"): + deps = metadata.meta["requirements"]["run"] + assert any( + dep.startswith("libxyz ") and len(dep.split()) == 3 for dep in deps + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) + assert any(dep.startswith("zlib >") for dep in deps), ( + metadata.name(), + deps, + ) + if metadata.name() == "xyz": + deps = metadata.meta["requirements"]["run"] + assert any( + dep.startswith("py-xyz ") and len(dep.split()) == 3 for dep in deps + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) + + +@pytest.mark.xfail( + sys.platform == "win32", reason="Defaults channel has conflicting vc packages" +) def test_merge_build_host_applies_in_outputs(testing_config): - recipe = os.path.join(subpackage_dir, '_merge_build_host') - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: + recipe = os.path.join(subpackage_dir, "_merge_build_host") + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: # top level - if m.name() == 'test_build_host_merge': - assert not m.meta.get('requirements', {}).get('run') + if metadata.name() == "test_build_host_merge": + assert not metadata.meta.get("requirements", {}).get("run") # output else: - run_exports = set(m.meta.get('build', {}).get('run_exports', [])) + run_exports = set(metadata.meta.get("build", {}).get("run_exports", [])) assert len(run_exports) == 2 assert all(len(export.split()) > 1 for export in run_exports) - run_deps = set(m.meta.get('requirements', {}).get('run', [])) + run_deps = set(metadata.meta.get("requirements", {}).get("run", [])) assert len(run_deps) == 2 assert all(len(dep.split()) > 1 for dep in run_deps) @@ -333,53 +426,67 @@ def test_merge_build_host_applies_in_outputs(testing_config): @pytest.mark.sanity def test_activation_in_output_scripts(testing_config): - recipe = os.path.join(subpackage_dir, '_output_activation') + recipe = os.path.join(subpackage_dir, "_output_activation") testing_config.activate = True api.build(recipe, config=testing_config) def test_inherit_build_number(testing_config): - recipe = os.path.join(subpackage_dir, '_inherit_build_number') - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - assert 'number' in m.meta['build'], "build number was not inherited at all" - assert int(m.meta['build']['number']) == 1, "build number should have been inherited as '1'" + recipe = os.path.join(subpackage_dir, "_inherit_build_number") + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + assert ( + "number" in metadata.meta["build"] + ), "build number was not inherited at all" + assert ( + int(metadata.meta["build"]["number"]) == 1 + ), "build number should have been inherited as '1'" def test_circular_deps_cross(testing_config): - recipe = os.path.join(subpackage_dir, '_circular_deps_cross') + recipe = os.path.join(subpackage_dir, "_circular_deps_cross") # check that this does not raise an exception api.render(recipe, config=testing_config) @pytest.mark.slow def test_loops_do_not_remove_earlier_packages(testing_config): - recipe = 
os.path.join(subpackage_dir, '_xgboost_example') + recipe = os.path.join(subpackage_dir, "_xgboost_example") output_files = api.get_output_file_paths(recipe, config=testing_config) api.build(recipe, config=testing_config) - assert len(output_files) == len(glob(os.path.join(testing_config.croot, testing_config.host_subdir, "*.tar.bz2"))) + assert len(output_files) == len( + glob( + os.path.join(testing_config.croot, testing_config.host_subdir, "*.tar.bz2") + ) + ) # regression test for https://github.com/conda/conda-build/issues/3248 @pytest.mark.skipif( utils.on_win and sys.version_info <= (3, 4), - reason="Skipping it on windows and vc<14" + reason="Skipping it on windows and vc<14", ) def test_build_string_does_not_incorrectly_add_hash(testing_config): - recipe = os.path.join(subpackage_dir, '_build_string_with_variant') + recipe = os.path.join(subpackage_dir, "_build_string_with_variant") output_files = api.get_output_file_paths(recipe, config=testing_config) assert len(output_files) == 4 assert any("clang_variant-1.0-cling.tar.bz2" in f for f in output_files) assert any("clang_variant-1.0-default.tar.bz2" in f for f in output_files) -# def test_conda_pkg_v2_format(testing_config): -# recipe = os.path.join(subpackage_dir, '_alternate_type_conda2') -# output_files = api.get_output_file_paths(recipe, config=testing_config) -# assert len(output_files) == 1 -# assert output_files[0].endswith('.conda'), output_files[0] +def test_multi_outputs_without_package_version(testing_config): + # outputs without package/version is allowed + recipe = os.path.join(subpackage_dir, "_multi_outputs_without_package_version") + outputs = api.build(recipe, config=testing_config) + assert len(outputs) == 3 + assert outputs[0].endswith("a-1-0.tar.bz2") + assert outputs[1].endswith("b-2-0.tar.bz2") + assert outputs[2].endswith("c-3-0.tar.bz2") + -# out_files = api.build(recipe, config=testing_config) -# assert len(out_files) == 1 -# assert out_files[0].endswith('.conda'), out_files[0] +def test_empty_outputs_requires_package_version(testing_config): + # no outputs means package/version is required + recipe = os.path.join(subpackage_dir, "_empty_outputs_requires_package_version") + with pytest.raises(SystemExit, match="package/version missing"): + api.build(recipe, config=testing_config) diff --git a/tests/test_utils.py b/tests/test_utils.py index 5aa6469b72..70a2981203 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,35 +1,27 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import contextlib -import filelock import os import subprocess import sys +from pathlib import Path from typing import NamedTuple +import filelock import pytest +from pytest import MonkeyPatch -from conda_build.exceptions import BuildLockError import conda_build.utils as utils +from conda_build.exceptions import BuildLockError -def makefile(name, contents=""): - name = os.path.abspath(name) - path = os.path.dirname(name) - - if not os.path.exists(path): - os.makedirs(path) - - with open(name, 'w') as f: - f.write(contents) - - -@pytest.mark.skipif(utils.on_win, reason="only unix has python version in site-packages path") +@pytest.mark.skipif( + utils.on_win, reason="only unix has python version in site-packages path" +) def test_get_site_packages(): # https://github.com/conda/conda-build/issues/1055#issuecomment-250961576 # crazy unreal python version that should show up in a second - crazy_path = os.path.join('/dummy', 'lib', 'python8.2', 'site-packages') - site_packages = 
utils.get_site_packages('/dummy', '8.2') + crazy_path = os.path.join("/dummy", "lib", "python8.2", "site-packages") + site_packages = utils.get_site_packages("/dummy", "8.2") assert site_packages == crazy_path @@ -41,164 +33,92 @@ def test_prepend_sys_path(): def test_copy_source_tree(namespace_setup): - dst = os.path.join(namespace_setup, 'dest') - utils.copy_into(os.path.join(namespace_setup, 'namespace'), dst) - assert os.path.isfile(os.path.join(dst, 'package', 'module.py')) + dst = os.path.join(namespace_setup, "dest") + utils.copy_into(os.path.join(namespace_setup, "namespace"), dst) + assert os.path.isfile(os.path.join(dst, "package", "module.py")) def test_merge_namespace_trees(namespace_setup): - dep = os.path.join(namespace_setup, 'other_tree', 'namespace', 'package', 'dependency.py') - makefile(dep) - - utils.copy_into(os.path.join(namespace_setup, 'other_tree'), namespace_setup) - assert os.path.isfile(os.path.join(namespace_setup, 'namespace', 'package', - 'module.py')) + dep = Path(namespace_setup, "other_tree", "namespace", "package", "dependency.py") + dep.parent.mkdir(parents=True, exist_ok=True) + dep.touch() + + utils.copy_into(os.path.join(namespace_setup, "other_tree"), namespace_setup) + assert os.path.isfile( + os.path.join(namespace_setup, "namespace", "package", "module.py") + ) assert os.path.isfile(dep) -@pytest.fixture(scope='function') -def namespace_setup(testing_workdir, request): - namespace = os.path.join(testing_workdir, 'namespace') - package = os.path.join(namespace, 'package') - makefile(os.path.join(package, "module.py")) +@pytest.fixture(scope="function") +def namespace_setup(testing_workdir: os.PathLike) -> os.PathLike: + module = Path(testing_workdir, "namespace", "package", "module.py") + module.parent.mkdir(parents=True, exist_ok=True) + module.touch() return testing_workdir @pytest.mark.sanity -def test_disallow_merge_conflicts(namespace_setup, testing_config): - duplicate = os.path.join(namespace_setup, 'dupe', 'namespace', 'package', 'module.py') - makefile(duplicate) +def test_disallow_merge_conflicts(namespace_setup: os.PathLike): + duplicate = Path(namespace_setup, "dupe", "namespace", "package", "module.py") + duplicate.parent.mkdir(parents=True, exist_ok=True) + duplicate.touch() + with pytest.raises(IOError): - utils.merge_tree(os.path.dirname(duplicate), os.path.join(namespace_setup, 'namespace', - 'package')) + utils.merge_tree( + os.path.dirname(duplicate), + os.path.join(namespace_setup, "namespace", "package"), + ) @pytest.mark.sanity def test_disallow_in_tree_merge(testing_workdir): - with open('testfile', 'w') as f: + with open("testfile", "w") as f: f.write("test") with pytest.raises(AssertionError): - utils.merge_tree(testing_workdir, os.path.join(testing_workdir, 'subdir')) - - -def test_relative_default(): - for f, r in [ - ('bin/python', '../lib'), - ('lib/libhdf5.so', '.'), - ('lib/python2.6/foobar.so', '..'), - ('lib/python2.6/lib-dynload/zlib.so', '../..'), - ('lib/python2.6/site-packages/pyodbc.so', '../..'), - ('lib/python2.6/site-packages/bsdiff4/core.so', '../../..'), - ('xyz', './lib'), - ('bin/somedir/cmd', '../../lib'), - ]: - assert utils.relative(f) == r - - -def test_relative_lib(): - for f, r in [ - ('bin/python', '../lib'), - ('lib/libhdf5.so', '.'), - ('lib/python2.6/foobar.so', '..'), - ('lib/python2.6/lib-dynload/zlib.so', '../..'), - ('lib/python2.6/site-packages/pyodbc.so', '../..'), - ('lib/python2.6/site-packages/bsdiff3/core.so', '../../..'), - ('xyz', './lib'), - ('bin/somedir/cmd', '../../lib'), - 
('bin/somedir/somedir2/cmd', '../../../lib'), - ]: - assert utils.relative(f, 'lib') == r - - -def test_relative_subdir(): - for f, r in [ - ('lib/libhdf5.so', './sub'), - ('lib/sub/libhdf5.so', '.'), - ('bin/python', '../lib/sub'), - ('bin/somedir/cmd', '../../lib/sub'), - ]: - assert utils.relative(f, 'lib/sub') == r - - -def test_relative_prefix(): - for f, r in [ - ('xyz', '.'), - ('a/xyz', '..'), - ('a/b/xyz', '../..'), - ('a/b/c/xyz', '../../..'), - ('a/b/c/d/xyz', '../../../..'), - ]: - assert utils.relative(f, '.') == r - - -def test_relative_2(): - for f, r in [ - ('a/b/c/d/libhdf5.so', '../..'), - ('a/b/c/libhdf5.so', '..'), - ('a/b/libhdf5.so', '.'), - ('a/libhdf5.so', './b'), - ('x/x/libhdf5.so', '../../a/b'), - ('x/b/libhdf5.so', '../../a/b'), - ('x/libhdf5.so', '../a/b'), - ('libhdf5.so', './a/b'), - ]: - assert utils.relative(f, 'a/b') == r - - -def test_relative_3(): - for f, r in [ - ('a/b/c/d/libhdf5.so', '..'), - ('a/b/c/libhdf5.so', '.'), - ('a/b/libhdf5.so', './c'), - ('a/libhdf5.so', './b/c'), - ('libhdf5.so', './a/b/c'), - ('a/b/x/libhdf5.so', '../c'), - ('a/x/x/libhdf5.so', '../../b/c'), - ('x/x/x/libhdf5.so', '../../../a/b/c'), - ('x/x/libhdf5.so', '../../a/b/c'), - ('x/libhdf5.so', '../a/b/c'), - ]: - assert utils.relative(f, 'a/b/c') == r - - -def test_relative_4(): - for f, r in [ - ('a/b/c/d/libhdf5.so', '.'), - ('a/b/c/x/libhdf5.so', '../d'), - ('a/b/x/x/libhdf5.so', '../../c/d'), - ('a/x/x/x/libhdf5.so', '../../../b/c/d'), - ('x/x/x/x/libhdf5.so', '../../../../a/b/c/d'), - ]: - assert utils.relative(f, 'a/b/c/d') == r + utils.merge_tree(testing_workdir, os.path.join(testing_workdir, "subdir")) def test_expand_globs(testing_workdir): - sub_dir = os.path.join(testing_workdir, 'sub1') + sub_dir = os.path.join(testing_workdir, "sub1") os.mkdir(sub_dir) - ssub_dir = os.path.join(sub_dir, 'ssub1') + ssub_dir = os.path.join(sub_dir, "ssub1") os.mkdir(ssub_dir) - files = ['abc', 'acb', - os.path.join(sub_dir, 'def'), - os.path.join(sub_dir, 'abc'), - os.path.join(ssub_dir, 'ghi'), - os.path.join(ssub_dir, 'abc')] + files = [ + "abc", + "acb", + os.path.join(sub_dir, "def"), + os.path.join(sub_dir, "abc"), + os.path.join(ssub_dir, "ghi"), + os.path.join(ssub_dir, "abc"), + ] for f in files: - with open(f, 'w') as _f: - _f.write('weee') + with open(f, "w") as _f: + _f.write("weee") # Test dirs - exp = utils.expand_globs([os.path.join('sub1', 'ssub1')], testing_workdir) - assert sorted(exp) == sorted([os.path.sep.join(('sub1', 'ssub1', 'ghi')), - os.path.sep.join(('sub1', 'ssub1', 'abc'))]) + exp = utils.expand_globs([os.path.join("sub1", "ssub1")], testing_workdir) + assert sorted(exp) == sorted( + [ + os.path.sep.join(("sub1", "ssub1", "ghi")), + os.path.sep.join(("sub1", "ssub1", "abc")), + ] + ) # Test files - exp = sorted(utils.expand_globs(['abc', files[2]], testing_workdir)) - assert exp == sorted(['abc', os.path.sep.join(('sub1', 'def'))]) + exp = sorted(utils.expand_globs(["abc", files[2]], testing_workdir)) + assert exp == sorted(["abc", os.path.sep.join(("sub1", "def"))]) # Test globs - exp = sorted(utils.expand_globs(['a*', '*/*f', '**/*i'], testing_workdir)) - assert exp == sorted(['abc', 'acb', os.path.sep.join(('sub1', 'def')), - os.path.sep.join(('sub1', 'ssub1', 'ghi'))]) + exp = sorted(utils.expand_globs(["a*", "*/*f", "**/*i"], testing_workdir)) + assert exp == sorted( + [ + "abc", + "acb", + os.path.sep.join(("sub1", "def")), + os.path.sep.join(("sub1", "ssub1", "ghi")), + ] + ) def test_filter_files(): @@ -219,41 +139,51 @@ def test_filter_files(): 
# Files that should *not* be filtered out. # Example of valid 'x.git' directory: # lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile - files_list = ['a', 'x.git/a', 'something/x.git/a', - 'x.git\\a', 'something\\x.git\\a', 'something/.gitmodules', - 'some/template/directory/.gitignore', 'another.lab', - 'miniconda_trashcan.py', 'conda_trash_avoider.py'] - assert len(utils.filter_files(files_list, '')) == len(files_list) + files_list = [ + "a", + "x.git/a", + "something/x.git/a", + "x.git\\a", + "something\\x.git\\a", + "something/.gitmodules", + "some/template/directory/.gitignore", + "another.lab", + "miniconda_trashcan.py", + "conda_trash_avoider.py", + ] + assert len(utils.filter_files(files_list, "")) == len(files_list) @pytest.mark.serial def test_logger_filtering(caplog, capfd): import logging + log = utils.get_logger(__name__, level=logging.DEBUG) - log.debug('test debug message') - log.info('test info message') - log.info('test duplicate message') - log.info('test duplicate message') - log.warn('test warn message') - log.error('test error message') + log.debug("test debug message") + log.info("test info message") + log.info("test duplicate message") + log.info("test duplicate message") + log.warn("test warn message") + log.error("test error message") out, err = capfd.readouterr() - assert 'test debug message' in out - assert 'test info message' in out - assert 'test warn message' not in out - assert 'test error message' not in out - assert 'test debug message' not in err - assert 'test info message' not in err - assert 'test warn message' in err - assert 'test error message' in err - assert caplog.text.count('duplicate') == 1 + assert "test debug message" in out + assert "test info message" in out + assert "test warn message" not in out + assert "test error message" not in out + assert "test debug message" not in err + assert "test info message" not in err + assert "test warn message" in err + assert "test error message" in err + assert caplog.text.count("duplicate") == 1 log.removeHandler(logging.StreamHandler(sys.stdout)) log.removeHandler(logging.StreamHandler(sys.stderr)) -def test_logger_config_from_file(testing_workdir, caplog, capfd, mocker): - test_file = os.path.join(testing_workdir, 'build_log_config.yaml') - with open(test_file, 'w') as f: - f.write(""" +def test_logger_config_from_file(testing_workdir, capfd, mocker): + test_file = os.path.join(testing_workdir, "build_log_config.yaml") + with open(test_file, "w") as f: + f.write( + f""" version: 1 formatters: simple: @@ -265,214 +195,210 @@ def test_logger_config_from_file(testing_workdir, caplog, capfd, mocker): formatter: simple stream: ext://sys.stdout loggers: - {}: + {__name__}: level: WARN handlers: [console] propagate: no root: level: DEBUG handlers: [console] -""".format(__name__)) - cc_conda_build = mocker.patch.object(utils, 'cc_conda_build') - cc_conda_build.get.return_value = test_file +""" + ) + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={"log_config_file": test_file}, + ) log = utils.get_logger(__name__) # default log level is INFO, but our config file should set level to DEBUG - log.warn('test message') + log.warn("test message") # output should have gone to stdout according to config above. out, err = capfd.readouterr() - assert 'test message' in out + assert "test message" in out # make sure that it is not in stderr - this is testing override of defaults. 
- assert 'test message' not in err + assert "test message" not in err def test_ensure_valid_spec(): - assert utils.ensure_valid_spec('python') == 'python' - assert utils.ensure_valid_spec('python 2.7') == 'python 2.7.*' - assert utils.ensure_valid_spec('python 2.7.2') == 'python 2.7.2.*' - assert utils.ensure_valid_spec('python 2.7.12 0') == 'python 2.7.12 0' - assert utils.ensure_valid_spec('python >=2.7,<2.8') == 'python >=2.7,<2.8' - assert utils.ensure_valid_spec('numpy x.x') == 'numpy x.x' - assert utils.ensure_valid_spec(utils.MatchSpec('numpy x.x')) == utils.MatchSpec('numpy x.x') + assert utils.ensure_valid_spec("python") == "python" + assert utils.ensure_valid_spec("python 3.8") == "python 3.8.*" + assert utils.ensure_valid_spec("python 3.8.2") == "python 3.8.2.*" + assert utils.ensure_valid_spec("python 3.8.10 0") == "python 3.8.10 0" + assert utils.ensure_valid_spec("python >=3.8,<3.9") == "python >=3.8,<3.9" + assert utils.ensure_valid_spec("numpy x.x") == "numpy x.x" + assert utils.ensure_valid_spec(utils.MatchSpec("numpy x.x")) == utils.MatchSpec( + "numpy x.x" + ) def test_insert_variant_versions(testing_metadata): - testing_metadata.meta['requirements']['build'] = ['python', 'numpy 1.13'] - testing_metadata.config.variant = {'python': '2.7', 'numpy': '1.11'} - utils.insert_variant_versions(testing_metadata.meta.get('requirements', {}), - testing_metadata.config.variant, 'build') + testing_metadata.meta["requirements"]["build"] = ["python", "numpy 1.13"] + testing_metadata.config.variant = {"python": "3.8", "numpy": "1.11"} + utils.insert_variant_versions( + testing_metadata.meta.get("requirements", {}), + testing_metadata.config.variant, + "build", + ) # this one gets inserted - assert 'python 2.7.*' in testing_metadata.meta['requirements']['build'] + assert "python 3.8.*" in testing_metadata.meta["requirements"]["build"] # this one should not be altered - assert 'numpy 1.13' in testing_metadata.meta['requirements']['build'] + assert "numpy 1.13" in testing_metadata.meta["requirements"]["build"] # the overall length does not change - assert len(testing_metadata.meta['requirements']['build']) == 2 + assert len(testing_metadata.meta["requirements"]["build"]) == 2 def test_subprocess_stats_call(testing_workdir): stats = {} - utils.check_call_env(['hostname'], stats=stats, cwd=testing_workdir) + utils.check_call_env(["hostname"], stats=stats, cwd=testing_workdir) assert stats stats = {} - out = utils.check_output_env(['hostname'], stats=stats, cwd=testing_workdir) + out = utils.check_output_env(["hostname"], stats=stats, cwd=testing_workdir) assert out assert stats with pytest.raises(subprocess.CalledProcessError): - utils.check_call_env(['bash', '-c', 'exit 1'], cwd=testing_workdir) + utils.check_call_env(["bash", "-c", "exit 1"], cwd=testing_workdir) def test_try_acquire_locks(testing_workdir): # Acquiring two unlocked locks should succeed. - lock1 = filelock.FileLock(os.path.join(testing_workdir, 'lock1')) - lock2 = filelock.FileLock(os.path.join(testing_workdir, 'lock2')) + lock1 = filelock.FileLock(os.path.join(testing_workdir, "lock1")) + lock2 = filelock.FileLock(os.path.join(testing_workdir, "lock2")) with utils.try_acquire_locks([lock1, lock2], timeout=1): pass # Acquiring the same lock twice should fail. - lock1_copy = filelock.FileLock(os.path.join(testing_workdir, 'lock1')) + lock1_copy = filelock.FileLock(os.path.join(testing_workdir, "lock1")) # Also verify that the error message contains the word "lock", since we rely # on this elsewhere. 
- with pytest.raises(BuildLockError, match='Failed to acquire all locks'): + with pytest.raises(BuildLockError, match="Failed to acquire all locks"): with utils.try_acquire_locks([lock1, lock1_copy], timeout=1): pass def test_get_lock(testing_workdir): - lock1 = utils.get_lock(os.path.join(testing_workdir, 'lock1')) - lock2 = utils.get_lock(os.path.join(testing_workdir, 'lock2')) + lock1 = utils.get_lock(os.path.join(testing_workdir, "lock1")) + lock2 = utils.get_lock(os.path.join(testing_workdir, "lock2")) # Different folders should get different lock files. assert lock1.lock_file != lock2.lock_file # Same folder should get the same lock file. - lock1_copy = utils.get_lock(os.path.join(testing_workdir, 'lock1')) + lock1_copy = utils.get_lock(os.path.join(testing_workdir, "lock1")) assert lock1.lock_file == lock1_copy.lock_file # ...even when not normalized - lock1_unnormalized = utils.get_lock(os.path.join(testing_workdir, 'foo', '..', 'lock1')) + lock1_unnormalized = utils.get_lock( + os.path.join(testing_workdir, "foo", "..", "lock1") + ) assert lock1.lock_file == lock1_unnormalized.lock_file -@contextlib.contextmanager -def _generate_tmp_tree(): - # dirA - # |\- dirB - # | |\- fileA - # | \-- fileB - # \-- dirC - # |\- fileA - # \-- fileB - import shutil - import tempfile - - try: - tmp = os.path.realpath(os.path.normpath(tempfile.mkdtemp())) - - dA = os.path.join(tmp, "dirA") - dB = os.path.join(dA, "dirB") - dC = os.path.join(dA, "dirC") - for d in (dA, dB, dC): - os.mkdir(d) - - f1 = os.path.join(dB, "fileA") - f2 = os.path.join(dB, "fileB") - f3 = os.path.join(dC, "fileA") - f4 = os.path.join(dC, "fileB") - for f in (f1, f2, f3, f4): - makefile(f) - - yield tmp, (dA, dB, dC), (f1, f2, f3, f4) - finally: - shutil.rmtree(tmp) - - -def test_rec_glob(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - assert sorted(utils.rec_glob(tmp, "fileA")) == [f1, f3] - assert sorted(utils.rec_glob(tmp, ("fileA", "fileB"), ignores="dirB")) == [f3, f4] - assert sorted(utils.rec_glob(tmp, "fileB", ignores=("dirC",))) == [f2] - - -def test_find_recipe(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") - f6 = os.path.join(dA, "meta.yml") - f7 = os.path.join(dB, "conda.yaml") - f8 = os.path.join(dC, "conda.yml") - - # check that each of these are valid recipes - for f in (f5, f6, f7, f8): - makefile(f) - assert utils.find_recipe(tmp) == f - os.remove(f) - - -def test_find_recipe_relative(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(dA, "meta.yaml") - makefile(f5) - - # check that even when given a relative recipe path we still return - # the absolute path - saved = os.getcwd() - os.chdir(tmp) - try: - assert utils.find_recipe("dirA") == f5 - finally: - os.chdir(saved) - - -def test_find_recipe_no_meta(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - # no meta files in tmp - with pytest.raises(IOError): - utils.find_recipe(tmp) - - -def test_find_recipe_file(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") - makefile(f5) - # file provided is valid meta - assert utils.find_recipe(f5) == f5 - - -def test_find_recipe_file_bad(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - # file provided is not valid meta - with pytest.raises(IOError): - utils.find_recipe(f1) - - -def test_find_recipe_multipe_base(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") 
- f6 = os.path.join(dB, "meta.yaml") - f7 = os.path.join(dC, "conda.yaml") - for f in (f5, f6, f7): - makefile(f) - # multiple meta files, use the one in base level - assert utils.find_recipe(tmp) == f5 - - -def test_find_recipe_multipe_bad(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(dB, "meta.yaml") - f6 = os.path.join(dC, "conda.yaml") - for f in (f5, f6): - makefile(f) - - # nothing in base - with pytest.raises(IOError): - utils.find_recipe(tmp) - - f7 = os.path.join(tmp, "meta.yaml") - f8 = os.path.join(tmp, "conda.yaml") - for f in (f7, f8): - makefile(f) - - # too many in base - with pytest.raises(IOError): - utils.find_recipe(tmp) +def test_rec_glob(tmp_path: Path): + (dirA := tmp_path / "dirA").mkdir() + (dirB := tmp_path / "dirB").mkdir() + + (path1 := dirA / "fileA").touch() + (path2 := dirA / "fileB").touch() + (path3 := dirB / "fileA").touch() + (path4 := dirB / "fileB").touch() + + assert {str(path1), str(path3)} == set(utils.rec_glob(tmp_path, "fileA")) + assert {str(path3), str(path4)} == set( + utils.rec_glob( + tmp_path, + ("fileA", "fileB"), + ignores="dirA", + ) + ) + assert {str(path2)} == set(utils.rec_glob(tmp_path, "fileB", ignores=["dirB"])) + + +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def test_find_recipe(tmp_path: Path, file: str): + # check that each of these are valid recipes + for path in ( + tmp_path / file, + tmp_path / "dirA" / file, + tmp_path / "dirA" / "dirB" / file, + tmp_path / "dirA" / "dirC" / file, + ): + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + assert path.samefile(utils.find_recipe(tmp_path)) + path.unlink() + + +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def test_find_recipe_relative(tmp_path: Path, monkeypatch: MonkeyPatch, file: str): + (dirA := tmp_path / "dirA").mkdir() + (path := dirA / file).touch() + + # check that even when given a relative recipe path we still return + # the absolute path + monkeypatch.chdir(tmp_path) + assert path.samefile(utils.find_recipe("dirA")) + + +def test_find_recipe_no_meta(tmp_path: Path): + # no recipe in tmp_path + with pytest.raises(IOError): + utils.find_recipe(tmp_path) + + +def test_find_recipe_file(tmp_path: Path): + # provided recipe is valid + (path := tmp_path / "meta.yaml").touch() + assert path.samefile(utils.find_recipe(path)) + + +def test_find_recipe_file_bad(tmp_path: Path): + # missing recipe is invalid + path = tmp_path / "not_a_recipe" + with pytest.raises(IOError): + utils.find_recipe(path) + + # provided recipe is invalid + path.touch() + with pytest.raises(IOError): + utils.find_recipe(path) + + +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def test_find_recipe_multipe_base(tmp_path: Path, file: str): + (dirA := tmp_path / "dirA").mkdir() + (dirB := dirA / "dirB").mkdir() + (dirC := dirA / "dirC").mkdir() + + (path1 := tmp_path / file).touch() + (dirA / file).touch() + (dirB / file).touch() + (dirC / file).touch() + + # multiple recipe, use the one at the top level + assert path1.samefile(utils.find_recipe(tmp_path)) + + +@pytest.mark.parametrize("stem", ["meta", "conda"]) +def test_find_recipe_multipe_bad(tmp_path: Path, stem: str): + (dirA := tmp_path / "dirA").mkdir() + (dirB := dirA / "dirB").mkdir() + (dirC := dirA / "dirC").mkdir() + + # create multiple nested recipes at the same depth + (dirB / f"{stem}.yml").touch() + (dirC / f"{stem}.yaml").touch() + + # too many equal 
priority recipes found + with pytest.raises(IOError): + utils.find_recipe(tmp_path) + + # create multiple recipes at the top level + (tmp_path / f"{stem}.yml").touch() + (tmp_path / f"{stem}.yaml").touch() + + # too many recipes in the top level + with pytest.raises(IOError): + utils.find_recipe(tmp_path) class IsCondaPkgTestData(NamedTuple): @@ -484,27 +410,22 @@ class IsCondaPkgTestData(NamedTuple): IS_CONDA_PKG_DATA = ( IsCondaPkgTestData( - value='aws-c-common-0.4.57-hb1e8313_1.tar.bz2', + value="aws-c-common-0.4.57-hb1e8313_1.tar.bz2", expected=True, is_dir=False, - create=True + create=True, ), IsCondaPkgTestData( - value='aws-c-common-0.4.57-hb1e8313_1.tar.bz2', + value="aws-c-common-0.4.57-hb1e8313_1.tar.bz2", expected=False, is_dir=False, - create=False - ), - IsCondaPkgTestData( - value='somedir', - expected=False, - is_dir=True, - create=False + create=False, ), + IsCondaPkgTestData(value="somedir", expected=False, is_dir=True, create=False), ) -@pytest.mark.parametrize('value,expected,is_dir,create', IS_CONDA_PKG_DATA) +@pytest.mark.parametrize("value,expected,is_dir,create", IS_CONDA_PKG_DATA) def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: bool): if create: value = os.path.join(tmpdir, value) @@ -515,3 +436,25 @@ def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: fp.write("test") assert utils.is_conda_pkg(value) == expected + + +def test_prefix_files(tmp_path: Path): + # all files within the prefix are found + (prefix := tmp_path / "prefix1").mkdir() + (file1 := prefix / "file1").touch() + (dirA := prefix / "dirA").mkdir() + (file2 := dirA / "file2").touch() + (dirB := prefix / "dirB").mkdir() + (file3 := dirB / "file3").touch() + + # files outside of the prefix are not found + (prefix2 := tmp_path / "prefix2").mkdir() + (prefix2 / "file4").touch() + (dirC := prefix2 / "dirC").mkdir() + (dirC / "file5").touch() + + # even if they are symlinked + (link1 := prefix / "dirC").symlink_to(dirC) + + paths = {str(path.relative_to(prefix)) for path in (file1, file2, file3, link1)} + assert paths == utils.prefix_files(str(prefix)) diff --git a/tests/test_variants.py b/tests/test_variants.py index e8ace350a7..3c79e36e16 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -1,134 +1,175 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict -import os import json +import os +import platform import re import sys +from pathlib import Path import pytest import yaml +from conda.common.compat import on_mac + +from conda_build import api, exceptions +from conda_build.utils import ensure_list, package_has_file +from conda_build.variants import ( + combine_specs, + dict_of_lists_to_list_of_dicts, + filter_combined_spec_to_used_keys, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, + get_package_variants, + get_vars, + validate_spec, +) + +from .utils import variants_dir + + +@pytest.mark.parametrize( + "variants", + [ + (["1.2", "3.4"], "5.6"), + ("1.2", ["3.4", "5.6"]), + ], +) +def test_spec_priority_overriding(variants): + name = "package" + + first, second = variants + ordered_specs = { + "first": {name: first}, + "second": {name: second}, + } -from conda_build import api, exceptions, variants -from conda_build.utils import package_has_file - -thisdir = os.path.dirname(__file__) -recipe_dir = os.path.join(thisdir, 'test-recipes', 'variants') - - -def test_later_spec_priority(single_version, no_numpy_version): - # override a 
single key - specs = OrderedDict() - specs['no_numpy'] = no_numpy_version - specs['single_ver'] = single_version - - combined_spec = variants.combine_specs(specs) - assert len(combined_spec) == 2 - assert combined_spec["python"] == ["2.7.*"] - - # keep keys that are not overwritten - specs = OrderedDict() - specs['single_ver'] = single_version - specs['no_numpy'] = no_numpy_version - combined_spec = variants.combine_specs(specs) - assert len(combined_spec) == 2 - assert len(combined_spec["python"]) == 2 - - -def test_get_package_variants_from_file(testing_workdir, testing_config, no_numpy_version): - with open('variant_example.yaml', 'w') as f: - yaml.dump(no_numpy_version, f, default_flow_style=False) - testing_config.variant_config_files = [os.path.join(testing_workdir, 'variant_example.yaml')] + combined = combine_specs(ordered_specs)[name] + expected = ensure_list(second) + assert len(combined) == len(expected) + assert combined == expected + + +@pytest.mark.parametrize( + "as_yaml", + [ + pytest.param(True, id="yaml"), + pytest.param(False, id="dict"), + ], +) +def test_python_variants(testing_workdir, testing_config, as_yaml): + """Python variants are treated differently in conda recipes. Instead of being pinned against a + specific version they are converted into version ranges. E.g.: + + python 3.5 -> python >=3.5,<3.6.0a0 + otherPackages 3.5 -> otherPackages 3.5 + """ + variants = {"python": ["3.11", "3.12"]} testing_config.ignore_system_config = True - metadata = api.render(os.path.join(thisdir, "variant_recipe"), - no_download_source=False, config=testing_config) - # one for each Python version. Numpy is not strictly pinned and should present only 1 dimension - assert len(metadata) == 2 - assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 - assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 + # write variants to disk + if as_yaml: + variants_path = Path(testing_workdir, "variant_example.yaml") + variants_path.write_text(yaml.dump(variants, default_flow_style=False)) + testing_config.variant_config_files = [str(variants_path)] + + # render the metadata + metadata_tuples = api.render( + os.path.join(variants_dir, "variant_recipe"), + no_download_source=False, + config=testing_config, + # if variants were written to disk then don't pass it along + variants=None if as_yaml else variants, + ) -def test_use_selectors_in_variants(testing_workdir, testing_config): - testing_config.variant_config_files = [os.path.join(recipe_dir, - 'selector_conda_build_config.yaml')] - variants.get_package_variants(testing_workdir, testing_config) + # we should have one package/metadata per python version + assert len(metadata_tuples) == 2 + # there should only be one run requirement for each package/metadata + assert len(metadata_tuples[0][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[1][0].meta["requirements"]["run"]) == 1 + # the run requirements should be python ranges + assert { + *metadata_tuples[0][0].meta["requirements"]["run"], + *metadata_tuples[1][0].meta["requirements"]["run"], + } == {"python >=3.11,<3.12.0a0", "python >=3.12,<3.13.0a0"} -def test_get_package_variants_from_dictionary_of_lists(testing_config, no_numpy_version): - testing_config.ignore_system_config = True - metadata = api.render(os.path.join(thisdir, "variant_recipe"), - no_download_source=False, config=testing_config, - variants=no_numpy_version) - # one for each Python version. 
Numpy is not strictly pinned and should present only 1 dimension - assert len(metadata) == 2, metadata - assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 - assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 - - -@pytest.mark.xfail(reason="Strange failure 7/19/2017. Can't reproduce locally. Test runs fine " - "with parallelism and everything. Test fails reproducibly on CI, but logging " - "into appveyor after failed run, test passes. =(") -def test_variant_with_ignore_numpy_version_reduces_matrix(numpy_version_ignored): - # variants are defined in yaml file in this folder - # there are two python versions and two numpy versions. However, because numpy is not pinned, - # the numpy dimensions should get collapsed. - recipe = os.path.join(recipe_dir, '03_numpy_matrix') - metadata = api.render(recipe, variants=numpy_version_ignored, finalize=False) - assert len(metadata) == 2, metadata +def test_use_selectors_in_variants(testing_workdir, testing_config): + testing_config.variant_config_files = [ + os.path.join(variants_dir, "selector_conda_build_config.yaml") + ] + get_package_variants(testing_workdir, testing_config) + + +@pytest.mark.xfail( + reason=( + "7/19/2017 Strange failure. Can't reproduce locally. Test runs fine " + "with parallelism and everything. Test fails reproducibly on CI, but logging " + "into appveyor after failed run, test passes." + "1/9/2023 ignore_version doesn't work as advertised." + ) +) +def test_variant_with_ignore_version_reduces_matrix(): + metadata_tuples = api.render( + os.path.join(variants_dir, "03_ignore_version_reduces_matrix"), + variants={ + "packageA": ["1.2", "3.4"], + "packageB": ["5.6", "7.8"], + # packageB is ignored so that dimension should get collapsed + "ignore_version": "packageB", + }, + finalize=False, + ) + assert len(metadata_tuples) == 2 def test_variant_with_numpy_pinned_has_matrix(): - recipe = os.path.join(recipe_dir, '04_numpy_matrix_pinned') - metadata = api.render(recipe, finalize=False) - assert len(metadata) == 4 + recipe = os.path.join(variants_dir, "04_numpy_matrix_pinned") + metadata_tuples = api.render(recipe, finalize=False) + assert len(metadata_tuples) == 4 def test_pinning_in_build_requirements(): - recipe = os.path.join(recipe_dir, '05_compatible') + recipe = os.path.join(variants_dir, "05_compatible") metadata = api.render(recipe)[0][0] - build_requirements = metadata.meta['requirements']['build'] + build_requirements = metadata.meta["requirements"]["build"] # make sure that everything in the build deps is exactly pinned - assert all(len(req.split(' ')) == 3 for req in build_requirements) + assert all(len(req.split(" ")) == 3 for req in build_requirements) @pytest.mark.sanity def test_no_satisfiable_variants_raises_error(): - recipe = os.path.join(recipe_dir, '01_basic_templating') + recipe = os.path.join(variants_dir, "01_basic_templating") with pytest.raises(exceptions.DependencyNeedsBuildingError): api.render(recipe, permit_unsatisfiable_variants=False) - - # the packages are not installable anyway, so this should show a warning that recipe can't - # be finalized api.render(recipe, permit_unsatisfiable_variants=True) - # out, err = capsys.readouterr() - # print(out) - # print(err) - # print(caplog.text) - # assert "Returning non-final recipe; one or more dependencies was unsatisfiable" in err def test_zip_fields(): """Zipping keys together allows people to tie different versions as sets of 
combinations.""" - v = {'python': ['2.7', '3.5'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = variants.dict_of_lists_to_list_of_dicts(v) - assert len(ld) == 2 - assert ld[0]['python'] == '2.7' - assert ld[0]['vc'] == '9' - assert ld[1]['python'] == '3.5' - assert ld[1]['vc'] == '14' + variants = { + "packageA": ["1.2", "3.4"], + "packageB": ["5", "6"], + "zip_keys": [("packageA", "packageB")], + } + zipped = dict_of_lists_to_list_of_dicts(variants) + assert len(zipped) == 2 + assert zipped[0]["packageA"] == "1.2" + assert zipped[0]["packageB"] == "5" + assert zipped[1]["packageA"] == "3.4" + assert zipped[1]["packageB"] == "6" # allow duplication of values, but lengths of lists must always match - v = {'python': ['2.7', '2.7'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = variants.dict_of_lists_to_list_of_dicts(v) - assert len(ld) == 2 - assert ld[0]['python'] == '2.7' - assert ld[0]['vc'] == '9' - assert ld[1]['python'] == '2.7' - assert ld[1]['vc'] == '14' + variants = { + "packageA": ["1.2", "1.2"], + "packageB": ["5", "6"], + "zip_keys": [("packageA", "packageB")], + } + zipped = dict_of_lists_to_list_of_dicts(variants) + assert len(zipped) == 2 + assert zipped[0]["packageA"] == "1.2" + assert zipped[0]["packageB"] == "5" + assert zipped[1]["packageA"] == "1.2" + assert zipped[1]["packageB"] == "6" def test_validate_spec(): @@ -138,7 +179,7 @@ def test_validate_spec(): """ spec = { # normal expansions - "foo": [2.7, 3.7, 3.8], + "foo": [1.2, 3.4], # zip_keys are the values that need to be expanded as a set "zip_keys": [["bar", "baz"], ["qux", "quux", "quuz"]], "bar": [1, 2, 3], @@ -151,82 +192,93 @@ def test_validate_spec(): "corge": 42, } # valid spec - variants.validate_spec("spec", spec) + validate_spec("spec", spec) spec2 = dict(spec) spec2["bad-char"] = "bad-char" # invalid characters with pytest.raises(ValueError): - variants.validate_spec("spec[bad_char]", spec2) + validate_spec("spec[bad_char]", spec2) spec3 = dict(spec, zip_keys="bad_zip_keys") # bad zip_keys with pytest.raises(ValueError): - variants.validate_spec("spec[bad_zip_keys]", spec3) + validate_spec("spec[bad_zip_keys]", spec3) spec4 = dict(spec, zip_keys=[["bar", "baz"], ["qux", "quux"], ["quuz", "missing"]]) # zip_keys' zip_group has key missing from spec with pytest.raises(ValueError): - variants.validate_spec("spec[missing_key]", spec4) + validate_spec("spec[missing_key]", spec4) spec5 = dict(spec, zip_keys=[["bar", "baz"], ["qux", "quux", "quuz"], ["quuz"]]) # zip_keys' zip_group has duplicate key with pytest.raises(ValueError): - variants.validate_spec("spec[duplicate_key]", spec5) + validate_spec("spec[duplicate_key]", spec5) spec6 = dict(spec, baz=[4, 6]) # zip_keys' zip_group key fields have same length with pytest.raises(ValueError): - variants.validate_spec("spec[duplicate_key]", spec6) + validate_spec("spec[duplicate_key]", spec6) def test_cross_compilers(): - recipe = os.path.join(recipe_dir, '09_cross') - ms = api.render(recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True) - assert len(ms) == 3 + recipe = os.path.join(variants_dir, "09_cross") + metadata_tuples = api.render( + recipe, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + ) + assert len(metadata_tuples) == 3 def test_variants_in_output_names(): - recipe = os.path.join(recipe_dir, '11_variant_output_names') + recipe = os.path.join(variants_dir, "11_variant_output_names") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 4 -def 
test_variants_in_versions_with_setup_py_data(testing_workdir): - recipe = os.path.join(recipe_dir, '12_variant_versions') +def test_variants_in_versions_with_setup_py_data(): + recipe = os.path.join(variants_dir, "12_variant_versions") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 2 - assert any(os.path.basename(pkg).startswith('my_package-470.470') for pkg in outputs) - assert any(os.path.basename(pkg).startswith('my_package-480.480') for pkg in outputs) + assert any( + os.path.basename(pkg).startswith("my_package-470.470") for pkg in outputs + ) + assert any( + os.path.basename(pkg).startswith("my_package-480.480") for pkg in outputs + ) -def test_git_variables_with_variants(testing_workdir, testing_config): - recipe = os.path.join(recipe_dir, '13_git_vars') - m = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True)[0][0] - assert m.version() == "1.20.2" - assert m.build_number() == 0 +def test_git_variables_with_variants(testing_config): + recipe = os.path.join(variants_dir, "13_git_vars") + metadata = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + )[0][0] + assert metadata.version() == "1.20.2" + assert metadata.build_number() == 0 def test_variant_input_with_zip_keys_keeps_zip_keys_list(): spec = { - 'scipy': ['0.17', '0.19'], - 'sqlite': ['3'], - 'zlib': ['1.2'], - 'xz': ['5'], - 'zip_keys': ['sqlite', 'zlib', 'xz'], - 'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}} + "scipy": ["0.17", "0.19"], + "sqlite": ["3"], + "zlib": ["1.2"], + "xz": ["5"], + "zip_keys": ["sqlite", "zlib", "xz"], + "pin_run_as_build": {"python": {"min_pin": "x.x", "max_pin": "x.x"}}, } - vrnts = variants.dict_of_lists_to_list_of_dicts(spec) + vrnts = dict_of_lists_to_list_of_dicts(spec) assert len(vrnts) == 2 assert vrnts[0].get("zip_keys") == spec["zip_keys"] @pytest.mark.serial @pytest.mark.xfail(sys.platform == "win32", reason="console readout issues on appveyor") -def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, caplog): +def test_ensure_valid_spec_on_run_and_test(testing_config, caplog): testing_config.debug = True testing_config.verbose = True - recipe = os.path.join(recipe_dir, '14_variant_in_run_and_test') + recipe = os.path.join(variants_dir, "14_variant_in_run_and_test") api.render(recipe, config=testing_config) text = caplog.text @@ -236,282 +288,520 @@ def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, capl assert "Adding .* to spec 'pytest-mock 1.6'" not in text +@pytest.mark.skipif( + on_mac and platform.machine() == "arm64", + reason="Unsatisfiable dependencies for M1 MacOS: {'bzip2=1.0.6'}", +) def test_serial_builds_have_independent_configs(testing_config): - recipe = os.path.join(recipe_dir, '17_multiple_recipes_independent_config') - recipes = [os.path.join(recipe, dirname) for dirname in ('a', 'b')] + recipe = os.path.join(variants_dir, "17_multiple_recipes_independent_config") + recipes = [os.path.join(recipe, dirname) for dirname in ("a", "b")] outputs = api.build(recipes, config=testing_config) - index_json = json.loads(package_has_file(outputs[0], 'info/index.json')) - assert 'bzip2 >=1,<1.0.7.0a0' in index_json['depends'] - index_json = json.loads(package_has_file(outputs[1], 'info/index.json')) - assert 'bzip2 >=1.0.6,<2.0a0' in index_json['depends'] + index_json = json.loads(package_has_file(outputs[0], "info/index.json")) + assert "bzip2 >=1,<1.0.7.0a0" in index_json["depends"] + index_json = 
json.loads(package_has_file(outputs[1], "info/index.json")) + assert "bzip2 >=1.0.6,<2.0a0" in index_json["depends"] def test_subspace_selection(testing_config): - recipe = os.path.join(recipe_dir, '18_subspace_selection') - testing_config.variant = {'a': 'coffee'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + recipe = os.path.join(variants_dir, "18_subspace_selection") + testing_config.variant = {"a": "coffee"} + metadata_tuples = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) # there are two entries with a==coffee, so we should end up with 2 variants - assert len(ms) == 2 + assert len(metadata_tuples) == 2 # ensure that the zipped keys still agree - assert sum(m.config.variant['b'] == '123' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'abc' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'concrete' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'mooo' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'baaa' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'woof' for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test compound selection - testing_config.variant = {'a': 'coffee', 'b': '123'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + testing_config.variant = {"a": "coffee", "b": "123"} + metadata_tuples = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) # there are two entries with a==coffee, but one with both 'coffee' for a, and '123' for b, # so we should end up with 1 variants - assert len(ms) == 1 + assert len(metadata_tuples) == 1 # ensure that the zipped keys still agree - assert sum(m.config.variant['b'] == '123' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'abc' for m, _, _ in ms) == 0 - assert sum(m.config.variant['b'] == 'concrete' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'mooo' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'baaa' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'woof' for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 0 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test when configuration leads to no valid combinations - only c provided, and its value # 
doesn't match any other existing values of c, so it's then ambiguous which zipped # values to choose - testing_config.variant = {'c': 'not an animal'} + testing_config.variant = {"c": "not an animal"} with pytest.raises(ValueError): - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) # all zipped keys provided by the new variant. It should clobber the old one. - testing_config.variant = {'a': 'some', 'b': 'new', 'c': 'animal'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) - assert len(ms) == 1 - assert ms[0][0].config.variant['a'] == 'some' - assert ms[0][0].config.variant['b'] == 'new' - assert ms[0][0].config.variant['c'] == 'animal' - - -def test_get_used_loop_vars(testing_config): - m = api.render(os.path.join(recipe_dir, '19_used_variables'), finalize=False, bypass_env_check=True)[0][0] + testing_config.variant = {"a": "some", "b": "new", "c": "animal"} + metadata_tuples = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].config.variant["a"] == "some" + assert metadata_tuples[0][0].config.variant["b"] == "new" + assert metadata_tuples[0][0].config.variant["c"] == "animal" + + +def test_get_used_loop_vars(): + metadata = api.render( + os.path.join(variants_dir, "19_used_variables"), + finalize=False, + bypass_env_check=True, + )[0][0] # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable - assert m.get_used_loop_vars() == {'python', 'some_package'} + assert metadata.get_used_loop_vars() == {"python", "some_package"} # these are all used vars - including those with only one value (and thus not loop vars) - assert m.get_used_vars() == {'python', 'some_package', 'zlib', 'pthread_stubs', 'target_platform'} + assert metadata.get_used_vars() == { + "python", + "some_package", + "zlib", + "pthread_stubs", + "target_platform", + } -def test_reprovisioning_source(testing_config): - api.render(os.path.join(recipe_dir, "20_reprovision_source")) +def test_reprovisioning_source(): + api.render(os.path.join(variants_dir, "20_reprovision_source")) def test_reduced_hashing_behavior(testing_config): # recipes using any compiler jinja2 function need a hash - m = api.render(os.path.join(recipe_dir, '26_reduced_hashing', 'hash_yes_compiler'), - finalize=False, bypass_env_check=True)[0][0] - assert 'c_compiler' in m.get_hash_contents(), "hash contents should contain c_compiler" - assert re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()), \ - "hash should be present when compiler jinja2 function is used" + metadata = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_compiler"), + finalize=False, + bypass_env_check=True, + )[0][0] + assert ( + "c_compiler" in metadata.get_hash_contents() + ), "hash contents should contain c_compiler" + assert re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() + ), "hash should be present when compiler jinja2 function is used" # recipes that use some variable in conda_build_config.yaml to control what # versions are present at build time also must have a hash (except # python, r_base, and the other stuff covered by legacy build string # behavior) - m = api.render(os.path.join(recipe_dir, 
'26_reduced_hashing', 'hash_yes_pinned'), - finalize=False, bypass_env_check=True)[0][0] - assert 'zlib' in m.get_hash_contents() - assert re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) + metadata = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_pinned"), + finalize=False, + bypass_env_check=True, + )[0][0] + assert "zlib" in metadata.get_hash_contents() + assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id()) # anything else does not get a hash - m = api.render(os.path.join(recipe_dir, '26_reduced_hashing', 'hash_no_python'), - finalize=False, bypass_env_check=True)[0][0] - assert not m.get_hash_contents() - assert not re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) + metadata = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_no_python"), + finalize=False, + bypass_env_check=True, + )[0][0] + assert not metadata.get_hash_contents() + assert not re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() + ) -def test_variants_used_in_jinja2_conditionals(testing_config): - ms = api.render(os.path.join(recipe_dir, '21_conditional_sections'), - finalize=False, bypass_env_check=True) - assert len(ms) == 2 - assert sum(m.config.variant['blas_impl'] == 'mkl' for m, _, _ in ms) == 1 - assert sum(m.config.variant['blas_impl'] == 'openblas' for m, _, _ in ms) == 1 +def test_variants_used_in_jinja2_conditionals(): + metadata_tuples = api.render( + os.path.join(variants_dir, "21_conditional_sections"), + finalize=False, + bypass_env_check=True, + ) + assert len(metadata_tuples) == 2 + assert ( + sum( + metadata.config.variant["blas_impl"] == "mkl" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["blas_impl"] == "openblas" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) -def test_build_run_exports_act_on_host(testing_config, caplog): +def test_build_run_exports_act_on_host(caplog): """Regression test for https://github.com/conda/conda-build/issues/2559""" - api.render(os.path.join(recipe_dir, '22_run_exports_rerendered_for_other_variants'), - platform='win', arch='64') - assert "failed to get install actions, retrying" not in caplog.text + api.render( + os.path.join(variants_dir, "22_run_exports_rerendered_for_other_variants"), + platform="win", + arch="64", + ) + assert "failed to get package records, retrying" not in caplog.text -def test_detect_variables_in_build_and_output_scripts(testing_config): - ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'), - platform='linux', arch='64') - for m, _, _ in ms: - if m.name() == 'test_find_used_variables_in_scripts': - used_vars = m.get_used_vars() +def test_detect_variables_in_build_and_output_scripts(): + metadata_tuples = api.render( + os.path.join(variants_dir, "24_test_used_vars_in_scripts"), + platform="linux", + arch="64", + ) + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' in used_vars - assert 'OUTPUT_SELECTOR_VAR' not in used_vars - assert 'BASH_VAR1' in used_vars - assert 'BASH_VAR2' in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' not in used_vars + assert "SELECTOR_VAR" in used_vars + assert "OUTPUT_SELECTOR_VAR" not in used_vars + assert "BASH_VAR1" in used_vars + assert "BASH_VAR2" in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" not in used_vars 
else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' not in used_vars - assert 'OUTPUT_SELECTOR_VAR' in used_vars - assert 'BASH_VAR1' not in used_vars - assert 'BASH_VAR2' not in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' in used_vars + assert "SELECTOR_VAR" not in used_vars + assert "OUTPUT_SELECTOR_VAR" in used_vars + assert "BASH_VAR1" not in used_vars + assert "BASH_VAR2" not in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" in used_vars # on windows, we find variables in bat scripts as well as shell scripts - ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'), - platform='win', arch='64') - for m, _, _ in ms: - if m.name() == 'test_find_used_variables_in_scripts': - used_vars = m.get_used_vars() + metadata_tuples = api.render( + os.path.join(variants_dir, "24_test_used_vars_in_scripts"), + platform="win", + arch="64", + ) + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' in used_vars - assert 'OUTPUT_SELECTOR_VAR' not in used_vars - assert 'BASH_VAR1' in used_vars - assert 'BASH_VAR2' in used_vars + assert "SELECTOR_VAR" in used_vars + assert "OUTPUT_SELECTOR_VAR" not in used_vars + assert "BASH_VAR1" in used_vars + assert "BASH_VAR2" in used_vars # bat is in addition to bash, not instead of - assert 'BAT_VAR' in used_vars - assert 'OUTPUT_VAR' not in used_vars + assert "BAT_VAR" in used_vars + assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' not in used_vars - assert 'OUTPUT_SELECTOR_VAR' in used_vars - assert 'BASH_VAR1' not in used_vars - assert 'BASH_VAR2' not in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' in used_vars - - -def test_target_platform_looping(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, '25_target_platform_looping'), - platform='win', arch='64') + assert "SELECTOR_VAR" not in used_vars + assert "OUTPUT_SELECTOR_VAR" in used_vars + assert "BASH_VAR1" not in used_vars + assert "BASH_VAR2" not in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" in used_vars + + +def test_target_platform_looping(): + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "25_target_platform_looping"), + platform="win", + arch="64", + ) assert len(outputs) == 2 -def test_numpy_used_variable_looping(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'numpy_used')) +def test_numpy_used_variable_looping(): + outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4 -def test_exclusive_config_files(testing_workdir): - with open('conda_build_config.yaml', 'w') as f: - yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False) - os.makedirs('config_dir') - with open(os.path.join('config_dir', 'config-0.yaml'), 'w') as f: - yaml.dump({'abc': ['super_0'], 'exclusive_0': ['0'], 'exclusive_both': ['0']}, - f, default_flow_style=False) - with open(os.path.join('config_dir', 'config-1.yaml'), 'w') as f: - yaml.dump({'abc': ['super_1'], 'exclusive_1': ['1'], 'exclusive_both': ['1']}, - f, default_flow_style=False) +def test_exclusive_config_files(): + with open("conda_build_config.yaml", "w") as f: + yaml.dump({"abc": ["someval"], "cwd": ["someval"]}, 
f, default_flow_style=False) + os.makedirs("config_dir") + with open(os.path.join("config_dir", "config-0.yaml"), "w") as f: + yaml.dump( + {"abc": ["super_0"], "exclusive_0": ["0"], "exclusive_both": ["0"]}, + f, + default_flow_style=False, + ) + with open(os.path.join("config_dir", "config-1.yaml"), "w") as f: + yaml.dump( + {"abc": ["super_1"], "exclusive_1": ["1"], "exclusive_both": ["1"]}, + f, + default_flow_style=False, + ) exclusive_config_files = ( - os.path.join('config_dir', 'config-0.yaml'), - os.path.join('config_dir', 'config-1.yaml'), + os.path.join("config_dir", "config-0.yaml"), + os.path.join("config_dir", "config-1.yaml"), ) - output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'), - exclusive_config_files=exclusive_config_files)[0][0] - variant = output.config.variant + metadata = api.render( + os.path.join(variants_dir, "exclusive_config_file"), + exclusive_config_files=exclusive_config_files, + )[0][0] + variant = metadata.config.variant # is cwd ignored? - assert 'cwd' not in variant + assert "cwd" not in variant # did we load the exclusive configs? - assert variant['exclusive_0'] == '0' - assert variant['exclusive_1'] == '1' + assert variant["exclusive_0"] == "0" + assert variant["exclusive_1"] == "1" # does later exclusive config override initial one? - assert variant['exclusive_both'] == '1' + assert variant["exclusive_both"] == "1" # does recipe config override exclusive? - assert 'unique_to_recipe' in variant - assert variant['abc'] == '123' - - -def test_exclusive_config_file(testing_workdir): - with open('conda_build_config.yaml', 'w') as f: - yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False) - os.makedirs('config_dir') - with open(os.path.join('config_dir', 'config.yaml'), 'w') as f: - yaml.dump({'abc': ['super'], 'exclusive': ['someval']}, f, default_flow_style=False) - output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'), - exclusive_config_file=os.path.join('config_dir', 'config.yaml'))[0][0] - variant = output.config.variant + assert "unique_to_recipe" in variant + assert variant["abc"] == "123" + + +def test_exclusive_config_file(): + with open("conda_build_config.yaml", "w") as f: + yaml.dump({"abc": ["someval"], "cwd": ["someval"]}, f, default_flow_style=False) + os.makedirs("config_dir") + with open(os.path.join("config_dir", "config.yaml"), "w") as f: + yaml.dump( + {"abc": ["super"], "exclusive": ["someval"]}, f, default_flow_style=False + ) + metadata = api.render( + os.path.join(variants_dir, "exclusive_config_file"), + exclusive_config_file=os.path.join("config_dir", "config.yaml"), + )[0][0] + variant = metadata.config.variant # is cwd ignored? - assert 'cwd' not in variant + assert "cwd" not in variant # did we load the exclusive config - assert 'exclusive' in variant + assert "exclusive" in variant # does recipe config override exclusive? 
- assert 'unique_to_recipe' in variant - assert variant['abc'] == '123' + assert "unique_to_recipe" in variant + assert variant["abc"] == "123" +@pytest.mark.skipif( + on_mac and platform.machine() == "arm64", + reason="M1 Mac-specific file system error related to this test", +) def test_inner_python_loop_with_output(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config) + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 - - testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')] - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config) + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 + + testing_config.variant_config_files = [ + os.path.join( + variants_dir, "test_python_as_subpackage_loop", "config_with_zip.yaml" + ) + ] + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 - - testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')] - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config, platform='win', arch=64) + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 + + testing_config.variant_config_files = [ + os.path.join( + variants_dir, "test_python_as_subpackage_loop", "config_with_zip.yaml" + ) + ] + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + platform="win", + arch=64, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 def test_variant_as_dependency_name(testing_config): - outputs = api.render(os.path.join(recipe_dir, '27_requirements_host'), - config=testing_config) - assert len(outputs) == 2 + metadata_tuples = api.render( + os.path.join(variants_dir, 
"27_requirements_host"), config=testing_config + ) + assert len(metadata_tuples) == 2 def test_custom_compiler(): - recipe = os.path.join(recipe_dir, '28_custom_compiler') - ms = api.render(recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True) - assert len(ms) == 3 + recipe = os.path.join(variants_dir, "28_custom_compiler") + metadata_tuples = api.render( + recipe, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + ) + assert len(metadata_tuples) == 3 def test_different_git_vars(): - recipe = os.path.join(recipe_dir, '29_different_git_vars') - ms = api.render(recipe) - versions = [m[0].version() for m in ms] + recipe = os.path.join(variants_dir, "29_different_git_vars") + metadata_tuples = api.render(recipe) + versions = [metadata[0].version() for metadata in metadata_tuples] assert "1.20.0" in versions assert "1.21.11" in versions -@pytest.mark.skipif(sys.platform != "linux", reason="recipe uses a unix specific script") +@pytest.mark.skipif( + sys.platform != "linux", reason="recipe uses a unix specific script" +) def test_top_level_finalized(testing_config): # see https://github.com/conda/conda-build/issues/3618 - recipe = os.path.join(recipe_dir, '30_top_level_finalized') + recipe = os.path.join(variants_dir, "30_top_level_finalized") outputs = api.build(recipe, config=testing_config) - xzcat_output = package_has_file(outputs[0], 'xzcat_output') - assert '5.2.3' in xzcat_output + xzcat_output = package_has_file(outputs[0], "xzcat_output") + assert "5.2.3" in xzcat_output -def test_variant_subkeys_retained(testing_config): - m = api.render(os.path.join(recipe_dir, '31_variant_subkeys'), finalize=False, bypass_env_check=True)[0][0] +def test_variant_subkeys_retained(): + metadata = api.render( + os.path.join(variants_dir, "31_variant_subkeys"), + finalize=False, + bypass_env_check=True, + )[0][0] found_replacements = False from conda_build.build import get_all_replacements - for variant in m.config.variants: + + for variant in metadata.config.variants: found_replacements = get_all_replacements(variant) assert len(found_replacements), "Did not find replacements" - m.final = False - outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False) + metadata.final = False + outputs = metadata.get_output_metadata_set(permit_unsatisfiable_variants=False) get_all_replacements(outputs[0][1].config.variant) + + +@pytest.mark.parametrize( + "internal_defaults, low_prio_config, high_prio_config, expected", + [ + pytest.param( + {"pkg_1": "1.0"}, + {"pkg_1": "1.1"}, + {"pkg_1": ["1.1", "1.2"], "pkg_2": ["1.1"]}, + [{"pkg_1": "1.1", "pkg_2": "1.1"}, {"pkg_1": "1.2", "pkg_2": "1.1"}], + id="basic", + ), + pytest.param( + {"pkg_1": "1.0"}, + {"pkg_1": "1.1"}, + { + "pkg_1": ["1.1", "1.2"], + "pkg_2": ["1.1", "1.2"], + "zip_keys": [["pkg_1", "pkg_2"]], + }, + [ + {"pkg_1": "1.1", "pkg_2": "1.1", "zip_keys": [["pkg_1", "pkg_2"]]}, + {"pkg_1": "1.2", "pkg_2": "1.2", "zip_keys": [["pkg_1", "pkg_2"]]}, + ], + id="zip_keys", + ), + ], +) +def test_zip_key_filtering( + internal_defaults, low_prio_config, high_prio_config, expected +): + combined_spec = { + **low_prio_config, + **high_prio_config, + } + specs = { + "internal_defaults": internal_defaults, + "low_prio_config": low_prio_config, + "high_prio_config": high_prio_config, + } + + assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected + + +def test_get_vars(): + variants = [ + { + "python": "3.12", + "nodejs": "20", + "zip_keys": [], # ignored + }, + 
{"python": "3.12", "nodejs": "18"}, + {"python": "3.12", "nodejs": "20"}, + ] + + assert get_vars(variants) == {"nodejs"} + + +def test_find_used_variables_in_shell_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"${variants[0]}\n" + f"${{{variants[1]}}}\n" + f"${{{{{variants[2]}}}}}\n" + f"$${variants[3]}\n" + ) + assert find_used_variables_in_shell_script(variants, script) == {"FOO", "BAR"} + + +def test_find_used_variables_in_batch_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"%{variants[0]}%\n" + f"%%{variants[1]}%%\n" + f"${variants[2]}\n" + f"${{{variants[3]}}}\n" + ) + assert find_used_variables_in_batch_script(variants, script) == {"FOO", "BAR"} diff --git a/tests/utils.py b/tests/utils.py index 5e627299be..4d6803f09d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,39 +1,65 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import contextlib +from __future__ import annotations + import os import shlex import sys +from pathlib import Path +from typing import TYPE_CHECKING -import pytest -from conda_build.metadata import MetaData -from conda_build.utils import on_win - - -def get_root_dir(): - import conda_build - - conda_build_dir = os.path.realpath(os.path.dirname(conda_build.__file__)) - return os.path.abspath(os.path.join(conda_build_dir, "..")) +from conda.base.context import reset_context +from conda.common.compat import on_mac +from conda_build.metadata import MetaData -thisdir = os.path.join(get_root_dir(), "tests") -metadata_dir = os.path.join(thisdir, "test-recipes", "metadata") -subpackage_dir = os.path.join(thisdir, "test-recipes", "split-packages") -fail_dir = os.path.join(thisdir, "test-recipes", "fail") -archive_dir = os.path.join(thisdir, "archives") +if TYPE_CHECKING: + from typing import Generator + +tests_path = Path(__file__).parent +metadata_path = tests_path / "test-recipes" / "metadata" +subpackage_path = tests_path / "test-recipes" / "split-packages" +fail_path = tests_path / "test-recipes" / "fail" +variants_path = tests_path / "test-recipes" / "variants" +dll_path = tests_path / "test-recipes" / "dll-package" +go_path = tests_path / "test-recipes" / "go-package" +published_path = tests_path / "test-recipes" / "published_code" +archive_path = tests_path / "archives" +cran_path = tests_path / "test-cran-skeleton" + +# backport +thisdir = str(tests_path) +metadata_dir = str(metadata_path) +subpackage_dir = str(subpackage_path) +fail_dir = str(fail_path) +variants_dir = str(variants_path) +dll_dir = str(dll_path) +go_dir = str(go_path) +published_dir = str(published_path) +archive_dir = str(archive_path) +cran_dir = str(cran_path) + + +def is_valid_dir(*parts: Path | str) -> bool: + path = Path(*parts) + return ( + # only directories are valid recipes + path.is_dir() + # recipes prefixed with _ are special and shouldn't be run as part of bulk tests + and not path.name.startswith("_") + # exclude macOS-only recipes + and (path.name not in ["osx_is_app"] or on_mac) + ) -def is_valid_dir(parent_dir, dirname): - valid = os.path.isdir(os.path.join(parent_dir, dirname)) - valid &= not dirname.startswith("_") - valid &= "osx_is_app" != dirname or sys.platform == "darwin" - return valid +def get_valid_recipes(*parts: Path | str) -> Generator[Path, None, None]: + yield from filter(is_valid_dir, Path(*parts).iterdir()) def add_mangling(filename): - filename = 
os.path.splitext(filename)[0] + ".cpython-{}{}.py".format( - sys.version_info.major, sys.version_info.minor + filename = ( + os.path.splitext(filename)[0] + + f".cpython-{sys.version_info.major}{sys.version_info.minor}.py" ) filename = os.path.join( os.path.dirname(filename), "__pycache__", os.path.basename(filename) @@ -65,8 +91,7 @@ def assert_package_consistency(package_path): has_prefix_present = False except tarfile.ReadError: raise RuntimeError( - "Could not extract metadata from %s. " - "File probably corrupt." % package_path + f"Could not extract metadata from {package_path}. File probably corrupt." ) errors = [] member_set = set(member_list) # The tar format allows duplicates in member_list @@ -75,7 +100,7 @@ def assert_package_consistency(package_path): file_set = set(file_list) # Check that there are no duplicates in info/files if len(file_list) != len(file_set): - errors.append("Duplicate files in info/files in %s" % package_path) + errors.append(f"Duplicate files in info/files in {package_path}") # Compare the contents of files and members unlisted_members = member_set.difference(file_set) missing_members = file_set.difference(member_set) @@ -83,14 +108,16 @@ def assert_package_consistency(package_path): missing_files = [m for m in unlisted_members if not m.startswith("info/")] if len(missing_files) > 0: errors.append( - "The following package files are not listed in " - "info/files: %s" % ", ".join(missing_files) + "The following package files are not listed in info/files: {}".format( + ", ".join(missing_files) + ) ) # Find any files missing in the archive if len(missing_members) > 0: errors.append( - "The following files listed in info/files are missing: " - "%s" % ", ".join(missing_members) + "The following files listed in info/files are missing: {}".format( + ", ".join(missing_members) + ) ) # Find any files in has_prefix that are not present in files if has_prefix_present: @@ -103,52 +130,26 @@ def assert_package_consistency(package_path): elif len(parts) == 3: prefix_path_list.append(parts[2]) else: - errors.append("Invalid has_prefix file in package: %s" % package_path) + errors.append(f"Invalid has_prefix file in package: {package_path}") prefix_path_set = set(prefix_path_list) if len(prefix_path_list) != len(prefix_path_set): - errors.append("Duplicate files in info/has_prefix in %s" % package_path) + errors.append(f"Duplicate files in info/has_prefix in {package_path}") prefix_not_in_files = prefix_path_set.difference(file_set) if len(prefix_not_in_files) > 0: errors.append( "The following files listed in info/has_prefix are missing " - "from info/files: %s" % ", ".join(prefix_not_in_files) + "from info/files: {}".format(", ".join(prefix_not_in_files)) ) # Assert that no errors are detected assert len(errors) == 0, "\n".join(errors) -@contextlib.contextmanager -def put_bad_conda_on_path(testing_workdir): - path_backup = os.environ["PATH"] - # it is easier to add an intentionally bad path than it is to try to scrub any existing path - os.environ["PATH"] = os.pathsep.join([testing_workdir, os.environ["PATH"]]) - - exe_name = "conda.bat" if on_win else "conda" - out_exe = os.path.join(testing_workdir, exe_name) - with open(out_exe, "w") as f: - f.write("exit 1") - st = os.stat(out_exe) - os.chmod(out_exe, st.st_mode | 0o111) - try: - yield - except: - raise - finally: - os.environ["PATH"] = path_backup - - def get_noarch_python_meta(meta): d = meta.meta d["build"]["noarch"] = "python" return MetaData.fromdict(d, config=meta.config) -@pytest.fixture(autouse=True) -def 
skip_serial(request): - if ( - request.node.get_marker("serial") - and getattr(request.config, "slaveinput", {}).get("slaveid", "local") != "local" - ): - # under xdist and serial - pytest.skip("serial") +def reset_config(search_path=None): + reset_context(search_path) diff --git a/tests/variant_recipe/build_config.yaml b/tests/variant_recipe/build_config.yaml deleted file mode 100644 index 10feaf73db..0000000000 --- a/tests/variant_recipe/build_config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -requirements: - build: - # git chosen here because it is independent of compiler on win - - git diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index a142bf53e8..0000000000 --- a/versioneer.py +++ /dev/null @@ -1,2140 +0,0 @@ - -# Version: 0.22 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible with: Python 3.6, 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in distutils/setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -* `pip install versioneer` to somewhere in your $PATH -* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) -* run `versioneer install` in your source tree, commit the results -* Verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). 
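The `git describe --tags --dirty --always` output quoted above (e.g. "0.7-1-g574ab98-dirty") packs the closest tag, the distance past it, the short revision id, and the dirty flag into one string. As a minimal sketch of how such a string decomposes, using a hypothetical `parse_describe` helper (Versioneer's actual parsing lives in `git_pieces_from_vcs` further down in this removed file):

    import re

    def parse_describe(desc: str) -> dict:
        # Split TAG-NUM-gHEX[-dirty] (or bare HEX[-dirty] when no tag is
        # reachable) into the pieces described above.
        dirty = desc.endswith("-dirty")
        if dirty:
            desc = desc[: -len("-dirty")]
        mo = re.match(r"^(?P<tag>.+)-(?P<num>\d+)-g(?P<hex>[0-9a-f]+)$", desc)
        if mo:
            return {
                "closest-tag": mo.group("tag"),
                "distance": int(mo.group("num")),
                "short": mo.group("hex"),
                "dirty": dirty,
            }
        return {"closest-tag": None, "distance": None, "short": desc, "dirty": dirty}

    # parse_describe("0.7-1-g574ab98-dirty")
    # -> {'closest-tag': '0.7', 'distance': 1, 'short': '574ab98', 'dirty': True}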
- -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. 
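A minimal consumer-side sketch of the flavors listed above, following the removed docstring's suggestion to fail loudly in `setup.py` when `['error']` is set; only `versioneer.get_versions()` and its result keys come from that documentation, the guard itself is illustrative:

    # setup.py (sketch)
    import versioneer

    info = versioneer.get_versions()
    if info["error"]:
        # avoid shipping a tarball whose version is "unknown"
        raise RuntimeError(f"cannot determine version: {info['error']}")

    version = info["version"]           # e.g. "0.11+2.g1076c97.dirty"
    revision = info["full-revisionid"]  # full SHA1 commit id, or None
    is_dirty = info["dirty"]            # True/False/None
    commit_date = info["date"]          # ISO 8601 string, or None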
- -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. 
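Returning to the default "pep440" style described at the start of this removed section, the rule TAG[+DISTANCE.gHEX[.dirty]] can be restated in a few lines of Python. This is only a simplified illustration for a tree whose closest tag is known; the full rule, including untagged trees and tags that already contain a "+", is `render_pep440` further down in this file:

    def pep440_local(tag, distance, short, dirty):
        # TAG[+DISTANCE.gHEX[.dirty]]
        version = tag
        if distance or dirty:
            version += f"+{distance}.g{short}"
        if dirty:
            version += ".dirty"
        return version

    assert pep440_local("0.11", 0, "1076c97", False) == "0.11"
    assert pep440_local("0.11", 2, "1076c97", True) == "0.11+2.g1076c97.dirty"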
- -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . 
- -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.ConfigParser() - with open(setup_cfg, "r") as cfg_file: - parser.read_file(cfg_file) - VCS = parser.get("versioneer", "VCS") # mandatory - - # Dict-like interface for non-mandatory entries - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. 
We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%%s*" %% tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. 
Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". 
- tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - my_path = __file__ - if my_path.endswith(".pyc") or my_path.endswith(".pyo"): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. 
- - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.22) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. 
This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools/distutils subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to both distutils and setuptools - try: - from setuptools import Command - except ImportError: - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - elif "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - elif "setuptools" in sys.modules: - from setuptools.command.build_ext import build_ext as _build_ext - else: - from distutils.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 
- # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - elif "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except OSError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
-    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
-    return 0
-
-
-def scan_setup_py():
-    """Validate the contents of setup.py against Versioneer's expectations."""
-    found = set()
-    setters = False
-    errors = 0
-    with open("setup.py", "r") as f:
-        for line in f.readlines():
-            if "import versioneer" in line:
-                found.add("import")
-            if "versioneer.get_cmdclass()" in line:
-                found.add("cmdclass")
-            if "versioneer.get_version()" in line:
-                found.add("get_version")
-            if "versioneer.VCS" in line:
-                setters = True
-            if "versioneer.versionfile_source" in line:
-                setters = True
-    if len(found) != 3:
-        print("")
-        print("Your setup.py appears to be missing some important items")
-        print("(but I might be wrong). Please make sure it has something")
-        print("roughly like the following:")
-        print("")
-        print(" import versioneer")
-        print(" setup( version=versioneer.get_version(),")
-        print(" cmdclass=versioneer.get_cmdclass(), ...)")
-        print("")
-        errors += 1
-    if setters:
-        print("You should remove lines like 'versioneer.VCS = ' and")
-        print("'versioneer.versionfile_source = ' . This configuration")
-        print("now lives in setup.cfg, and should be removed from setup.py")
-        print("")
-        errors += 1
-    return errors
-
-
-if __name__ == "__main__":
-    cmd = sys.argv[1]
-    if cmd == "setup":
-        errors = do_setup()
-        errors += scan_setup_py()
-        if errors:
-            sys.exit(1)