├── broken-recipes
│   └── .gitignore
├── setup.cfg
├── conda_build_config.yaml
├── conda-forge.yml
├── .gitattributes
├── .ci_support
│   ├── win64.yaml
│   ├── requirements.txt
│   ├── osx64.yaml
│   ├── linux64.yaml
│   ├── linux64_cuda112.yaml
│   ├── linux64_cuda118.yaml
│   ├── linux64_cuda120.yaml
│   ├── build_all.py
│   └── compute_build_graph.py
├── .gitignore
├── azure-pipelines.yml
├── .github
│   ├── workflows
│   │   ├── scripts
│   │   │   ├── print_tokens.py
│   │   │   ├── create_feedstocks
│   │   │   └── create_feedstocks.py
│   │   ├── do_not_edit_example.yml
│   │   ├── correct_directory.yml
│   │   ├── tokens.yml.notused
│   │   ├── README.md
│   │   ├── create_feedstocks.yml
│   │   └── automate-review-labels.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── config.yml
│   │   ├── bugs_template.yml
│   │   ├── issue_template.yml
│   │   └── package-request.yml
│   ├── stale.yml
│   └── pull_request_template.md
├── .azure-pipelines
│   ├── azure-pipelines-osx.yml
│   ├── azure-pipelines-win.yml
│   └── azure-pipelines-linux.yml
├── .scripts
│   ├── logging_utils.sh
│   ├── run_osx_build.sh
│   ├── run_win_build.bat
│   ├── build_steps.sh
│   └── run_docker_build.sh
├── LICENSE.txt
├── .appveyor.yml.notused
├── recipes
│   └── example
│       └── meta.yaml
└── README.md
/broken-recipes/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length=88 3 | -------------------------------------------------------------------------------- /conda_build_config.yaml: -------------------------------------------------------------------------------- 1 | MACOSX_DEPLOYMENT_TARGET: # [osx] 2 | - 11.0 # [osx] -------------------------------------------------------------------------------- /conda-forge.yml: -------------------------------------------------------------------------------- 1 | channel_priority: strict 2 | azure: 3 | settings_win: 4 | pool: 5 | vmImage: windows-2022 6 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | 3 | *.patch binary 4 | *.diff binary 5 | 6 | *.yaml text eol=lf 7 | *.sh text eol=lf 8 | *.bat text eol=crlf 9 | -------------------------------------------------------------------------------- /.ci_support/win64.yaml: -------------------------------------------------------------------------------- 1 | go_compiler: 2 | - go-nocgo 3 | cgo_compiler: 4 | - go-cgo 5 | channel_sources: 6 | - conda-forge 7 | target_platform: 8 | - win-64 9 | -------------------------------------------------------------------------------- /.ci_support/requirements.txt: -------------------------------------------------------------------------------- 1 | conda>=23.7.3 2 | conda-libmamba-solver>=23.7.0 3 | conda-build>=24.3 4 | conda-index>=0.3.0 5 | conda-forge-ci-setup=4.* 6 | conda-forge-pinning 7 | frozendict 8 | networkx=2.4 9 | -------------------------------------------------------------------------------- /.ci_support/osx64.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - clang 3 | cxx_compiler: 4 | - clangxx 5 | fortran_compiler: 6 | - gfortran 7 | go_compiler: 8 | - go-nocgo 9 | cgo_compiler: 10 | - go-cgo 11 | channel_sources: 12 | - conda-forge 13 | target_platform: 14 | - osx-64 15 | -------------------------------------------------------------------------------- /.gitignore:
-------------------------------------------------------------------------------- 1 | # macOS folder metadata 2 | .DS_Store 3 | 4 | # User builds 5 | build_artifacts 6 | 7 | # Compiled Python code 8 | __pycache__ 9 | *.pyc 10 | 11 | # Editor files 12 | *.swp 13 | .idea 14 | *.iml 15 | .vscode 16 | 17 | # Jupyter checkpoints 18 | **/.ipynb_checkpoints/* 19 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | # Don't run build jobs for branches 2 | trigger: 3 | batch: true 4 | branches: 5 | exclude: 6 | - "*" 7 | 8 | # Still allow PR builds to run against main 9 | pr: 10 | - main 11 | 12 | jobs: 13 | - template: ./.azure-pipelines/azure-pipelines-linux.yml 14 | - template: ./.azure-pipelines/azure-pipelines-osx.yml 15 | - template: ./.azure-pipelines/azure-pipelines-win.yml 16 | -------------------------------------------------------------------------------- /.github/workflows/scripts/print_tokens.py: -------------------------------------------------------------------------------- 1 | import os 2 | import github 3 | 4 | for token in [ 5 | "GH_TOKEN", 6 | "GH_TRAVIS_TOKEN", 7 | "GH_DRONE_TOKEN", 8 | "ORGWIDE_GH_TRAVIS_TOKEN" 9 | ]: 10 | try: 11 | gh = github.Github(os.environ[token]) 12 | login = gh.get_user().login 13 | except Exception: 14 | login = "NOT FOUND" 15 | 16 | print("%s: %s" % (token, login)) 17 | -------------------------------------------------------------------------------- /.ci_support/linux64.yaml: -------------------------------------------------------------------------------- 1 | cdt_name: 2 | - cos7 3 | c_compiler: 4 | - gcc 5 | cxx_compiler: 6 | - gxx 7 | fortran_compiler: 8 | - gfortran 9 | go_compiler: 10 | - go-nocgo 11 | cgo_compiler: 12 | - go-cgo 13 | target_platform: 14 | - linux-64 15 | channel_sources: 16 | - conda-forge 17 | docker_image: 18 | - quay.io/condaforge/linux-anvil-cos7-x86_64 19 | cuda_compiler: 20 | - None 21 | cuda_compiler_version: 22 | - None 23 | cuda_compiler_version_min: 24 | - None 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Conda-forge gitter chat 4 | url: https://gitter.im/conda-forge/conda-forge.github.io 5 | about: Chat with us about conda-forge and ask general questions. 6 | - name: Conda-forge documentation 7 | url: https://conda-forge.org/docs/ 8 | about: The complete conda-forge documentation. 9 | - name: Conda-forge packages 10 | url: https://conda-forge.org/feedstock-outputs/ 11 | about: For when you have an issue with a specific package.
12 | -------------------------------------------------------------------------------- /.ci_support/linux64_cuda112.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | cxx_compiler: 4 | - gxx 5 | fortran_compiler: 6 | - gfortran 7 | go_compiler: 8 | - go-nocgo 9 | cgo_compiler: 10 | - go-cgo 11 | c_compiler_version: 12 | - 10 13 | cxx_compiler_version: 14 | - 10 15 | fortran_compiler_version: 16 | - 10 17 | cdt_name: 18 | - cos7 19 | target_platform: 20 | - linux-64 21 | channel_sources: 22 | - conda-forge 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-cuda:11.2 25 | cuda_compiler: 26 | - nvcc 27 | cuda_compiler_version: 28 | - 11.2 29 | cuda_compiler_version_min: 30 | - 11.2 31 | -------------------------------------------------------------------------------- /.ci_support/linux64_cuda118.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | cxx_compiler: 4 | - gxx 5 | fortran_compiler: 6 | - gfortran 7 | go_compiler: 8 | - go-nocgo 9 | cgo_compiler: 10 | - go-cgo 11 | c_compiler_version: 12 | - 11 13 | cxx_compiler_version: 14 | - 11 15 | fortran_compiler_version: 16 | - 11 17 | cdt_name: 18 | - cos7 19 | target_platform: 20 | - linux-64 21 | channel_sources: 22 | - conda-forge 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-cuda:11.8 25 | cuda_compiler: 26 | - nvcc 27 | cuda_compiler_version: 28 | - 11.8 29 | cuda_compiler_version_min: 30 | - 11.2 31 | -------------------------------------------------------------------------------- /.ci_support/linux64_cuda120.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | cxx_compiler: 4 | - gxx 5 | fortran_compiler: 6 | - gfortran 7 | go_compiler: 8 | - go-nocgo 9 | cgo_compiler: 10 | - go-cgo 11 | c_compiler_version: 12 | - 12 13 | cxx_compiler_version: 14 | - 12 15 | fortran_compiler_version: 16 | - 12 17 | cdt_name: 18 | - cos7 19 | target_platform: 20 | - linux-64 21 | channel_sources: 22 | - conda-forge 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-cos7-x86_64 25 | cuda_compiler: 26 | - cuda-nvcc 27 | cuda_compiler_version: 28 | - 12.0 29 | cuda_compiler_version_min: 30 | - 11.2 31 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-osx.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml.
3 | # -*- mode: yaml -*- 4 | 5 | jobs: 6 | - job: osx 7 | condition: not(eq(variables['Build.SourceBranch'], 'refs/heads/main')) 8 | pool: 9 | vmImage: macOS-12 10 | strategy: 11 | matrix: 12 | osx_64: 13 | CONFIG: osx64 14 | maxParallel: 8 15 | timeoutInMinutes: 360 16 | 17 | steps: 18 | - script: | 19 | export CI=azure 20 | ./.scripts/run_osx_build.sh 21 | displayName: Run OSX build 22 | 23 | - publish: /Users/runner/Miniforge3/conda-bld/osx-64/ 24 | artifact: conda_pkgs_osx 25 | -------------------------------------------------------------------------------- /.github/workflows/do_not_edit_example.yml: -------------------------------------------------------------------------------- 1 | name: do_not_edit_example 2 | 3 | on: 4 | pull_request_target: 5 | paths: 6 | - 'recipes/example/meta.yaml' 7 | 8 | jobs: 9 | comment: 10 | name: Notify user about not editing example recipe 11 | runs-on: "ubuntu-latest" 12 | steps: 13 | - uses: actions/checkout@v2 14 | 15 | - name: Comment on PR 16 | uses: actions/github-script@v6 17 | with: 18 | script: | 19 | github.rest.issues.createComment({ 20 | issue_number: context.issue.number, 21 | owner: context.repo.owner, 22 | repo: context.repo.repo, 23 | body: 'Hi! Thanks for your contribution to conda-forge.' + 24 | '\nWhen submitting a pull request, please do not change anything in the example recipe.\n' + 25 | 'Please make sure that any changes are reverted before you submit it for review.\n' + 26 | 'Thanks!' 27 | }) 28 | -------------------------------------------------------------------------------- /.scripts/logging_utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Provide a unified interface for the different logging 4 | # utilities CI providers offer. If unavailable, provide 5 | # a compatible fallback (e.g. bare `echo xxxxxx`). 6 | 7 | function startgroup { 8 | # Start a foldable group of log lines 9 | # Pass a single argument, quoted 10 | case ${CI:-} in 11 | azure ) 12 | echo "##[group]$1";; 13 | travis ) 14 | echo "$1" 15 | echo -en 'travis_fold:start:'"${1// /}"'\\r';; 16 | github_actions ) 17 | echo "::group::$1";; 18 | * ) 19 | echo "$1";; 20 | esac 21 | } 2> /dev/null 22 | 23 | function endgroup { 24 | # End a foldable group of log lines 25 | # Pass a single argument, quoted 26 | 27 | case ${CI:-} in 28 | azure ) 29 | echo "##[endgroup]";; 30 | travis ) 31 | echo -en 'travis_fold:end:'"${1// /}"'\\r';; 32 | github_actions ) 33 | echo "::endgroup::";; 34 | esac 35 | } 2> /dev/null 36 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bugs_template.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report a bug in staged-recipes 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | _Please note that the conda-forge team doesn't follow this repo because of the high volume of notifications._ 9 | 10 | If you would like to get the attention of the conda-forge team, please do one of the following: 11 | 12 | 1. If the issue is related to the staged-recipes infrastructure, ping the `@conda-forge/staged-recipes` team in this issue. 13 | _Note:_ If you're not a member of the conda-forge GitHub organization, this will be disabled by GitHub and you can ask the bot to ping the team for you by entering the following command in a comment: `@conda-forge-admin, please ping conda-forge/staged-recipes` 14 | 2.
If the issue is related to conda-forge, please open an issue in the [general conda-forge repo](https://github.com/conda-forge/conda-forge.github.io). 15 | 3. If you need help, join our [gitter](https://gitter.im/conda-forge/conda-forge.github.io) community chat room. 16 | 17 | - type: textarea 18 | id: comment 19 | attributes: 20 | label: "Bug:" 21 | -------------------------------------------------------------------------------- /.github/workflows/correct_directory.yml: -------------------------------------------------------------------------------- 1 | name: directory_linter 2 | 3 | on: 4 | pull_request_target: 5 | paths: 6 | - 'recipes/*.yml' 7 | - 'recipes/*.yaml' 8 | 9 | jobs: 10 | comment: 11 | name: Notify user about wrong dir 12 | runs-on: "ubuntu-latest" 13 | steps: 14 | - uses: actions/checkout@v2 15 | 16 | - name: Comment on PR 17 | uses: actions/github-script@v6 18 | with: 19 | script: | 20 | github.rest.issues.createComment({ 21 | issue_number: context.issue.number, 22 | owner: context.repo.owner, 23 | repo: context.repo.repo, 24 | body: 'Hi! Thanks for your contribution to conda-forge.' + 25 | '\nUnfortunately, the recipe was added directly in the `recipes` folder without its own subfolder.\n' + 26 | 'Please move the recipe file into a folder with the name of the package you want to submit.\n\n' + 27 | 'For example: if your recipe is currently under `recipes/<package-name>.yaml`, ' + 28 | 'it should be moved to `recipes/<package-name>/meta.yaml`.\n' + 29 | 'Thanks!' 30 | }) 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue_template.yml: -------------------------------------------------------------------------------- 1 | name: General question 2 | description: Ask a general question about staged-recipes 3 | labels: question 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | _Please note that the conda-forge team doesn't follow this repo because of the high volume of notifications._ 9 | 10 | If you would like to get the attention of the conda-forge team, please do one of the following: 11 | 12 | 1. If the issue is related to the staged-recipes infrastructure, ping the `@conda-forge/staged-recipes` team in this issue. 13 | _Note:_ If you're not a member of the conda-forge GitHub organization, this will be disabled by GitHub and you can ask the bot to ping the team for you by entering the following command in a comment: `@conda-forge-admin, please ping conda-forge/staged-recipes` 14 | 2. If the issue is related to conda-forge, please open an issue in the [general conda-forge repo](https://github.com/conda-forge/conda-forge.github.io). 15 | 3. If you need help, join our [gitter](https://gitter.im/conda-forge/conda-forge.github.io) community chat room.
16 | 17 | - type: textarea 18 | id: comment 19 | attributes: 20 | label: "General comment:" 21 | -------------------------------------------------------------------------------- /.github/workflows/tokens.yml.notused: -------------------------------------------------------------------------------- 1 | name: tokens 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | tokens: 10 | name: tokens 11 | runs-on: "ubuntu-latest" 12 | steps: 13 | - uses: actions/checkout@v2 14 | 15 | - uses: conda-incubator/setup-miniconda@v2 16 | with: 17 | python-version: 3.8 18 | channels: conda-forge,defaults 19 | channel-priority: strict 20 | show-channel-urls: true 21 | miniforge-version: latest 22 | miniforge-variant: Mambaforge 23 | 24 | - name: configure conda and install code 25 | shell: bash -l {0} 26 | run: | 27 | conda config --set always_yes yes 28 | conda config --add channels conda-forge 29 | mamba install --quiet pip 30 | 31 | mamba install -y -q pygithub 32 | 33 | - name: tokens 34 | shell: bash -l {0} 35 | run: | 36 | python .github/workflows/scripts/print_tokens.py 37 | env: 38 | GH_TOKEN: ${{ secrets.GH_TOKEN }} 39 | GH_TRAVIS_TOKEN: ${{ secrets.GH_TRAVIS_TOKEN }} 40 | GH_DRONE_TOKEN: ${{ secrets.GH_DRONE_TOKEN }} 41 | ORGWIDE_GH_TRAVIS_TOKEN: ${{ secrets.ORGWIDE_GH_TRAVIS_TOKEN }} 42 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-win.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: win_64 3 | condition: not(eq(variables['Build.SourceBranch'], 'refs/heads/main')) 4 | pool: 5 | vmImage: windows-2022 6 | strategy: 7 | maxParallel: 4 8 | matrix: 9 | win: 10 | CONFIG: win64 11 | timeoutInMinutes: 360 12 | steps: 13 | - task: PythonScript@0 14 | displayName: 'Download Miniforge' 15 | inputs: 16 | scriptSource: inline 17 | script: | 18 | import urllib.request 19 | url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Windows-x86_64.exe' 20 | path = r"$(Build.ArtifactStagingDirectory)/Miniforge.exe" 21 | urllib.request.urlretrieve(url, path) 22 | - script: | 23 | start /wait "" %BUILD_ARTIFACTSTAGINGDIRECTORY%\Miniforge.exe /InstallationType=JustMe /RegisterPython=0 /S /D=C:\Miniforge 24 | displayName: Install Miniforge 25 | - powershell: Write-Host "##vso[task.prependpath]C:\Miniforge\Scripts" 26 | displayName: Add conda to PATH 27 | 28 | - script: | 29 | call .scripts\run_win_build.bat 30 | displayName: Build recipes 31 | env: 32 | CI: azure 33 | CONDA_BLD_PATH: C:\bld 34 | 35 | - publish: C:\\bld\\win-64\\ 36 | artifact: conda_pkgs_win 37 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015-2023, conda-forge 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | * Neither the name of staged-recipes nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /.github/workflows/scripts/create_feedstocks: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -x 5 | 6 | # Ensure we are on the latest commit 7 | # of the branch where we are converting 8 | # recipes from. Currently this is `main`. 9 | export CF_CURRENT_BRANCH="${GITHUB_REF/refs\/heads\//}" 10 | git checkout "${CF_CURRENT_BRANCH}" 11 | 12 | # 2 core available on Travis CI Linux workers: https://docs.travis-ci.com/user/ci-environment/#Virtualization-environments 13 | # CPU_COUNT is passed through conda build: https://github.com/conda/conda-build/pull/1149 14 | export CPU_COUNT=2 15 | 16 | export PYTHONUNBUFFERED=1 17 | 18 | # Install Miniforge3. 19 | echo "" 20 | echo "Installing a fresh version of Miniforge3." 21 | curl -L "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" > ~/Miniforge3.sh 22 | chmod +x ~/Miniforge3.sh 23 | bash ~/Miniforge3.sh -b -p ~/Miniforge3 24 | touch ~/Miniforge3/conda-meta/pinned 25 | ( 26 | source ~/Miniforge3/bin/activate 27 | 28 | # Configure conda. 29 | echo "" 30 | echo "Configuring conda." 31 | conda config --set show_channel_urls true 32 | conda config --set auto_update_conda false 33 | conda config --set add_pip_as_python_dependency false 34 | conda config --set solver libmamba 35 | 36 | conda update -n base --yes --quiet conda mamba conda-libmamba-solver 37 | ) 38 | source ~/Miniforge3/bin/activate 39 | 40 | conda install --yes --quiet \ 41 | conda-forge-ci-setup=4.* \ 42 | "conda-smithy>=3.7.1,<4.0.0a0" \ 43 | conda-forge-pinning \ 44 | "conda-build>=24.3" \ 45 | "gitpython>=3.0.8,<3.1.20" \ 46 | requests \ 47 | ruamel.yaml \ 48 | "pygithub>=2.1.1" 49 | 50 | conda info 51 | mamba info 52 | conda config --get 53 | git config --global init.defaultBranch main 54 | 55 | python .github/workflows/scripts/create_feedstocks.py 56 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | daysUntilStale: 150 2 | daysUntilClose: 30 3 | only: pulls 4 | limitPerRun: 10 5 | staleLabel: stale 6 | 7 | # Comment to post when marking an issue as stale. Set to `false` to disable 8 | markComment: > 9 | Hi friend! 10 | 11 | 12 | We really, really, really appreciate that you have taken the time 13 | to make a PR on `conda-forge/staged-recipes`! 
`conda-forge` only exists 14 | because people like you donate their time to build and maintain conda recipes 15 | for use by the community. 16 | 17 | 18 | In an effort to maintain this repository and increase 19 | the signal-to-noise for open PRs, the maintainers of `staged-recipes` close 20 | excessively old PRs after six months. This PR will remain open 21 | for another month, and then will be closed. 22 | 23 | 24 | If you'd like to keep it open, please comment/push and we will be happy to oblige! 25 | Note that very old PRs will likely need to be rebased on `main` so that they can 26 | be rebuilt with the most recent CI scripts. If you have any trouble, or we missed 27 | reviewing this PR in the first place (sorry!), feel free 28 | to [ping the team](https://conda-forge.org/docs/maintainer/infrastructure.html#conda-forge-admin-please-ping-team) 29 | using a special command in a comment on the PR to get the attention of the 30 | `staged-recipes` team. 31 | 32 | 33 | Cheers and thank you for contributing to this community effort! 34 | 35 | # Comment to post when closing a stale issue. Set to `false` to disable 36 | closeComment: > 37 | Hi again! About a month ago, we commented on this PR saying it would be 38 | closed in another month if it was still inactive. It has been a month and 39 | so now it is being closed. Thank you so much for making it in the first place 40 | and contributing to the community project that is `conda-forge`. If you'd like 41 | to reopen this PR, please feel free to do so at any time! 42 | 43 | 44 | Cheers and have a great day! 45 | -------------------------------------------------------------------------------- /.appveyor.yml.notused: -------------------------------------------------------------------------------- 1 | image: Visual Studio 2017 2 | 3 | skip_commits: 4 | message: /^Merge pull request / 5 | 6 | environment: 7 | 8 | CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" 9 | CONFIG: win64 10 | 11 | matrix: 12 | - PLATFORM: "64" 13 | 14 | artifacts: 15 | # Store built conda packages as artifacts 16 | - path: 'conda_packages\*.bz2' 17 | 18 | platform: 19 | - x64 20 | 21 | install: 22 | # Find the recipes from main in this PR and remove them. 23 | - cmd: echo Finding recipes merged in main and removing them. 24 | - cmd: cd recipes 25 | - cmd: | 26 | for /f "tokens=*" %%a in ('git ls-tree --name-only main -- .') do rmdir /s /q %%a && echo Removing recipe: %%a 27 | - cmd: cd .. 28 | 29 | # Remove cygwin (and therefore the git that comes with it). 30 | - cmd: rmdir C:\cygwin /s /q 31 | 32 | # Use the pre-installed Miniconda for the desired arch 33 | - cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat 34 | - cmd: conda.exe config --add channels conda-forge 35 | - cmd: conda.exe config --set show_channel_urls true 36 | - cmd: appveyor-retry conda.exe update --yes --quiet conda 37 | 38 | 39 | - cmd: appveyor-retry conda.exe install --yes --quiet "conda>4.7.12" conda-forge-pinning conda-forge-ci-setup=2.* networkx=2.3 "conda-build>=3.18,!=3.28.3" "boa" 40 | 41 | - cmd: setup_conda_rc .\ .\recipes .\.ci_support\%CONFIG%.yaml 42 | - cmd: appveyor-retry run_conda_forge_build_setup 43 | 44 | # Skip .NET project specific build phase. 
45 | build: off 46 | 47 | test_script: 48 | - python .ci_support\build_all.py --arch %PLATFORM% 49 | 50 | # copy any newly created conda packages into the conda_packages dir 51 | - cmd: mkdir conda_packages 52 | # Uncomment the following two lines to make any conda packages created 53 | # available as build artifacts in AppVeyor 54 | #- cmd: 'copy /Y C:\Miniconda-x64\conda-bld\win-64\*.bz2 conda_packages || cmd /c "exit /b 0"' 55 | -------------------------------------------------------------------------------- /.github/workflows/README.md: -------------------------------------------------------------------------------- 1 | # Automated Review Labels 2 | 3 | These diagrams explain the workflow of the automated review labels actions. 4 | 5 | ## Workflow 6 | 7 | ```mermaid 8 | sequenceDiagram 9 | actor User 10 | participant issue 11 | participant GitHub actions 12 | actor Authenticated bot 13 | actor Reviewer 14 | Note right of Authenticated bot: Authenticated bot is a GitHub action with the necessary rights for getMembershipForUserInOrg 15 | User->>issue: Adds any comment 16 | activate GitHub actions 17 | alt is PR AND no review-requested label exists 18 | loop Over ['@conda-forge/staged-recipes', '@conda-forge/help-python', '@conda-forge/help-python-c', '@conda-forge/help-r', '@conda-forge/help-java', '@conda-forge/help-nodejs', '@conda-forge/help-c-cpp', '@conda-forge/help-perl', '@conda-forge/help-julia', '@conda-forge/help-ruby'] 19 | alt team is in comment 20 | GitHub actions->>issue: Adds review-requested label 21 | GitHub actions->>issue: Adds team label 22 | GitHub actions->>issue: Remove Awaiting author contribution label 23 | rect rgb(191, 223, 255) 24 | GitHub actions-->>issue: Assigns team for review 25 | end 26 | end 27 | end 28 | end 29 | deactivate GitHub actions 30 | Reviewer->>issue: Adds a review comment 31 | activate Authenticated bot 32 | alt is PR AND commenter is not author of PR AND review-requested label exists 33 | Authenticated bot-->>Reviewer: Checks membership 34 | alt Reviewer is part of staged-recipes 35 | Authenticated bot->>issue: Remove review-requested label 36 | Authenticated bot->>issue: Add Awaiting author contribution label 37 | end 38 | end 39 | deactivate Authenticated bot 40 | ``` 41 | 42 | _Note_: The blue part is proposed and not actually integrated yet. 
43 | 44 | ## Label State Diagram 45 | 46 | ```mermaid 47 | stateDiagram-v2 48 | [*] --> reviewrequested,team: User pings team 49 | reviewrequested,team --> awaitingauthor,team: staged-recipes reviews 50 | awaitingauthor,team --> reviewrequested,team: User pings team 51 | ``` 52 | -------------------------------------------------------------------------------- /.github/workflows/create_feedstocks.yml: -------------------------------------------------------------------------------- 1 | name: Create feedstocks 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | schedule: 8 | - cron: '*/10 * * * *' 9 | workflow_dispatch: null 10 | 11 | permissions: {} 12 | jobs: 13 | create-feedstocks: 14 | permissions: 15 | contents: write # for git push 16 | actions: read # to read runs 17 | 18 | if: github.repository == 'conda-forge/staged-recipes' 19 | name: Create feedstocks 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - name: Checkout code 24 | uses: actions/checkout@v2 25 | with: 26 | token: ${{ github.token }} 27 | 28 | - name: Prevent multiple jobs running in parallel 29 | id: conversion_lock 30 | uses: beckermr/turnstyle-python@v1 31 | with: 32 | abort-after-seconds: 3 33 | poll-interval-seconds: 2 34 | github-token: ${{ secrets.GITHUB_TOKEN }} 35 | continue-on-error: true 36 | 37 | - name: commit any changes upon checkout 38 | run: | 39 | git config --global user.email "pelson.pub+conda-forge@gmail.com" 40 | git config --global user.name "conda-forge-admin" 41 | git add * 42 | git commit -am "make sure we have no windows line endings" || exit 0 43 | for i in `seq 1 5`; do 44 | git pull 45 | git push 46 | done 47 | 48 | - name: Run feedstock creation 49 | # outcome is evaluated before continue-on-error above 50 | if: ${{ steps.conversion_lock.outcome == 'success' }} 51 | run: | 52 | # Avoid wasting CI time if there are no recipes ready for conversion 53 | if [ "$(ls recipes/*/meta.yaml | grep -v recipes/example/meta.yaml --count)" -eq 0 ]; then 54 | echo "No new recipes found, exiting..." 55 | exit 0 56 | fi 57 | 58 | echo "Creating feedstocks from the recipe(s)." 59 | 60 | source ./.github/workflows/scripts/create_feedstocks 61 | env: 62 | STAGING_BINSTAR_TOKEN: ${{ secrets.STAGING_BINSTAR_TOKEN }} 63 | GH_TOKEN: ${{ secrets.CF_ADMIN_GITHUB_TOKEN }} 64 | TRAVIS_TOKEN: ${{ secrets.ORGWIDE_TRAVIS_TOKEN }} 65 | AZURE_TOKEN: ${{ secrets.AZURE_TOKEN }} 66 | -------------------------------------------------------------------------------- /.scripts/run_osx_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | source .scripts/logging_utils.sh 6 | 7 | ( startgroup "Ensuring Miniforge" ) 2> /dev/null 8 | 9 | MINIFORGE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" 10 | MINIFORGE_FILE="Miniforge3-MacOSX-x86_64.sh" 11 | MINIFORGE_ROOT="${MINIFORGE_ROOT:-${HOME}/Miniforge3}" 12 | 13 | if [[ -d "${MINIFORGE_ROOT}" ]]; then 14 | echo "Miniforge already installed at ${MINIFORGE_ROOT}." 15 | else 16 | echo "Installing Miniforge" 17 | curl -L -O "${MINIFORGE_URL}/${MINIFORGE_FILE}" 18 | bash $MINIFORGE_FILE -bp "${MINIFORGE_ROOT}" 19 | fi 20 | 21 | ( endgroup "Ensuring Miniforge" ) 2> /dev/null 22 | 23 | ( startgroup "Configuring conda" ) 2> /dev/null 24 | 25 | cat >~/.condarc < /dev/null 63 | git fetch --force origin main:main 64 | git ls-tree --name-only main -- . 
| xargs -I {} sh -c "rm -rf {} && echo Removing recipe: {}" 65 | popd > /dev/null 66 | echo "" 67 | 68 | ( endgroup "Configuring conda" ) 2> /dev/null 69 | 70 | # We just want to build all of the recipes. 71 | echo "Building all recipes" 72 | python .ci_support/build_all.py 73 | -------------------------------------------------------------------------------- /.scripts/run_win_build.bat: -------------------------------------------------------------------------------- 1 | :: PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 2 | :: will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent 3 | :: changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 4 | :: benefit from the improvement. 5 | 6 | :: Note: we assume a Miniforge installation is available 7 | 8 | :: INPUTS (required environment variables) 9 | :: CONDA_BLD_PATH: path for the conda-build workspace 10 | :: CI: azure, or unset 11 | 12 | setlocal enableextensions enabledelayedexpansion 13 | 14 | call :start_group "Configuring conda" 15 | 16 | if "%CONDA_BLD_PATH%" == "" ( 17 | set "CONDA_BLD_PATH=C:\bld" 18 | ) 19 | 20 | :: Activate the base conda environment 21 | call activate base 22 | 23 | conda.exe config --set always_yes yes 24 | if errorlevel 1 exit 1 25 | conda.exe config --set channel_priority strict 26 | if errorlevel 1 exit 1 27 | conda.exe config --set solver libmamba 28 | if errorlevel 1 exit 1 29 | 30 | echo Installing dependencies 31 | conda.exe install --file .\.ci_support\requirements.txt 32 | if errorlevel 1 exit 1 33 | 34 | :: Set basic configuration 35 | echo Setting up configuration 36 | setup_conda_rc .\ ".\recipes" .\.ci_support\%CONFIG%.yaml 37 | if errorlevel 1 exit 1 38 | 39 | echo Run conda_forge_build_setup 40 | call run_conda_forge_build_setup 41 | if errorlevel 1 exit 1 42 | 43 | echo Force fetch origin/main 44 | git fetch --force origin main:main 45 | if errorlevel 1 exit 1 46 | echo Removing recipes also present in main 47 | cd recipes 48 | for /f "tokens=*" %%a in ('git ls-tree --name-only main -- .') do rmdir /s /q %%a && echo Removing recipe: %%a 49 | cd .. 50 | 51 | :: make sure there is a package directory so that artifact publishing works 52 | if not exist "%CONDA_BLD_PATH%\win-64\" mkdir "%CONDA_BLD_PATH%\win-64\" 53 | 54 | echo Index %CONDA_BLD_PATH% 55 | conda.exe index "%CONDA_BLD_PATH%" 56 | if errorlevel 1 exit 1 57 | 58 | call :end_group 59 | 60 | echo Building all recipes 61 | python .ci_support\build_all.py --arch 64 62 | if errorlevel 1 exit 1 63 | 64 | exit 65 | 66 | :: Logging subroutines 67 | 68 | :start_group 69 | if /i "%CI%" == "github_actions" ( 70 | echo ::group::%~1 71 | exit /b 72 | ) 73 | if /i "%CI%" == "azure" ( 74 | echo ##[group]%~1 75 | exit /b 76 | ) 77 | echo %~1 78 | exit /b 79 | 80 | :end_group 81 | if /i "%CI%" == "github_actions" ( 82 | echo ::endgroup:: 83 | exit /b 84 | ) 85 | if /i "%CI%" == "azure" ( 86 | echo ##[endgroup] 87 | exit /b 88 | ) 89 | exit /b 90 | -------------------------------------------------------------------------------- /.scripts/build_steps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 4 | # will be lost next time ``conda smithy rerender`` is run.
# If you would like to make permanent 5 | # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 6 | # benefit from the improvement. 7 | 8 | set -xeuo pipefail 9 | 10 | export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/staged-recipes}" 11 | source "${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh" 12 | 13 | # This closes the matching `startgroup` on `run_docker_build.sh` 14 | ( endgroup "Start Docker" ) 2> /dev/null 15 | 16 | ( startgroup "Configuring conda" ) 2> /dev/null 17 | 18 | export PYTHONUNBUFFERED=1 19 | export CI_SUPPORT="/home/conda/staged-recipes-copy/.ci_support" 20 | 21 | cat >~/.condarc < /dev/null 50 | if [ "${AZURE}" == "True" ]; then 51 | git fetch --force origin main:main 52 | fi 53 | git ls-tree --name-only main -- . | xargs -I {} sh -c "rm -rf ~/staged-recipes-copy/recipes/{} && echo Removing recipe: {}" 54 | popd > /dev/null 55 | 56 | 57 | 58 | conda install --quiet --file ${FEEDSTOCK_ROOT}/.ci_support/requirements.txt 59 | 60 | setup_conda_rc "${FEEDSTOCK_ROOT}" "/home/conda/staged-recipes-copy/recipes" "${CI_SUPPORT}/${CONFIG}.yaml" 61 | source run_conda_forge_build_setup 62 | 63 | # yum installs anything from a "yum_requirements.txt" file that isn't a blank line or comment. 64 | find ~/staged-recipes-copy/recipes -mindepth 2 -maxdepth 2 -type f -name "yum_requirements.txt" \ 65 | | xargs -n1 cat | { grep -v -e "^#" -e "^$" || test $? == 1; } | \ 66 | xargs -r /usr/bin/sudo -n yum install -y 67 | 68 | # Make sure build_artifacts is a valid channel 69 | conda index ${FEEDSTOCK_ROOT}/build_artifacts 70 | 71 | ( endgroup "Configuring conda" ) 2> /dev/null 72 | 73 | echo "Building all recipes" 74 | python ${CI_SUPPORT}/build_all.py 75 | 76 | ( startgroup "Final checks" ) 2> /dev/null 77 | 78 | touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done" 79 | -------------------------------------------------------------------------------- /.scripts/run_docker_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # NOTE: This script has been adapted from content generated by github.com/conda-forge/conda-smithy 4 | 5 | source .scripts/logging_utils.sh 6 | 7 | ( startgroup "Configure Docker" ) 2> /dev/null 8 | 9 | set -xeo pipefail 10 | 11 | REPO_ROOT=$(cd "$(dirname "$0")/.."; pwd;) 12 | ARTIFACTS="$REPO_ROOT/build_artifacts" 13 | THISDIR="$( cd "$( dirname "$0" )" >/dev/null && pwd )" 14 | PROVIDER_DIR="$(basename "$THISDIR")" 15 | AZURE="${AZURE:-False}" 16 | 17 | docker info 18 | 19 | # In order for the conda-build process in the container to write to the mounted 20 | # volumes, we need to run with the same id as the host machine, which is 21 | # normally the owner of the mounted volumes, or at least has write permission 22 | HOST_USER_ID=$(id -u) 23 | # Check if docker-machine is being used (normally on OSX) and get the uid from 24 | # the VM 25 | if hash docker-machine 2> /dev/null && docker-machine active > /dev/null; then 26 | HOST_USER_ID=$(docker-machine ssh $(docker-machine active) id -u) 27 | fi 28 | 29 | if [ -z "${DOCKER_IMAGE}" ]; then 30 | SHYAML_INSTALLED="$(shyaml -h || echo NO)" 31 | if [ "${SHYAML_INSTALLED}" == "NO" ]; then 32 | echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Trying to parse with coreutils" 33 | DOCKER_IMAGE=$(cat .ci_support/${CONFIG}.yaml | grep '^docker_image:$' -A 1 | tail -n 1 | cut -b 3-) 34 | if [ "${DOCKER_IMAGE}" = "" ]; then 35 | echo "No docker_image entry found in ${CONFIG}.
Falling back to quay.io/condaforge/linux-anvil-comp7" 36 | DOCKER_IMAGE="quay.io/condaforge/linux-anvil-comp7" 37 | fi 38 | else 39 | DOCKER_IMAGE="$(cat "${REPO_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 quay.io/condaforge/linux-anvil-comp7 )" 40 | fi 41 | fi 42 | 43 | mkdir -p "$ARTIFACTS" 44 | DONE_CANARY="$ARTIFACTS/conda-forge-build-done" 45 | rm -f "$DONE_CANARY" 46 | 47 | DOCKER_RUN_ARGS="-it" 48 | 49 | if [ "${AZURE}" == "True" ]; then 50 | DOCKER_RUN_ARGS="" 51 | fi 52 | ( endgroup "Configure Docker" ) 2> /dev/null 53 | 54 | ( startgroup "Start Docker" ) 2> /dev/null 55 | # this group is closed in build_steps.sh 56 | 57 | docker pull "${DOCKER_IMAGE}" 58 | docker run ${DOCKER_RUN_ARGS} \ 59 | -v "${REPO_ROOT}:/home/conda/staged-recipes" \ 60 | -e HOST_USER_ID=${HOST_USER_ID} \ 61 | -e AZURE=${AZURE} \ 62 | -e CONFIG \ 63 | -e CI \ 64 | -e CPU_COUNT \ 65 | -e DEFAULT_LINUX_VERSION \ 66 | "${DOCKER_IMAGE}" \ 67 | bash \ 68 | "/home/conda/staged-recipes/${PROVIDER_DIR}/build_steps.sh" 69 | 70 | # verify that the end of the script was reached 71 | test -f "$DONE_CANARY" 72 | 73 | # This closes the last group opened in `build_steps.sh` 74 | ( endgroup "Final checks" ) 2> /dev/null 75 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 41 | 42 | Checklist 43 | - [ ] Title of this PR is meaningful: e.g. "Adding my_nifty_package", not "updated meta.yaml". 44 | - [ ] License file is packaged (see [here](https://github.com/conda-forge/staged-recipes/blob/5eddbd7fc9d1502169089da06c3688d9759be978/recipes/example/meta.yaml#L64-L73) for an example). 45 | - [ ] Source is from official source. 46 | - [ ] Package does not vendor other packages. (If a package uses the source of another package, they should be separate packages or the licenses of all packages need to be packaged). 47 | - [ ] If static libraries are linked in, the license of the static library is packaged. 48 | - [ ] Package does not ship static libraries. If static libraries are needed, [follow CFEP-18](https://github.com/conda-forge/cfep/blob/main/cfep-18.md). 49 | - [ ] Build number is 0. 50 | - [ ] A tarball (`url`) rather than a repo (e.g. `git_url`) is used in your recipe (see [here](https://conda-forge.org/docs/maintainer/adding_pkgs.html#build-from-tarballs-not-repos) for more details). 51 | - [ ] GitHub users listed in the maintainer section have posted a comment confirming they are willing to be listed there. 52 | - [ ] When in trouble, please check our [knowledge base documentation](https://conda-forge.org/docs/maintainer/knowledge_base.html) before pinging a team. 53 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/package-request.yml: -------------------------------------------------------------------------------- 1 | name: Package request 2 | description: Request a package you would like to have added to the conda-forge channel 3 | title: "Package request: " 4 | labels: 5 | - Package request 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | **As we are all volunteers here, please note that we cannot always accommodate your request and thus encourage you to try to submit the package yourself.** 11 | 12 | Some information on how to get started with adding a package can be found in the conda-forge docs, linked below. 
13 | Please also note that you do not need to be the original maintainer of the package to add it to conda-forge. 14 | Finally, don't hesitate to create an initial PR and our review teams will be happy to help you from there. 15 | 16 | - [Currently available conda-forge packages](https://conda-forge.org/feedstock-outputs/) 17 | - [The conda-forge docs](https://conda-forge.org/docs/maintainer/adding_pkgs.html#the-staging-process) 18 | - [Grayskull](https://github.com/conda-incubator/grayskull) - to automatically generate a recipe for packages on PyPI 19 | - [The conda r skeleton helpers](https://github.com/bgruening/conda_r_skeleton_helper) - to automatically generate a recipe for packages on CRAN 20 | 21 | --- 22 | 23 | If none of these options are helpful, please fill out the following form: 24 | 25 | - type: input 26 | attributes: 27 | label: Package name 28 | description: The name of the package you would like to request. 29 | validations: 30 | required: true 31 | 32 | - type: input 33 | attributes: 34 | label: Package version 35 | description: The version of the package you would like to request, if a specific one is required. 36 | value: Newest 37 | 38 | - type: textarea 39 | attributes: 40 | label: Package website 41 | description: | 42 | The website or repository of the package you would like to request. 43 | placeholder: 1. Website 2. GitHub repository 44 | 45 | - type: textarea 46 | attributes: 47 | label: Package availability 48 | description: | 49 | Where the package can be downloaded from. 50 | placeholder: 1. PyPI 2. GitHub releases 51 | validations: 52 | required: true 53 | 54 | - type: textarea 55 | attributes: 56 | label: Additional comments 57 | placeholder: | 58 | For example: This package is a dependency for ... 59 | 60 | - type: checkboxes 61 | id: Duplicates 62 | attributes: 63 | label: Package is not available 64 | description: | 65 | Please verify that the package does not already exist. This should also be checked with _hyphens_ vs. _underscores_ and added identifiers like `python-` and `r-`. 66 | _Note:_ All currently available packages can be found on the conda-forge [website](https://conda-forge.org/feedstock-outputs/). 67 | options: 68 | - label: The package is not available on conda-forge. 69 | required: true 70 | 71 | - type: checkboxes 72 | id: previous 73 | attributes: 74 | label: No previous issues or open PRs 75 | description: Please check that no previous [issue](https://github.com/conda-forge/staged-recipes/issues) exists and that no [PR](https://github.com/conda-forge/staged-recipes/pulls) is currently open. 76 | options: 77 | - label: No previous issue exists and no PR has been opened. 78 | required: true 79 | 80 | -------------------------------------------------------------------------------- /recipes/example/meta.yaml: -------------------------------------------------------------------------------- 1 | # Note: there are many handy hints in comments in this example -- remove them when you've finalized your recipe 2 | # If your package is Python-based, we recommend using Grayskull to generate it instead: 3 | # https://github.com/conda-incubator/grayskull 4 | 5 | # Jinja variables help maintain the recipe as you'll update the version only here. 6 | # Using the name variable with the URL in line 16 is convenient 7 | # when copying and pasting from another recipe, but not really needed.
8 | {% set name = "simplejson" %} 9 | {% set version = "3.8.2" %} 10 | 11 | package: 12 | name: {{ name|lower }} 13 | version: {{ version }} 14 | 15 | source: 16 | url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz 17 | # If getting the source from GitHub, remove the line above, 18 | # uncomment the line below, and modify as needed. Use releases if available: 19 | # url: https://github.com/simplejson/simplejson/releases/download/{{ version }}/simplejson-{{ version }}.tar.gz 20 | # and otherwise fall back to archive: 21 | # url: https://github.com/simplejson/simplejson/archive/v{{ version }}.tar.gz 22 | sha256: 2b3a0c466fb4a1014ea131c2b8ea7c519f9278eba73d6fcb361b7bdb4fd494e9 23 | # sha256 is the preferred checksum -- you can get it for a file with: 24 | # `openssl sha256 `. 25 | # You may need the openssl package, available on conda-forge: 26 | # `conda install openssl -c conda-forge`` 27 | 28 | build: 29 | # Uncomment the following line if the package is pure Python and the recipe is exactly the same for all platforms. 30 | # It is okay if the dependencies are not built for all platforms/versions, although selectors are still not allowed. 31 | # See https://conda-forge.org/docs/maintainer/knowledge_base.html#noarch-python for more details. 32 | # noarch: python 33 | # If the installation is complex, or different between Unix and Windows, use separate bld.bat and build.sh files instead of this key. 34 | # By default, the package will be built for the Python versions supported by conda-forge and for all major OSs. 35 | # Add the line "skip: True # [py<35]" (for example) to limit to Python 3.5 and newer, or "skip: True # [not win]" to limit to Windows. 36 | # More info about selectors can be found in the conda-build docs: 37 | # https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#preprocessing-selectors 38 | script: {{ PYTHON }} -m pip install . -vv 39 | number: 0 40 | 41 | requirements: 42 | build: 43 | # If your project compiles code (such as a C extension) then add the required compilers as separate entries here. 44 | # Compilers are named 'c', 'cxx' and 'fortran'. 45 | - {{ compiler('c') }} 46 | host: 47 | - python 48 | - pip 49 | run: 50 | - python 51 | 52 | test: 53 | # Some packages might need a `test/commands` key to check CLI. 54 | # List all the packages/modules that `run_test.py` imports. 55 | imports: 56 | - simplejson 57 | - simplejson.tests 58 | # For python packages, it is useful to run pip check. However, sometimes the 59 | # metadata used by pip is out of date. Thus this section is optional if it is 60 | # failing. 61 | requires: 62 | - pip 63 | commands: 64 | - pip check 65 | 66 | about: 67 | home: https://github.com/simplejson/simplejson 68 | summary: 'Simple, fast, extensible JSON encoder/decoder for Python' 69 | description: | 70 | simplejson is a simple, fast, complete, correct and extensible 71 | JSON encoder and decoder for Python 2.5+ and 72 | Python 3.3+. It is pure Python code with no dependencies, but includes 73 | an optional C extension for a serious speed boost. 74 | # Remember to specify the license variants for BSD, Apache, GPL, and LGPL. 75 | # Use the SPDX identifier, e.g: GPL-2.0-only instead of GNU General Public License version 2.0 76 | # See https://spdx.org/licenses/ 77 | license: MIT 78 | # The license_family, i.e. "BSD" if license is "BSD-3-Clause". 
79 | # Optional 80 | license_family: MIT 81 | # It is required to include a license file in the package, 82 | # (even if the license doesn't require it) using the license_file entry. 83 | # Please also note that some projects have multiple license files which all need to be added using a valid yaml list. 84 | # See https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#license-file 85 | license_file: LICENSE.txt 86 | # The doc_url and dev_url are optional. 87 | doc_url: https://simplejson.readthedocs.io/ 88 | dev_url: https://github.com/simplejson/simplejson 89 | 90 | extra: 91 | recipe-maintainers: 92 | # GitHub IDs for maintainers of the recipe. 93 | # Always check with the people listed below if they are OK becoming maintainers of the recipe. (There will be spam!) 94 | - LisaSimpson 95 | - LandoCalrissian 96 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-linux.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: linux_64 3 | condition: not(eq(variables['Build.SourceBranch'], 'refs/heads/main')) 4 | pool: 5 | vmImage: ubuntu-latest 6 | timeoutInMinutes: 360 7 | steps: 8 | - script: | 9 | sudo mkdir -p /opt/empty_dir 10 | for d in \ 11 | /opt/ghc \ 12 | /opt/hostedtoolcache \ 13 | /usr/lib/jvm \ 14 | /usr/local/.ghcup \ 15 | /usr/local/android \ 16 | /usr/local/powershell \ 17 | /usr/share/dotnet \ 18 | /usr/share/swift \ 19 | ; do 20 | sudo rsync --stats -a --delete /opt/empty_dir/ $d || true 21 | done 22 | displayName: Manage disk space 23 | 24 | - script: | 25 | sudo fallocate -l 10GiB /swapfile || true 26 | sudo chmod 600 /swapfile || true 27 | sudo mkswap /swapfile || true 28 | sudo swapon /swapfile || true 29 | displayName: Create swap file 30 | 31 | - script: | 32 | # sudo pip install --upgrade pip 33 | sudo pip install setuptools shyaml 34 | displayName: Install dependencies 35 | 36 | - script: | 37 | set -e 38 | 39 | # make sure there is a package directory so that artifact publishing works 40 | mkdir -p build_artifacts/{noarch,linux-64}/ 41 | 42 | export CI=azure 43 | export CONFIG=linux64 44 | export DOCKER_IMAGE=quay.io/condaforge/linux-anvil-cos7-x86_64 45 | export AZURE=True 46 | .scripts/run_docker_build.sh 47 | 48 | displayName: Run docker build 49 | name: linux_64_build 50 | 51 | - publish: build_artifacts/linux-64/ 52 | artifact: conda_pkgs_linux 53 | 54 | - publish: build_artifacts/noarch/ 55 | artifact: conda_pkgs_noarch 56 | 57 | - job: linux_64_cuda_112 58 | dependsOn: linux_64 59 | condition: and(not(eq(variables['Build.SourceBranch'], 'refs/heads/main')), eq(dependencies.linux_64.outputs['linux_64_build.NEED_CUDA'], '1')) 60 | pool: 61 | vmImage: ubuntu-latest 62 | timeoutInMinutes: 360 63 | steps: 64 | - script: | 65 | sudo mkdir -p /opt/empty_dir 66 | for d in \ 67 | /opt/ghc \ 68 | /opt/hostedtoolcache \ 69 | /usr/lib/jvm \ 70 | /usr/local/.ghcup \ 71 | /usr/local/android \ 72 | /usr/local/powershell \ 73 | /usr/share/dotnet \ 74 | /usr/share/swift \ 75 | ; do 76 | sudo rsync --stats -a --delete /opt/empty_dir/ $d || true 77 | done 78 | displayName: Manage disk space 79 | 80 | - script: | 81 | # sudo pip install --upgrade pip 82 | sudo pip install setuptools shyaml 83 | displayName: Install dependencies 84 | 85 | - script: | 86 | set -e 87 | 88 | # make sure there is a package directory so that artifact publishing works 89 | mkdir -p build_artifacts/linux-64/ 90 | 91 | export CI=azure 92 | export 
CONFIG=linux64_cuda112 93 | export DOCKER_IMAGE=quay.io/condaforge/linux-anvil-cuda:11.2 94 | export AZURE=True 95 | .scripts/run_docker_build.sh 96 | 97 | displayName: Run docker build for CUDA 11.2 98 | - publish: build_artifacts/linux-64/ 99 | artifact: conda_pkgs_linux_64_cuda112 100 | 101 | - job: linux_64_cuda_118 102 | dependsOn: linux_64 103 | condition: and(not(eq(variables['Build.SourceBranch'], 'refs/heads/main')), eq(dependencies.linux_64.outputs['linux_64_build.NEED_CUDA'], '1')) 104 | pool: 105 | vmImage: ubuntu-latest 106 | timeoutInMinutes: 360 107 | steps: 108 | - script: | 109 | sudo mkdir -p /opt/empty_dir 110 | for d in \ 111 | /opt/ghc \ 112 | /opt/hostedtoolcache \ 113 | /usr/lib/jvm \ 114 | /usr/local/.ghcup \ 115 | /usr/local/android \ 116 | /usr/local/powershell \ 117 | /usr/share/dotnet \ 118 | /usr/share/swift \ 119 | ; do 120 | sudo rsync --stats -a --delete /opt/empty_dir/ $d || true 121 | done 122 | displayName: Manage disk space 123 | 124 | - script: | 125 | # sudo pip install --upgrade pip 126 | sudo pip install setuptools shyaml 127 | displayName: Install dependencies 128 | 129 | - script: | 130 | set -e 131 | 132 | # make sure there is a package directory so that artifact publishing works 133 | mkdir -p build_artifacts/linux-64/ 134 | 135 | export CI=azure 136 | export CONFIG=linux64_cuda118 137 | export DOCKER_IMAGE=quay.io/condaforge/linux-anvil-cuda:11.8 138 | export AZURE=True 139 | .scripts/run_docker_build.sh 140 | 141 | displayName: Run docker build for CUDA 11.8 142 | - publish: build_artifacts/linux-64/ 143 | artifact: conda_pkgs_linux_64_cuda118 144 | 145 | - job: linux_64_cuda_120 146 | dependsOn: linux_64 147 | condition: and(not(eq(variables['Build.SourceBranch'], 'refs/heads/main')), eq(dependencies.linux_64.outputs['linux_64_build.NEED_CUDA'], '1')) 148 | pool: 149 | vmImage: ubuntu-latest 150 | timeoutInMinutes: 360 151 | steps: 152 | - script: | 153 | sudo mkdir -p /opt/empty_dir 154 | for d in \ 155 | /opt/ghc \ 156 | /opt/hostedtoolcache \ 157 | /usr/lib/jvm \ 158 | /usr/local/.ghcup \ 159 | /usr/local/android \ 160 | /usr/local/powershell \ 161 | /usr/share/dotnet \ 162 | /usr/share/swift \ 163 | ; do 164 | sudo rsync --stats -a --delete /opt/empty_dir/ $d || true 165 | done 166 | displayName: Manage disk space 167 | 168 | - script: | 169 | # sudo pip install --upgrade pip 170 | sudo pip install setuptools shyaml 171 | displayName: Install dependencies 172 | 173 | - script: | 174 | set -e 175 | 176 | # make sure there is a package directory so that artifact publishing works 177 | mkdir -p build_artifacts/linux-64/ 178 | 179 | export CI=azure 180 | export CONFIG=linux64_cuda120 181 | export DOCKER_IMAGE=quay.io/condaforge/linux-anvil-cos7-x86_64 182 | export AZURE=True 183 | .scripts/run_docker_build.sh 184 | 185 | displayName: Run docker build for CUDA 12.0 186 | - publish: build_artifacts/linux-64/ 187 | artifact: conda_pkgs_linux_64_cuda120 188 | -------------------------------------------------------------------------------- /.github/workflows/automate-review-labels.yml: -------------------------------------------------------------------------------- 1 | name: 'Automated review labels' 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | issues: 7 | types: [unlabeled, labeled] 8 | pull_request_target: 9 | types: [unlabeled, labeled] 10 | 11 | permissions: 12 | issues: write # for adding label to an issue 13 | pull-requests: write # for adding label to a pr 14 | 15 | jobs: 16 | 17 | add-review-team-label: 18 | name: 'When pinged, 
label a PR with review team' 19 | if: > 20 | github.event.issue 21 | && github.event.issue.pull_request 22 | runs-on: ubuntu-latest 23 | steps: 24 | - name: check-teams 25 | id: check_teams 26 | uses: actions/github-script@v6 27 | with: 28 | script: | 29 | const teams = [ 30 | '@conda-forge/staged-recipes', 31 | '@conda-forge/help-c-cpp', 32 | '@conda-forge/help-cdts', 33 | '@conda-forge/help-go', 34 | '@conda-forge/help-java', 35 | '@conda-forge/help-julia', 36 | '@conda-forge/help-nodejs', 37 | '@conda-forge/help-perl', 38 | '@conda-forge/help-python', 39 | '@conda-forge/help-python-c', 40 | '@conda-forge/help-r', 41 | '@conda-forge/help-ruby', 42 | '@conda-forge/help-rust' 43 | ]; 44 | let found_label = false; 45 | for (const team of teams) { 46 | let text = context.payload.comment.body; 47 | const regex = new RegExp(team + '[^\\w-]|' + team + '$'); 48 | let result = regex.test(text); 49 | if (result) { 50 | const slug = team.replace("@conda-forge/", ""); 51 | const label = slug.replace("help-", ""); 52 | found_label = true; 53 | github.rest.issues.addLabels({ 54 | issue_number: context.issue.number, 55 | owner: context.repo.owner, 56 | repo: context.repo.repo, 57 | labels: [label, 'review-requested'] 58 | }); 59 | // NOTE: GitHub Actions default token lacks permission to 60 | // assign teams for review; external bot required for 61 | // that feature. 62 | // 63 | // https://github.com/conda-forge/staged-recipes/issues/18023#issuecomment-1080451231 64 | console.log(`Somebody mentioned ${slug}.`); 65 | if (label == "staged-recipes") { 66 | github.rest.issues.createComment({ 67 | issue_number: context.issue.number, 68 | owner: context.repo.owner, 69 | repo: context.repo.repo, 70 | body: 'To help direct your pull request to the best reviewers, ' + 71 | 'please mention a topic-specific team if your recipe matches any of the following: ' + 72 | 'conda-forge/help-c-cpp, ' + 73 | 'conda-forge/help-cdts, ' + 74 | 'conda-forge/help-go, ' + 75 | 'conda-forge/help-java, ' + 76 | 'conda-forge/help-julia, ' + 77 | 'conda-forge/help-nodejs, ' + 78 | 'conda-forge/help-perl, ' + 79 | 'conda-forge/help-python, ' + 80 | 'conda-forge/help-python-c, ' + 81 | 'conda-forge/help-r, ' + 82 | 'conda-forge/help-ruby, ' + 83 | 'or ' + 84 | 'conda-forge/help-rust' + 85 | '. ' + 86 | 'Thanks!'
87 | }); 88 | } 89 | } 90 | } 91 | return found_label; 92 | - name: remove-labels 93 | if: > 94 | (steps.check_teams.outputs.result == 'true') 95 | && contains(github.event.issue.labels.*.name, 'Awaiting author contribution') 96 | uses: actions/github-script@v6 97 | with: 98 | script: | 99 | github.rest.issues.removeLabel({ 100 | issue_number: context.issue.number, 101 | owner: context.repo.owner, 102 | repo: context.repo.repo, 103 | name: 'Awaiting author contribution' 104 | }) 105 | 106 | add-await-when-review-removed: 107 | name: 'Add awaiting-author when review-requested removed' 108 | if: > 109 | github.event.action == 'unlabeled' 110 | && github.event.label.name == 'review-requested' 111 | runs-on: ubuntu-latest 112 | steps: 113 | - name: add-labels 114 | uses: actions/github-script@v6 115 | with: 116 | script: | 117 | github.rest.issues.addLabels({ 118 | issue_number: context.issue.number, 119 | owner: context.repo.owner, 120 | repo: context.repo.repo, 121 | labels: ['Awaiting author contribution'] 122 | }); 123 | 124 | remove-review-when-await-added: 125 | name: 'Remove review-requested when awaiting-author added' 126 | if: > 127 | github.event.action == 'labeled' 128 | && github.event.label.name == 'Awaiting author contribution' 129 | runs-on: ubuntu-latest 130 | steps: 131 | - name: remove-labels 132 | uses: actions/github-script@v6 133 | with: 134 | script: | 135 | github.rest.issues.removeLabel({ 136 | issue_number: context.issue.number, 137 | owner: context.repo.owner, 138 | repo: context.repo.repo, 139 | name: 'review-requested' 140 | }) 141 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## About 2 | 3 | This repo is a holding area for recipes destined for a conda-forge feedstock repo. To find out more about conda-forge, see https://github.com/conda-forge/conda-smithy. 4 | 5 | [![Join the chat at https://gitter.im/conda-forge/conda-forge.github.io](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/conda-forge/conda-forge.github.io) 6 | 7 | 8 | ## Feedstock conversion status 9 | 10 | [![create_feedstocks](https://github.com/conda-forge/admin-requests/actions/workflows/create_feedstocks.yml/badge.svg)](https://github.com/conda-forge/admin-requests/actions/workflows/create_feedstocks.yml) 11 | 12 | Failures with the above job are often caused by API rate limits from the various services used by conda-forge. 13 | This can result in empty feedstock repositories and will resolve itself automatically. 14 | If the issue persists, support can be found [on Gitter](https://gitter.im/conda-forge/conda-forge.github.io). 15 | 16 | ## Getting started 17 | 18 | 1. Fork this repository. 19 | 2. Make a new folder in `recipes` for your package (a trimmed recipe sketch is shown below). Look at the example recipe, our [documentation](http://conda-forge.org/docs/maintainer/adding_pkgs.html#) and the [FAQ](https://github.com/conda-forge/staged-recipes#faq) for help. 20 | 3. Open a pull request. Building of your package will be tested on Windows, Mac and Linux. 21 | 4. When your pull request is merged, a new repository, called a feedstock, will be created in the GitHub conda-forge organization, and the build/upload of your package will automatically be triggered. Once complete, the package is available on conda-forge.
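For orientation, here is a heavily trimmed sketch of what a recipe's `meta.yaml` can look like for a pure-Python project. Everything here (including the package name `mypkg`) is a placeholder, and the `example` recipe in this repository is the authoritative template:

```yaml
package:
  name: mypkg                      # placeholder name
  version: "1.0.0"

source:
  url: https://pypi.io/packages/source/m/mypkg/mypkg-1.0.0.tar.gz
  sha256: <sha256 of the tarball>  # see FAQ 2 below

build:
  number: 0
  noarch: python
  script: "{{ PYTHON }} -m pip install . -vv"

requirements:
  host:
    - python
    - pip
  run:
    - python

about:
  license: MIT
  license_file: LICENSE

extra:
  recipe-maintainers:
    - your-github-id
```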
22 | 23 | 24 | ## Grayskull - recipe generator for Python packages on PyPI 25 | 26 | For Python packages available on PyPI it is possible to use [grayskull](https://github.com/conda-incubator/grayskull) to generate the recipe. The user should review the generated recipe, especially the license and dependencies. 27 | 28 | Installing `grayskull`: `conda install -c conda-forge grayskull` 29 | 30 | Generating a recipe: `grayskull pypi PACKAGE_NAME_HERE` 31 | 32 | 33 | ## FAQ 34 | 35 | ### 1. **How do I start editing the recipe?** 36 | 37 | Look at one of [these examples](https://github.com/conda-forge/staged-recipes/tree/main/recipes) 38 | in this repository and modify it as necessary. 39 | 40 | Follow the order of the sections in the example recipe. If you make a copy of the example recipe, please remove the example's explainer comments from your recipe. Add your own comments to the recipe and build scripts to explain unusual build behavior or recipe options. 41 | 42 | *If there are details you are not sure about please open a pull request. The conda-forge team will be happy to answer your questions.* 43 | 44 | ### 2. **How do I populate the `hash` field?** 45 | 46 | If your package is on [PyPI](https://pypi.org), you can get the sha256 hash from your package's page on PyPI; look for the `SHA256` link next to the download link for your package. 47 | 48 | You can also generate a hash from the command line on Linux (and Mac if you install the necessary tools below). If you go this route, the `sha256` hash is preferable to the `md5` hash. 49 | 50 | To generate the `md5` hash: `md5 your_sdist.tar.gz` 51 | 52 | To generate the `sha256` hash: `openssl sha256 your_sdist.tar.gz` 53 | 54 | You may need the openssl package, available on conda-forge: 55 | `conda install openssl -c conda-forge` 56 | 57 | ### 3. **How do I exclude a platform?** 58 | 59 | Use the `skip` key in the `build` section along with a selector: 60 | 61 | ```yaml 62 | build: 63 | skip: true # [win] 64 | ``` 65 | 66 | A full description of selectors is [in the conda docs](https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#preprocessing-selectors). 67 | 68 | If the package can otherwise be `noarch` you can also skip it by using [virtual packages](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-virtual.html). 69 | 70 | _Note_: As the package will always be built on Linux, it needs to at least be available there. 71 | 72 | 73 | ### 4. **What does the `build: 0` entry mean?** 74 | 75 | The build number is used when the source code for the package has not changed but you need to make a new 76 | build. For example, if one of the dependencies of the package was not properly specified the first time 77 | you built the package, then when you fix the dependency and rebuild the package you should increase the build 78 | number. 79 | 80 | When the package version changes you should reset the build number to `0`. 81 | 82 | ### 5. **Do I have to import all of my unit tests into the recipe's `test` field?** 83 | 84 | No, you do not. The main purpose of the test section is to test whether this conda package was built and installed correctly (not whether the upstream package contains bugs). 85 |
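A short smoke test is usually enough; a minimal sketch of such a `test` section, using a hypothetical package `mypkg`:

```yaml
test:
  imports:
    - mypkg        # does the installed package import at all?
  requires:
    - pip
  commands:
    - pip check    # are the declared dependencies consistent?
```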
86 | ### 6. **Do all of my package's dependencies have to be in conda(-forge) already?** 87 | 88 | Short answer: yes. Long answer: In principle, as long as your dependencies are in at least one of 89 | your user's conda channels they will be able to install your package. In practice, that is difficult 90 | to manage, and we strive to get all dependencies built in conda-forge. 91 | 92 | ### 7. **When or why do I need to use `{{ PYTHON }} -m pip install . -vv`?** 93 | 94 | This should be the default install line for most Python packages. It is preferable to `python setup.py` because it handles metadata in a `conda`-friendlier way. 95 | 96 | ### 8. **Do I need `bld.bat` and/or `build.sh`?** 97 | 98 | In many cases, no. Python packages almost never need one. If the build can be done with one line you can put it in the `script` line of the `build` section. 99 | 100 | ### 9. What does being a conda-forge feedstock maintainer entail? 101 | 102 | The maintainers' "job" is to: 103 | 104 | - keep the feedstock updated by merging maintenance PRs from conda-forge's bots; 105 | - keep the package updated by bumping the version whenever there is a new release; 106 | - answer questions about the package on the feedstock issue tracker. 107 | 108 | ### 10. Why are there recipes already in the `recipes` directory? Should I do something about it? 109 | 110 | When a PR of recipe(s) is ready to go, it is merged into `main`. This will trigger a CI build specially designed to convert the recipe(s). However, for any number of reasons the recipe(s) may not be converted right away. In the interim, the recipe(s) will remain in `main` until they can be converted. There is no action required on the part of recipe contributors to resolve this, and it should have no impact on any other PRs being proposed. If these recipe(s) pending conversion do cause issues for your submission, please ping `conda-forge/core` for help. 111 | 112 | ### 11. **Some checks failed, but it wasn't my recipe! How do I trigger a rebuild?** 113 | 114 | Sometimes CI builds fail for reasons unrelated to your recipe. If that happens, you can trigger a rebuild by re-creating the last commit and force pushing it to your branch: 115 | 116 | ```bash 117 | # edit your last commit, giving it a new time stamp and hash 118 | # (you can just leave the message as it is) 119 | git commit --amend 120 | # push to github, overwriting your branch 121 | git push -f 122 | ``` 123 | 124 | If the problem was due to scripts in the `staged-recipes` repository, you may be asked to "rebase" once these are fixed. To do so, run: 125 | ```bash 126 | # If you didn't add a remote for conda-forge/staged-recipes yet, also run 127 | # these lines: 128 | # git remote add upstream https://github.com/conda-forge/staged-recipes.git 129 | # git fetch --all 130 | git rebase upstream/main 131 | git push -f 132 | ``` 133 | 134 | ### 12. My pull request passes all checks, but hasn't received any attention. How do I call attention to my PR? What is the customary amount of time to wait? 135 | 136 | 141 | 142 | Thank you very much for putting in this recipe PR! 143 | 144 | This repository is very active, so if you need help with a PR, please let the right people know. There are language-specific teams for reviewing recipes.
146 | 147 | | Language | Name of review team | 148 | | --------------- | ----------------------------- | 149 | | python | `@conda-forge/help-python` | 150 | | python/c hybrid | `@conda-forge/help-python-c` | 151 | | r | `@conda-forge/help-r` | 152 | | java | `@conda-forge/help-java` | 153 | | nodejs | `@conda-forge/help-nodejs` | 154 | | c/c++ | `@conda-forge/help-c-cpp` | 155 | | perl | `@conda-forge/help-perl` | 156 | | Julia | `@conda-forge/help-julia` | 157 | | ruby | `@conda-forge/help-ruby` | 158 | | other | `@conda-forge/staged-recipes` | 159 | 160 | Once the PR is ready for review, please mention one of the teams above in a 161 | new comment, e.g. `@conda-forge/help-some-language, ready for review!` 162 | Then, a bot will label the PR as 'review-requested'. 163 | 164 | Due to GitHub limitations, first-time contributors to conda-forge are unable 165 | to ping conda-forge teams directly, but you can [ask a bot to ping the team][1] 166 | using a special command in a comment on the PR to get the attention of the 167 | `staged-recipes` team. You can also consider asking on our [Gitter channel][2] 168 | if your recipe isn't reviewed promptly. 169 | 170 | [1]: https://conda-forge.org/docs/maintainer/infrastructure.html#conda-forge-admin-please-ping-team 171 | [2]: https://gitter.im/conda-forge/conda-forge.github.io 172 | 173 | All apologies in advance if your recipe PR does not receive prompt attention. 174 | This is a high-volume repository and the reviewers are volunteers. Review times vary depending on the number of reviewers on a given language team and may be days or weeks. We are always 175 | looking for more staged-recipe reviewers. If you are interested in volunteering, 176 | please contact a member of @conda-forge/core. We'd love to have your help! 177 | 178 | 179 | ### 13. Is there a changelog for this repository?
180 | 181 | There's no changelog file, but the following `git` command gives a good overview of the recent changes in the repository: 182 | 183 | ```bash 184 | $ git log --merges -- ':!recipes' 185 | ``` 186 | -------------------------------------------------------------------------------- /.ci_support/build_all.py: -------------------------------------------------------------------------------- 1 | import conda.base.context 2 | import conda.core.index 3 | import conda.resolve 4 | import conda_build.api 5 | import conda_index.api 6 | import networkx as nx 7 | from compute_build_graph import construct_graph 8 | import argparse 9 | import re 10 | import os 11 | from collections import OrderedDict 12 | import sys 13 | import subprocess 14 | import yaml 15 | 16 | try: 17 | from ruamel_yaml import BaseLoader, load 18 | except ImportError: 19 | from yaml import BaseLoader, load 20 | 21 | 22 | def get_host_platform(): 23 | from sys import platform 24 | if platform == "linux" or platform == "linux2": 25 | return "linux" 26 | elif platform == "darwin": 27 | return "osx" 28 | elif platform == "win32": 29 | return "win" 30 | 31 | 32 | def get_config_name(arch): 33 | platform = get_host_platform() 34 | return os.environ.get("CONFIG", "{}{}".format(platform, arch)) 35 | 36 | 37 | def build_all(recipes_dir, arch): 38 | folders = list(filter(lambda d: os.path.isdir(os.path.join(recipes_dir, d)), os.listdir(recipes_dir))) 39 | if not folders: 40 | print("Found no recipes to build") 41 | return 42 | 43 | platform = get_host_platform() 44 | script_dir = os.path.dirname(os.path.realpath(__file__)) 45 | variant_config_file = os.path.join(script_dir, "{}.yaml".format(get_config_name(arch))) 46 | 47 | found_cuda = False 48 | found_centos7 = False 49 | for folder in folders: 50 | meta_yaml = os.path.join(recipes_dir, folder, "meta.yaml") 51 | if os.path.exists(meta_yaml): 52 | with(open(meta_yaml, "r", encoding="utf-8")) as f: 53 | text = ''.join(f.readlines()) 54 | if 'cuda' in text: 55 | found_cuda = True 56 | if 'sysroot_linux-64' in text: 57 | found_centos7 = True 58 | cbc = os.path.join(recipes_dir, folder, "conda_build_config.yaml") 59 | if os.path.exists(cbc): 60 | with open(cbc, "r") as f: 61 | lines = f.readlines() 62 | pat = re.compile(r"^([^\#]*?)\s+\#\s\[.*(not\s(linux|unix)|(?/conda-forge.yml' 227 | """ 228 | folders = os.listdir(recipes_dir) 229 | conda_build_tools = [] 230 | for folder in folders: 231 | if folder == "example": 232 | continue 233 | cf = os.path.join(recipes_dir, folder, "conda-forge.yml") 234 | if os.path.exists(cf): 235 | with open(cf, "r") as f: 236 | cfy = yaml.safe_load(f.read()) 237 | conda_build_tools.append(cfy.get("conda_build_tool", "conda-build")) 238 | else: 239 | conda_build_tools.append("conda-build") 240 | if conda_build_tools: 241 | return all([tool == "mambabuild" for tool in conda_build_tools]) 242 | return False 243 | 244 | 245 | def use_mambabuild(): 246 | from boa.cli.mambabuild import prepare 247 | prepare() 248 | 249 | 250 | if __name__ == "__main__": 251 | parser = argparse.ArgumentParser() 252 | parser.add_argument('--arch', default='64', 253 | help='target architecture (64 or 32)') 254 | args = parser.parse_args() 255 | root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 256 | check_recipes_in_correct_dir(root_dir, "recipes") 257 | use_mamba = read_mambabuild(os.path.join(root_dir, "recipes")) 258 | if use_mamba: 259 | use_mambabuild() 260 | subprocess.run("conda clean --all --yes", shell=True, check=True) 261 | 
build_all(os.path.join(root_dir, "recipes"), args.arch) 262 | -------------------------------------------------------------------------------- /.github/workflows/scripts/create_feedstocks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Convert all recipes into feedstocks. 4 | 5 | This script is to be run in a TravisCI context, with all secret environment 6 | variables defined (STAGING_BINSTAR_TOKEN, GH_TOKEN) 7 | 8 | Such as: 9 | 10 | export GH_TOKEN=$(cat ~/.conda-smithy/github.token) 11 | 12 | """ 13 | from __future__ import print_function 14 | 15 | from conda_build.metadata import MetaData 16 | from conda_smithy.utils import get_feedstock_name_from_meta 17 | from contextlib import contextmanager 18 | from datetime import datetime, timezone 19 | from github import Github, GithubException 20 | import os.path 21 | import shutil 22 | import subprocess 23 | import sys 24 | import tempfile 25 | import traceback 26 | import time 27 | 28 | import requests 29 | from ruamel.yaml import YAML 30 | 31 | # Enable DEBUG to run the diagnostics, without actually creating new feedstocks. 32 | DEBUG = False 33 | 34 | REPO_SKIP_LIST = ["core", "bot", "staged-recipes", "arm-arch", "systems", "ctx"] 35 | 36 | recipe_directory_name = 'recipes' 37 | 38 | 39 | def list_recipes(): 40 | if os.path.isdir(recipe_directory_name): 41 | recipes = os.listdir(recipe_directory_name) 42 | else: 43 | recipes = [] 44 | 45 | for recipe_dir in recipes: 46 | # We don't list the "example" feedstock. It is an example, and is there 47 | # to be helpful. 48 | # .DS_Store is created by macOS to store custom attributes of its 49 | # containing folder. 50 | if recipe_dir in ['example', '.DS_Store']: 51 | continue 52 | path = os.path.abspath(os.path.join(recipe_directory_name, recipe_dir)) 53 | yield path, get_feedstock_name_from_meta(MetaData(path)) 54 | 55 | 56 | @contextmanager 57 | def tmp_dir(*args, **kwargs): 58 | temp_dir = tempfile.mkdtemp(*args, **kwargs) 59 | try: 60 | yield temp_dir 61 | finally: 62 | shutil.rmtree(temp_dir) 63 | 64 | 65 | def repo_exists(gh, organization, name): 66 | # Use the organization provided. 67 | org = gh.get_organization(organization) 68 | try: 69 | org.get_repo(name) 70 | return True 71 | except GithubException as e: 72 | if e.status == 404: 73 | return False 74 | raise 75 | 76 | 77 | def repo_default_branch(gh, organization, name): 78 | # Use the organization provided. 
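    # Falls back to 'main' below when the feedstock repository does not exist yet (404).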
79 | org = gh.get_organization(organization) 80 | try: 81 | repo = org.get_repo(name) 82 | return repo.default_branch 83 | except GithubException as e: 84 | if e.status == 404: 85 | return "main" 86 | raise 87 | 88 | 89 | def _set_default_branch(feedstock_dir, default_branch): 90 | yaml = YAML() 91 | with open(os.path.join(feedstock_dir, "conda-forge.yml"), "r") as fp: 92 | cfg = yaml.load(fp.read()) 93 | 94 | if "github" not in cfg: 95 | cfg["github"] = {} 96 | cfg["github"]["branch_name"] = default_branch 97 | cfg["github"]["tooling_branch_name"] = "main" 98 | 99 | if ( 100 | "upload_on_branch" in cfg 101 | and cfg["upload_on_branch"] != default_branch 102 | and cfg["upload_on_branch"] in ["master", "main"] 103 | ): 104 | cfg["upload_on_branch"] = default_branch 105 | 106 | if "conda_build" not in cfg: 107 | cfg["conda_build"] = {} 108 | 109 | if "error_overlinking" not in cfg["conda_build"]: 110 | cfg["conda_build"]["error_overlinking"] = True 111 | 112 | with open(os.path.join(feedstock_dir, "conda-forge.yml"), "w") as fp: 113 | yaml.dump(cfg, fp) 114 | 115 | 116 | def feedstock_token_exists(organization, name): 117 | r = requests.get( 118 | "https://api.github.com/repos/%s/" 119 | "feedstock-tokens/contents/tokens/%s.json" % (organization, name), 120 | headers={"Authorization": "token %s" % os.environ["GH_TOKEN"]}, 121 | ) 122 | if r.status_code != 200: 123 | return False 124 | else: 125 | return True 126 | 127 | 128 | def print_rate_limiting_info(gh, user): 129 | # Compute some info about our GitHub API Rate Limit. 130 | # Note that it doesn't count against our limit to 131 | # get this info. So, we should be doing this regularly 132 | # to better know when it is going to run out. Also, 133 | # this will help us better understand where we are 134 | # spending it and how to better optimize it. 
135 | 136 | # Get GitHub API Rate Limit usage and total 137 | gh_api_remaining = gh.get_rate_limit().core.remaining 138 | gh_api_total = gh.get_rate_limit().core.limit 139 | 140 | # Compute time until GitHub API Rate Limit reset 141 | gh_api_reset_time = gh.get_rate_limit().core.reset 142 | gh_api_reset_time -= datetime.now(timezone.utc) 143 | 144 | print("") 145 | print("GitHub API Rate Limit Info:") 146 | print("---------------------------") 147 | print("token: ", user) 148 | print("Currently remaining {remaining} out of {total}.".format( 149 | remaining=gh_api_remaining, total=gh_api_total)) 150 | print("Will reset in {time}.".format(time=gh_api_reset_time)) 151 | print("") 152 | return gh_api_remaining 153 | 154 | 155 | def sleep_until_reset(gh): 156 | # sleep the job with printing every minute if we are out 157 | # of github api requests 158 | 159 | gh_api_remaining = gh.get_rate_limit().core.remaining 160 | 161 | if gh_api_remaining == 0: 162 | # Compute time until GitHub API Rate Limit reset 163 | gh_api_reset_time = gh.get_rate_limit().core.reset 164 | gh_api_reset_time -= datetime.now(timezone.utc) 165 | 166 | mins_to_sleep = int(gh_api_reset_time.total_seconds() / 60) 167 | mins_to_sleep += 2 168 | 169 | print("Sleeping until GitHub API resets.") 170 | for i in range(mins_to_sleep): 171 | time.sleep(60) 172 | print("slept for minute {curr} out of {tot}.".format( 173 | curr=i+1, tot=mins_to_sleep)) 174 | return True 175 | else: 176 | return False 177 | 178 | 179 | if __name__ == '__main__': 180 | exit_code = 0 181 | 182 | is_merged_pr = os.environ.get('CF_CURRENT_BRANCH') == 'main' 183 | 184 | smithy_conf = os.path.expanduser('~/.conda-smithy') 185 | if not os.path.exists(smithy_conf): 186 | os.mkdir(smithy_conf) 187 | 188 | def write_token(name, token): 189 | with open(os.path.join(smithy_conf, name + '.token'), 'w') as fh: 190 | fh.write(token) 191 | if 'APPVEYOR_TOKEN' in os.environ: 192 | write_token('appveyor', os.environ['APPVEYOR_TOKEN']) 193 | if 'CIRCLE_TOKEN' in os.environ: 194 | write_token('circle', os.environ['CIRCLE_TOKEN']) 195 | if 'AZURE_TOKEN' in os.environ: 196 | write_token('azure', os.environ['AZURE_TOKEN']) 197 | if 'DRONE_TOKEN' in os.environ: 198 | write_token('drone', os.environ['DRONE_TOKEN']) 199 | if 'TRAVIS_TOKEN' in os.environ: 200 | write_token('travis', os.environ['TRAVIS_TOKEN']) 201 | if 'STAGING_BINSTAR_TOKEN' in os.environ: 202 | write_token('anaconda', os.environ['STAGING_BINSTAR_TOKEN']) 203 | 204 | # gh_drone = Github(os.environ['GH_DRONE_TOKEN']) 205 | # gh_drone_remaining = print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 206 | 207 | # gh_travis = Github(os.environ['GH_TRAVIS_TOKEN']) 208 | gh_travis = None 209 | 210 | gh = None 211 | if 'GH_TOKEN' in os.environ: 212 | write_token('github', os.environ['GH_TOKEN']) 213 | gh = Github(os.environ['GH_TOKEN']) 214 | 215 | # Get our initial rate limit info. 216 | gh_remaining = print_rate_limiting_info(gh, 'GH_TOKEN') 217 | 218 | # if we are out, exit early 219 | # if sleep_until_reset(gh): 220 | # sys.exit(1) 221 | 222 | # try the other token maybe? 
223 | # if gh_remaining < gh_drone_remaining and gh_remaining < 100: 224 | # write_token('github', os.environ['GH_DRONE_TOKEN']) 225 | # gh = Github(os.environ['GH_DRONE_TOKEN']) 226 | 227 | owner_info = ['--organization', 'conda-forge'] 228 | 229 | print('Calculating the recipes which need to be turned into feedstocks.') 230 | with tmp_dir('__feedstocks') as feedstocks_dir: 231 | feedstock_dirs = [] 232 | for recipe_dir, name in list_recipes(): 233 | if name.lower() in REPO_SKIP_LIST: 234 | continue 235 | if name.lower() == "ctx": 236 | sys.exit(1) 237 | 238 | feedstock_dir = os.path.join(feedstocks_dir, name + '-feedstock') 239 | print('Making feedstock for {}'.format(name)) 240 | try: 241 | subprocess.check_call( 242 | ['conda', 'smithy', 'init', recipe_dir, 243 | '--feedstock-directory', feedstock_dir]) 244 | except subprocess.CalledProcessError: 245 | traceback.print_exception(*sys.exc_info()) 246 | continue 247 | 248 | if not is_merged_pr: 249 | # We just want to check that conda-smithy is doing its 250 | # thing without having any metadata issues. 251 | continue 252 | 253 | subprocess.check_call([ 254 | 'git', 'remote', 'add', 'upstream_with_token', 255 | 'https://conda-forge-manager:{}@github.com/' 256 | 'conda-forge/{}-feedstock'.format( 257 | os.environ['GH_TOKEN'], 258 | name 259 | ) 260 | ], 261 | cwd=feedstock_dir 262 | ) 263 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 264 | 265 | # Sometimes we already have the feedstock created. We need to 266 | # deal with that case. 267 | if repo_exists(gh, 'conda-forge', name + '-feedstock'): 268 | default_branch = repo_default_branch( 269 | gh, 'conda-forge', name + '-feedstock' 270 | ) 271 | subprocess.check_call( 272 | ['git', 'fetch', 'upstream_with_token'], cwd=feedstock_dir) 273 | subprocess.check_call( 274 | ['git', 'branch', '-m', default_branch, 'old'], cwd=feedstock_dir) 275 | try: 276 | subprocess.check_call( 277 | [ 278 | 'git', 'checkout', '-b', default_branch, 279 | 'upstream_with_token/%s' % default_branch 280 | ], 281 | cwd=feedstock_dir) 282 | except subprocess.CalledProcessError: 283 | # Sometimes, we have a repo, but there are no commits on 284 | # it! Just catch that case. 285 | subprocess.check_call( 286 | ['git', 'checkout', '-b', default_branch], cwd=feedstock_dir) 287 | else: 288 | default_branch = "main" 289 | 290 | feedstock_dirs.append([feedstock_dir, name, recipe_dir, default_branch]) 291 | 292 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 293 | 294 | # set the default branch in the conda-forge.yml 295 | _set_default_branch(feedstock_dir, default_branch) 296 | 297 | # now register with github 298 | subprocess.check_call( 299 | ['conda', 'smithy', 'register-github', feedstock_dir] 300 | + owner_info 301 | # hack to help travis work 302 | # + ['--extra-admin-users', gh_travis.get_user().login] 303 | # end of hack 304 | ) 305 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 306 | 307 | if gh: 308 | # Get our final rate limit info. 
309 | print_rate_limiting_info(gh, 'GH_TOKEN') 310 | 311 | # drone doesn't run our jobs any more so no reason to do this 312 | # from conda_smithy.ci_register import drone_sync 313 | # print("Running drone sync (can take ~100s)", flush=True) 314 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 315 | # drone_sync() 316 | # for _drone_i in range(10): 317 | # print( 318 | # "syncing drone - %d seconds left" % (10*(10 - _drone_i)), 319 | # flush=True, 320 | # ) 321 | # time.sleep(10) # actually wait 322 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 323 | 324 | # Break the previous loop to allow the TravisCI registering 325 | # to take place only once per function call. 326 | # Without this, intermittent failures to sync the TravisCI repos ensue. 327 | # Hang on to any CI registration errors that occur and raise them at the end. 328 | for num, (feedstock_dir, name, recipe_dir, default_branch) in enumerate( 329 | feedstock_dirs 330 | ): 331 | if name.lower() in REPO_SKIP_LIST: 332 | continue 333 | print("\n\nregistering CI services for %s..." % name) 334 | if num >= 10: 335 | exit_code = 1 336 | break 337 | # Try to register each feedstock with CI. 338 | # However sometimes their APIs have issues for whatever reason. 339 | # In order to bank our progress, we note the error and handle it. 340 | # After going through all the recipes and removing the converted ones, 341 | # we fail the build so that people are aware that things did not clear. 342 | 343 | # hack to help travis work 344 | # from conda_smithy.ci_register import add_project_to_travis 345 | # add_project_to_travis("conda-forge", name + "-feedstock") 346 | # print_rate_limiting_info(gh_travis, 'GH_TRAVIS_TOKEN') 347 | # end of hack 348 | 349 | try: 350 | subprocess.check_call( 351 | ['conda', 'smithy', 'register-ci', '--without-appveyor', 352 | '--without-circle', '--without-drone', '--without-cirun', 353 | '--without-webservice', '--feedstock_directory', 354 | feedstock_dir] + owner_info) 355 | subprocess.check_call( 356 | ['conda', 'smithy', 'rerender', '--no-check-uptodate'], cwd=feedstock_dir) 357 | except subprocess.CalledProcessError: 358 | exit_code = 1 359 | traceback.print_exception(*sys.exc_info()) 360 | continue 361 | 362 | # slow down so we make sure we are registered 363 | for i in range(1, 13): 364 | time.sleep(10) 365 | print("Waiting for registration: {i} s".format(i=i*10)) 366 | 367 | # if we get here, now we make the feedstock token and add the staging token 368 | print("making the feedstock token and adding the staging binstar token") 369 | try: 370 | if not feedstock_token_exists("conda-forge", name + "-feedstock"): 371 | subprocess.check_call( 372 | ['conda', 'smithy', 'generate-feedstock-token', 373 | '--feedstock_directory', feedstock_dir] + owner_info) 374 | subprocess.check_call( 375 | ['conda', 'smithy', 'register-feedstock-token', 376 | '--without-circle', '--without-drone', 377 | '--feedstock_directory', feedstock_dir] + owner_info) 378 | 379 | # add staging token env var to all CI providers except appveyor 380 | # and azure 381 | # azure has it by default and appveyor is not used 382 | subprocess.check_call( 383 | ['conda', 'smithy', 'rotate-binstar-token', 384 | '--without-appveyor', '--without-azure', 385 | "--without-github-actions", '--without-circle', '--without-drone', 386 | '--token_name', 'STAGING_BINSTAR_TOKEN'], 387 | cwd=feedstock_dir) 388 | 389 | yaml = YAML() 390 | with open(os.path.join(feedstock_dir, "conda-forge.yml"), "r") as fp: 391 | _cfg = yaml.load(fp.read()) 392 |
_cfg["conda_forge_output_validation"] = True 393 | with open(os.path.join(feedstock_dir, "conda-forge.yml"), "w") as fp: 394 | yaml.dump(_cfg, fp) 395 | subprocess.check_call( 396 | ["git", "add", "conda-forge.yml"], 397 | cwd=feedstock_dir 398 | ) 399 | subprocess.check_call( 400 | ['conda', 'smithy', 'rerender', '--no-check-uptodate'], cwd=feedstock_dir) 401 | except subprocess.CalledProcessError: 402 | exit_code = 0 403 | traceback.print_exception(*sys.exc_info()) 404 | continue 405 | 406 | print("making a commit and pushing...") 407 | subprocess.check_call( 408 | ['git', 'commit', '--allow-empty', '-am', 409 | "Re-render the feedstock after CI registration."], cwd=feedstock_dir) 410 | for i in range(5): 411 | try: 412 | # Capture the output, as it may contain the GH_TOKEN. 413 | out = subprocess.check_output( 414 | [ 415 | 'git', 'push', 'upstream_with_token', 416 | 'HEAD:%s' % default_branch 417 | ], 418 | cwd=feedstock_dir, 419 | stderr=subprocess.STDOUT) 420 | break 421 | except subprocess.CalledProcessError: 422 | pass 423 | 424 | # Likely another job has already pushed to this repo. 425 | # Place our changes on top of theirs and try again. 426 | out = subprocess.check_output( 427 | ['git', 'fetch', 'upstream_with_token', default_branch], 428 | cwd=feedstock_dir, 429 | stderr=subprocess.STDOUT) 430 | try: 431 | subprocess.check_call( 432 | [ 433 | 'git', 'rebase', 434 | 'upstream_with_token/%s' % default_branch, default_branch 435 | ], 436 | cwd=feedstock_dir) 437 | except subprocess.CalledProcessError: 438 | # Handle rebase failure by choosing the changes in default_branch. 439 | subprocess.check_call( 440 | ['git', 'checkout', default_branch, '--', '.'], 441 | cwd=feedstock_dir) 442 | subprocess.check_call( 443 | ['git', 'rebase', '--continue'], cwd=feedstock_dir) 444 | 445 | # Remove this recipe from the repo. 446 | if is_merged_pr: 447 | subprocess.check_call(['git', 'rm', '-rf', recipe_dir]) 448 | # hack to help travis work 449 | # from conda_smithy.ci_register import travis_cleanup 450 | # travis_cleanup("conda-forge", name + "-feedstock") 451 | # end of hack 452 | 453 | if gh: 454 | # Get our final rate limit info. 455 | print_rate_limiting_info(gh, 'GH_TOKEN') 456 | 457 | # Update status based on the remote. 458 | subprocess.check_call(['git', 'stash', '--keep-index', '--include-untracked']) 459 | subprocess.check_call(['git', 'fetch']) 460 | # CBURR: Debugging 461 | subprocess.check_call(['git', 'status']) 462 | subprocess.check_call(['git', 'rebase', '--autostash']) 463 | subprocess.check_call(['git', 'add', '.']) 464 | try: 465 | subprocess.check_call(['git', 'stash', 'pop']) 466 | except subprocess.CalledProcessError: 467 | # In case there was nothing to stash. 468 | # Finish quietly. 469 | pass 470 | 471 | # Parse `git status --porcelain` to handle some merge conflicts and 472 | # generate the removed recipe list. 473 | changed_files = subprocess.check_output( 474 | ['git', 'status', '--porcelain', recipe_directory_name], 475 | universal_newlines=True) 476 | changed_files = changed_files.splitlines() 477 | 478 | # Add all files from AU conflicts. They are new files that we 479 | # weren't tracking previously. 480 | # Adding them resolves the conflict and doesn't actually add anything to the index. 
481 | new_file_conflicts = filter(lambda _: _.startswith("AU "), changed_files) 482 | new_file_conflicts = map( 483 | lambda _: _.replace("AU", "", 1).lstrip(), new_file_conflicts) 484 | for each_new_file in new_file_conflicts: 485 | subprocess.check_call(['git', 'add', each_new_file]) 486 | 487 | # Generate a fresh listing of recipes removed. 488 | # 489 | # * Each line we get back is a change to a file in the recipe directory. 490 | # * We narrow the list down to recipes that are staged for deletion 491 | # (ignores examples). 492 | # * Then we clean up the list so that it only has the recipe names. 493 | removed_recipes = filter(lambda _: _.startswith("D "), changed_files) 494 | removed_recipes = map(lambda _: _.replace("D", "", 1).lstrip(), removed_recipes) 495 | removed_recipes = map( 496 | lambda _: os.path.relpath(_, recipe_directory_name), removed_recipes) 497 | removed_recipes = map(lambda _: _.split(os.path.sep)[0], removed_recipes) 498 | removed_recipes = sorted(set(removed_recipes)) 499 | 500 | # Commit any removed packages. 501 | subprocess.check_call(['git', 'status']) 502 | if removed_recipes: 503 | msg = ('Removed recipe{s} ({}) after converting into feedstock{s}.' 504 | ''.format(', '.join(removed_recipes), 505 | s=('s' if len(removed_recipes) > 1 else ''))) 506 | msg += ' [ci skip]' 507 | if is_merged_pr: 508 | # Capture the output, as it may contain the GH_TOKEN. 509 | out = subprocess.check_output( 510 | ['git', 'remote', 'add', 'upstream_with_token', 511 | 'https://x-access-token:{}@github.com/' 512 | 'conda-forge/staged-recipes'.format(os.environ['GH_TOKEN'])], 513 | stderr=subprocess.STDOUT) 514 | subprocess.check_call(['git', 'commit', '-m', msg]) 515 | # Capture the output, as it may contain the GH_TOKEN. 516 | branch = os.environ.get('CF_CURRENT_BRANCH') 517 | out = subprocess.check_output( 518 | ['git', 'push', 'upstream_with_token', 'HEAD:%s' % branch], 519 | stderr=subprocess.STDOUT) 520 | else: 521 | print('Would git commit, with the following message: \n {}'.format(msg)) 522 | 523 | if gh: 524 | # Get our final rate limit info. 525 | print_rate_limiting_info(gh, 'GH_TOKEN') 526 | # if gh_drone: 527 | # print_rate_limiting_info(gh_drone, 'GH_DRONE_TOKEN') 528 | # if gh_travis: 529 | # print_rate_limiting_info(gh_travis, 'GH_TRAVIS_TOKEN') 530 | 531 | sys.exit(exit_code) 532 | -------------------------------------------------------------------------------- /.ci_support/compute_build_graph.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Copyright (c) 2016, Continuum Analytics, Inc. 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | 10 | * Redistributions of source code must retain the above copyright notice, this 11 | list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above copyright notice, 14 | this list of conditions and the following disclaimer in the documentation 15 | and/or other materials provided with the distribution. 16 | 17 | * Neither the name of Continuum Analytics nor the names of its 18 | contributors may be used to endorse or promote products derived from 19 | this software without specific prior written permission. 
20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 22 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 23 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 25 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 26 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 28 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 29 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | """ 32 | from __future__ import print_function, division 33 | 34 | import logging 35 | import os 36 | import pkg_resources 37 | import re 38 | import subprocess 39 | import functools 40 | from functools import lru_cache 41 | 42 | from frozendict import frozendict 43 | import networkx as nx 44 | from conda.models.match_spec import MatchSpec 45 | from conda.models.records import PackageRecord 46 | from conda_build import api 47 | from conda_build.metadata import find_recipe, MetaData 48 | 49 | from conda_build.utils import HashableDict 50 | 51 | 52 | log = logging.getLogger(__file__) 53 | CONDA_BUILD_CACHE = os.environ.get("CONDA_BUILD_CACHE") 54 | hash_length = api.Config().hash_length 55 | 56 | 57 | # https://stackoverflow.com/questions/6358481/using-functools-lru-cache-with-dictionary-arguments 58 | def freezeargs(func): 59 | """Convert a mutable dictionary into immutable. 60 | Useful to be compatible with cache 61 | """ 62 | 63 | @functools.wraps(func) 64 | def wrapped(*args, **kwargs): 65 | args = (frozendict(arg) if isinstance(arg, dict) else arg for arg in args) 66 | kwargs = {k: frozendict(v) if isinstance(v, dict) else v for k, v in kwargs.items()} 67 | return func(*args, **kwargs) 68 | return wrapped 69 | 70 | 71 | def package_key(metadata, worker_label, run='build'): 72 | # get the build string from whatever conda-build makes of the configuration 73 | used_loop_vars = metadata.get_used_loop_vars() 74 | build_vars = '-'.join([k + '_' + str(metadata.config.variant[k]) for k in used_loop_vars 75 | if k != 'target_platform']) 76 | # kind of a special case. Target platform determines a lot of output behavior, but may not be 77 | # explicitly listed in the recipe. 
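    # Illustration with hypothetical values: a recipe 'mypkg' at version 1.0.0 looping over
    # python 3.10 on the 'linux_64' worker yields the key 'mypkg-1.0.0-python_3.10-on-linux_64';
    # test runs additionally get a 'c3itest-' prefix.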
78 | tp = metadata.config.variant.get('target_platform') 79 | if tp and tp != metadata.config.subdir and 'target_platform' not in build_vars: 80 | build_vars += '-target_' + tp 81 | key = [metadata.name(), metadata.version()] 82 | if build_vars: 83 | key.append(build_vars) 84 | key.extend(['on', worker_label]) 85 | key = "-".join(key) 86 | if run == 'test': 87 | key = '-'.join(('c3itest', key)) 88 | return key 89 | 90 | 91 | def _git_changed_files(git_rev, stop_rev=None, git_root=''): 92 | if not git_root: 93 | git_root = os.getcwd() 94 | if stop_rev: 95 | git_rev = "{0}..{1}".format(git_rev, stop_rev) 96 | print("Changed files from:", git_rev, stop_rev, git_root) 97 | output = subprocess.check_output(['git', '-C', git_root, 'diff-tree', 98 | '--no-commit-id', '--name-only', '-r', git_rev]) 99 | files = output.decode().splitlines() 100 | return files 101 | 102 | 103 | def _get_base_folders(base_dir, changed_files): 104 | recipe_dirs = [] 105 | for f in changed_files: 106 | # only consider files that come from folders 107 | if '/' in f: 108 | f = f.split('/')[0] 109 | try: 110 | find_recipe(os.path.join(base_dir, f)) 111 | recipe_dirs.append(f) 112 | except IOError: 113 | pass 114 | return recipe_dirs 115 | 116 | 117 | def git_changed_submodules(git_rev='HEAD@{1}', stop_rev=None, git_root='.'): 118 | if stop_rev is not None: 119 | git_rev = "{0}..{1}".format(git_rev, stop_rev) 120 | diff_script = pkg_resources.resource_filename('conda_concourse_ci', 'diff-script.sh') 121 | 122 | diff = subprocess.check_output(['bash', diff_script, git_rev], 123 | cwd=git_root, universal_newlines=True) 124 | 125 | submodule_changed_files = [line.split() for line in diff.splitlines()] 126 | 127 | submodules_with_recipe_changes = [] 128 | for submodule in submodule_changed_files: 129 | for file in submodule: 130 | if 'recipe/' in file and submodule[0] not in submodules_with_recipe_changes: 131 | submodules_with_recipe_changes.append(submodule[0]) 132 | 133 | return submodules_with_recipe_changes 134 | 135 | 136 | def git_new_submodules(git_rev='HEAD@{1}', stop_rev=None, git_root='.'): 137 | if stop_rev is not None: 138 | git_rev = "{0}..{1}".format(git_rev, stop_rev) 139 | 140 | new_submodule_script = pkg_resources.resource_filename('conda_concourse_ci', 141 | 'new-submodule-script.sh') 142 | 143 | diff = subprocess.check_output(['bash', new_submodule_script, git_rev], 144 | cwd=git_root, universal_newlines=True) 145 | 146 | return diff.splitlines() 147 | 148 | 149 | def git_renamed_folders(git_rev='HEAD@{1}', stop_rev=None, git_root='.'): 150 | if stop_rev is not None: 151 | git_rev = "{0}..{1}".format(git_rev, stop_rev) 152 | 153 | rename_script = pkg_resources.resource_filename('conda_concourse_ci', 154 | 'rename-script.sh') 155 | 156 | renamed_files = subprocess.check_output(['bash', rename_script], cwd=git_root, 157 | universal_newlines=True).splitlines() 158 | 159 | return renamed_files 160 | 161 | 162 | def git_changed_recipes(git_rev='HEAD@{1}', stop_rev=None, git_root='.'): 163 | """ 164 | Get the list of files changed in a git revision and return a list of 165 | package directories that have been modified. 166 | 167 | git_rev: if stop_rev is not provided, this represents the changes 168 | introduced by the given git rev. It is equivalent to 169 | git_rev=SOME_REV@{1} and stop_rev=SOME_REV 170 | 171 | stop_rev: when provided, this is the end of a range of revisions to 172 | consider. git_rev becomes the start revision. 
Note that the 173 | start revision is *one before* the actual start of examining 174 | commits for changes. In other words: 175 | 176 | git_rev=SOME_REV@{1} and stop_rev=SOME_REV => only SOME_REV 177 | git_rev=SOME_REV@{2} and stop_rev=SOME_REV => two commits, SOME_REV and the 178 | one before it 179 | """ 180 | changed_files = _git_changed_files(git_rev, stop_rev, git_root) 181 | recipe_dirs = _get_base_folders(git_root, changed_files) 182 | changed_submodules = git_changed_submodules(git_rev, stop_rev, git_root) 183 | new_submodules = git_new_submodules(git_rev, stop_rev, git_root) 184 | renamed_folders = git_renamed_folders(git_rev, stop_rev, git_root) 185 | return recipe_dirs + changed_submodules + new_submodules + renamed_folders 186 | 187 | 188 | def _deps_to_version_dict(deps): 189 | d = {} 190 | for x in deps: 191 | x = x.strip().split() 192 | if len(x) == 3: 193 | d[x[0]] = (x[1], x[2]) 194 | elif len(x) == 2: 195 | d[x[0]] = (x[1], 'any') 196 | else: 197 | d[x[0]] = ('any', 'any') 198 | return d 199 | 200 | 201 | def get_build_deps(meta): 202 | build_reqs = meta.get_value('requirements/build') 203 | if not build_reqs: 204 | build_reqs = [] 205 | return _deps_to_version_dict(build_reqs) 206 | 207 | 208 | def get_run_test_deps(meta): 209 | run_reqs = meta.get_value('requirements/run') 210 | if not run_reqs: 211 | run_reqs = [] 212 | test_reqs = meta.get_value('test/requires') 213 | if not test_reqs: 214 | test_reqs = [] 215 | return _deps_to_version_dict(run_reqs + test_reqs) 216 | 217 | 218 | _rendered_recipes = {} 219 | 220 | 221 | @freezeargs 222 | @lru_cache(maxsize=None) 223 | def _get_or_render_metadata(meta_file_or_recipe_dir, worker, finalize, config=None): 224 | global _rendered_recipes 225 | platform = worker['platform'] 226 | arch = str(worker['arch']) 227 | if (meta_file_or_recipe_dir, platform, arch) not in _rendered_recipes: 228 | print("rendering {0} for {1}".format(meta_file_or_recipe_dir, worker['label'])) 229 | _rendered_recipes[(meta_file_or_recipe_dir, platform, arch)] = \ 230 | api.render(meta_file_or_recipe_dir, platform=platform, arch=arch, 231 | verbose=False, permit_undefined_jinja=True, 232 | bypass_env_check=True, config=config, finalize=finalize) 233 | return _rendered_recipes[(meta_file_or_recipe_dir, platform, arch)] 234 | 235 | 236 | def add_recipe_to_graph(recipe_dir, graph, run, worker, conda_resolve, 237 | recipes_dir=None, config=None, finalize=False): 238 | try: 239 | print(recipe_dir, worker, config, finalize, flush=True) 240 | rendered = _get_or_render_metadata(recipe_dir, worker, config=config, finalize=finalize) 241 | except (IOError, SystemExit) as e: 242 | log.exception('invalid recipe dir: %s', recipe_dir) 243 | raise 244 | 245 | name = None 246 | for (metadata, _, _) in rendered: 247 | name = package_key(metadata, worker['label'], run) 248 | 249 | if metadata.skip(): 250 | continue 251 | 252 | if name not in graph.nodes(): 253 | graph.add_node(name, meta=metadata, worker=worker) 254 | add_dependency_nodes_and_edges(name, graph, run, worker, conda_resolve, config=config, 255 | recipes_dir=recipes_dir, finalize=finalize) 256 | 257 | # # add the test equivalent at the same time. This is so that expanding can find it. 
258 | # if run == 'build': 259 | # add_recipe_to_graph(recipe_dir, graph, 'test', worker, conda_resolve, 260 | # recipes_dir=recipes_dir) 261 | # test_key = package_key(metadata, worker['label']) 262 | # graph.add_edge(test_key, name) 263 | # upload_key = package_key(metadata, worker['label']) 264 | # graph.add_node(upload_key, meta=metadata, worker=worker) 265 | # graph.add_edge(upload_key, test_key) 266 | 267 | return name 268 | 269 | 270 | def match_peer_job(target_matchspec, other_m, this_m=None): 271 | """target_matchspec comes from the recipe. target_variant is the variant from the recipe whose 272 | deps we are matching. m is the peer job, which must satisfy conda and also have matching keys 273 | for any keys that are shared between target_variant and m.config.variant""" 274 | match_dict = {'name': other_m.name(), 275 | 'version': other_m.version(), 276 | 'build': _fix_any(other_m.build_id(), other_m.config), } 277 | match_record = PackageRecord( 278 | name=match_dict['name'], 279 | version=match_dict['version'], 280 | build=match_dict['build'], 281 | build_number=int(other_m.build_number() or 0), 282 | channel=None, 283 | ) 284 | matchspec_matches = target_matchspec.match(match_record) 285 | 286 | variant_matches = True 287 | if this_m: 288 | other_m_used_vars = other_m.get_used_loop_vars() 289 | for v in this_m.get_used_loop_vars(): 290 | if v in other_m_used_vars: 291 | variant_matches &= this_m.config.variant[v] == other_m.config.variant[v] 292 | return matchspec_matches and variant_matches 293 | 294 | 295 | def add_intradependencies(graph): 296 | """ensure that downstream packages wait for upstream build/test (not use existing 297 | available packages)""" 298 | for node in graph.nodes(): 299 | if 'meta' not in graph.nodes[node]: 300 | continue 301 | # get build dependencies 302 | m = graph.nodes[node]['meta'] 303 | # this is pretty hard. Realistically, we would want to know 304 | # what the build and host platforms are on the build machine. 305 | # However, all we know right now is what machine we're actually 306 | # on (the one calculating the graph). 307 | 308 | test_requires = m.meta.get('test', {}).get('requires', []) 309 | 310 | log.info("node: {}".format(node)) 311 | log.info(" build: {}".format(m.ms_depends('build'))) 312 | log.info(" host: {}".format(m.ms_depends('host'))) 313 | log.info(" run: {}".format(m.ms_depends('run'))) 314 | log.info(" test: {}".format(test_requires)) 315 | 316 | deps = set(m.ms_depends('build') + m.ms_depends('host') + m.ms_depends('run') + 317 | [MatchSpec(dep) for dep in test_requires or []]) 318 | 319 | for dep in deps: 320 | name_matches = (n for n in graph.nodes() if graph.nodes[n]['meta'].name() == dep.name) 321 | for matching_node in name_matches: 322 | # are any of these build dependencies also nodes in our graph? 323 | if (match_peer_job(MatchSpec(dep), 324 | graph.nodes[matching_node]['meta'], 325 | m) and 326 | (node, matching_node) not in graph.edges()): 327 | # add edges if they don't already exist 328 | graph.add_edge(node, matching_node) 329 | 330 | 331 | def collapse_subpackage_nodes(graph): 332 | """Collapse all subpackage nodes into their parent recipe node 333 | 334 | We get one node per output, but a given recipe can have multiple outputs. 
It's important 335 | for dependency ordering in the graph that the outputs exist independently, but once those 336 | dependencies are established, we need to collapse subpackages down to a single job for the 337 | top-level recipe.""" 338 | # group nodes by their recipe path first, then within those groups by their variant 339 | node_groups = {} 340 | for node in graph.nodes(): 341 | if 'meta' in graph.nodes[node]: 342 | meta = graph.nodes[node]['meta'] 343 | meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path'] 344 | master = False 345 | 346 | master_meta = MetaData(meta_path, config=meta.config) 347 | if master_meta.name() == meta.name(): 348 | master = True 349 | group = node_groups.get(meta_path, {}) 350 | subgroup = group.get(HashableDict(meta.config.variant), {}) 351 | if master: 352 | if 'master' in subgroup: 353 | raise ValueError("tried to set more than one node in a group as master") 354 | subgroup['master'] = node 355 | else: 356 | sps = subgroup.get('subpackages', []) 357 | sps.append(node) 358 | subgroup['subpackages'] = sps 359 | group[HashableDict(meta.config.variant)] = subgroup 360 | node_groups[meta_path] = group 361 | 362 | for recipe_path, group in node_groups.items(): 363 | for variant, subgroup in group.items(): 364 | # if no node is the top-level recipe (only outputs, no top-level output), need to obtain 365 | # package/name from recipe given by common recipe path. 366 | subpackages = subgroup.get('subpackages') 367 | if 'master' not in subgroup: 368 | sp0 = graph.nodes[subpackages[0]] 369 | master_meta = MetaData(recipe_path, config=sp0['meta'].config) 370 | worker = sp0['worker'] 371 | master_key = package_key(master_meta, worker['label']) 372 | graph.add_node(master_key, meta=master_meta, worker=worker) 373 | master = graph.nodes[master_key] 374 | else: 375 | master = subgroup['master'] 376 | master_key = package_key(graph.nodes[master]['meta'], 377 | graph.nodes[master]['worker']['label']) 378 | # fold in dependencies for all of the other subpackages within a group. This is just 379 | # the intersection of the edges between all nodes. Store this on the "master" node. 380 | if subpackages: 381 | remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages] 382 | for edge in remap_edges: 383 | # make sure not to add references to yourself 384 | if edge[0] != master_key: 385 | graph.add_edge(edge[0], master_key) 386 | graph.remove_edge(*edge) 387 | 388 | # remove nodes that have been folded into master nodes 389 | for subnode in subpackages: 390 | graph.remove_node(subnode) 391 | 392 | 393 | def construct_graph(recipes_dir, worker, run, conda_resolve, folders=(), 394 | git_rev=None, stop_rev=None, matrix_base_dir=None, 395 | config=None, finalize=False): 396 | ''' 397 | Construct a directed graph of dependencies from a directory of recipes 398 | 399 | run: whether to use build or run/test requirements for the graph. Avoids cycles. 400 | values: 'build' or 'test'. Actually, only 'build' matters - otherwise, it's 401 | run/test for any other value. 
402 | ''' 403 | matrix_base_dir = matrix_base_dir or recipes_dir 404 | if not os.path.isabs(recipes_dir): 405 | recipes_dir = os.path.normpath(os.path.join(os.getcwd(), recipes_dir)) 406 | assert os.path.isdir(recipes_dir) 407 | 408 | if not folders: 409 | if not git_rev: 410 | git_rev = 'HEAD' 411 | 412 | folders = git_changed_recipes(git_rev, stop_rev=stop_rev, 413 | git_root=recipes_dir) 414 | 415 | graph = nx.DiGraph() 416 | for folder in folders: 417 | recipe_dir = os.path.join(recipes_dir, folder) 418 | if not os.path.isdir(recipe_dir): 419 | raise ValueError("Specified folder {} does not exist".format(recipe_dir)) 420 | add_recipe_to_graph(recipe_dir, graph, run, worker, conda_resolve, 421 | recipes_dir, config=config, finalize=finalize) 422 | add_intradependencies(graph) 423 | collapse_subpackage_nodes(graph) 424 | return graph 425 | 426 | 427 | def _fix_any(value, config): 428 | value = re.sub('any(?:h[0-9a-f]{%d})?' % config.hash_length, '', value) 429 | return value 430 | 431 | 432 | @lru_cache(maxsize=None) 433 | def _installable(name, version, build_string, config, conda_resolve): 434 | """Can Conda install the package we need?""" 435 | ms = MatchSpec( 436 | " ".join( 437 | [name, _fix_any(version, config), _fix_any(build_string, config)] 438 | ) 439 | ) 440 | installable = conda_resolve.find_matches(ms) 441 | if not installable: 442 | log.warn("Dependency {name}, version {ver} is not installable from your " 443 | "channels: {channels} with subdir {subdir}. Seeing if we can build it..." 444 | .format(name=name, ver=version, channels=config.channel_urls, 445 | subdir=config.host_subdir)) 446 | return installable 447 | 448 | 449 | def _buildable(name, version, recipes_dir, worker, config, finalize): 450 | """Does the recipe that we have available produce the package we need?""" 451 | possible_dirs = os.listdir(recipes_dir) 452 | packagename_re = re.compile(r'%s(?:\-[0-9]+[\.0-9\_\-a-zA-Z]*)?$' % name) 453 | likely_dirs = (dirname for dirname in possible_dirs if 454 | (os.path.isdir(os.path.join(recipes_dir, dirname)) and 455 | packagename_re.match(dirname))) 456 | metadata_tuples = [m for path in likely_dirs 457 | for (m, _, _) in _get_or_render_metadata(os.path.join(recipes_dir, 458 | path), worker, finalize=finalize)] 459 | 460 | # this is our target match 461 | ms = MatchSpec(" ".join([name, _fix_any(version, config)])) 462 | available = False 463 | for m in metadata_tuples: 464 | available = match_peer_job(ms, m) 465 | if available: 466 | break 467 | return m.meta_path if available else False 468 | 469 | 470 | def add_dependency_nodes_and_edges(node, graph, run, worker, conda_resolve, recipes_dir=None, 471 | finalize=False, config=None): 472 | '''add build nodes for any upstream deps that are not yet installable 473 | 474 | changes graph in place. 475 | ''' 476 | metadata = graph.nodes[node]['meta'] 477 | # for plain test runs, ignore build reqs. 
478 | deps = get_run_test_deps(metadata) 479 | recipes_dir = recipes_dir or os.getcwd() 480 | 481 | # cross: need to distinguish between build_subdir (build reqs) and host_subdir 482 | if run == 'build': 483 | deps.update(get_build_deps(metadata)) 484 | 485 | for dep, (version, build_str) in deps.items(): 486 | # we don't need worker info in _installable because it is already part of conda_resolve 487 | if not _installable(dep, version, build_str, metadata.config, conda_resolve): 488 | recipe_dir = _buildable(dep, version, recipes_dir, worker, metadata.config, 489 | finalize=finalize) 490 | if not recipe_dir: 491 | continue 492 | # raise ValueError("Dependency {} is not installable, and recipe (if " 493 | # " available) can't produce desired version ({})." 494 | # .format(dep, version)) 495 | dep_name = add_recipe_to_graph(recipe_dir, graph, 'build', worker, 496 | conda_resolve, recipes_dir, config=config, finalize=finalize) 497 | if not dep_name: 498 | raise ValueError("Tried to build recipe {0} as dependency, which is skipped " 499 | "in meta.yaml".format(recipe_dir)) 500 | graph.add_edge(node, dep_name) 501 | 502 | 503 | def expand_run_upstream(graph, conda_resolve, worker, run, steps=0, max_downstream=5, 504 | recipes_dir=None, matrix_base_dir=None): 505 | pass 506 | 507 | 508 | def expand_run(graph, conda_resolve, worker, run, steps=0, max_downstream=5, 509 | recipes_dir=None, matrix_base_dir=None, finalize=False): 510 | """Apply the build label to any nodes that need (re)building or testing. 511 | 512 | "need rebuilding" means both packages that our target package depends on, 513 | but are not yet built, as well as packages that depend on our target 514 | package. For the latter, you can specify how many dependencies deep (steps) 515 | to follow that chain, since it can be quite large. 516 | 517 | If steps is -1, all downstream dependencies are rebuilt or retested 518 | """ 519 | downstream = 0 520 | initial_nodes = len(graph.nodes()) 521 | 522 | # for build, we get test automatically. Give people the max_downstream in terms 523 | # of packages, not tasks 524 | # if run == 'build': 525 | # max_downstream *= 2 526 | 527 | def expand_step(task_graph, full_graph, downstream): 528 | for node in task_graph.nodes(): 529 | for predecessor in full_graph.predecessors(node): 530 | if max_downstream < 0 or (downstream - initial_nodes) < max_downstream: 531 | add_recipe_to_graph( 532 | os.path.dirname(full_graph.nodes[predecessor]['meta'].meta_path), 533 | task_graph, run=run, worker=worker, conda_resolve=conda_resolve, 534 | recipes_dir=recipes_dir, finalize=finalize) 535 | downstream += 1 536 | return len(graph.nodes()) 537 | 538 | # starting from our initial collection of dirty nodes, trace the tree down to packages 539 | # that depend on the dirty nodes. These packages may need to be rebuilt, or perhaps 540 | # just tested. The 'run' argument determines which. 541 | 542 | if steps != 0: 543 | if not recipes_dir: 544 | raise ValueError("recipes_dir is necessary if steps != 0. " 545 | "Please pass it as an argument.") 546 | # here we need to fully populate a graph that has the right build or run/test deps. 547 | # We don't create this elsewhere because it is unnecessary and costly. 
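        # Illustration: steps=1 pulls in only the direct reverse dependencies of the dirty
        # nodes; steps=-1 repeats expand_step until a pass adds no new nodes (a fixed point).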
548 | 549 | # get all immediate subdirectories 550 | other_top_dirs = [d for d in os.listdir(recipes_dir) 551 | if os.path.isdir(os.path.join(recipes_dir, d)) and 552 | not d.startswith('.')] 553 | recipe_dirs = [] 554 | for recipe_dir in other_top_dirs: 555 | try: 556 | find_recipe(os.path.join(recipes_dir, recipe_dir)) 557 | recipe_dirs.append(recipe_dir) 558 | except IOError: 559 | pass 560 | 561 | # constructing the graph for build will automatically also include the test deps 562 | full_graph = construct_graph(recipes_dir, worker, 'build', folders=recipe_dirs, 563 | matrix_base_dir=matrix_base_dir, conda_resolve=conda_resolve) 564 | 565 | if steps >= 0: 566 | for step in range(steps): 567 | downstream = expand_step(graph, full_graph, downstream) 568 | else: 569 | while True: 570 | nodes = set(graph.nodes())  # snapshot; a live NodeView would always compare equal 571 | downstream = expand_step(graph, full_graph, downstream) 572 | if nodes == set(graph.nodes()): 573 | break 574 | 575 | 576 | def order_build(graph): 577 | ''' 578 | Assumes that packages are in graph. 579 | Builds a temporary graph of relevant nodes and returns its topological sort. 580 | 581 | Relevant nodes are selected in a breadth-first traversal sourced at each pkg 582 | in packages. 583 | ''' 584 | reorder_cyclical_test_dependencies(graph) 585 | try: 586 | order = list(nx.topological_sort(graph)) 587 | order.reverse() 588 | except nx.exception.NetworkXUnfeasible: 589 | raise ValueError("Cycles detected in graph: %s" % nx.find_cycle(graph, 590 | orientation='reverse')) 591 | 592 | return order 593 | 594 | 595 | def reorder_cyclical_test_dependencies(graph): 596 | """By default, we make things that depend on earlier outputs for build wait for tests of 597 | the earlier thing to pass. However, circular dependencies spread across run/test and 598 | build/host can make this approach incorrect. For example: 599 | 600 | A <-- B : B depends on A at build time 601 | B <-- A : A depends on B at run time. We can build A before B, but we cannot test A until B 602 | is built. 603 | 604 | To resolve this, we must reorder the graph edges: 605 | 606 | build A <-- test A <--> build B <-- test B 607 | 608 | must become: 609 | 610 | build A <-- build B <-- test A <-- test B 611 | """ 612 | # find all test nodes with edges to build nodes 613 | test_nodes = [node for node in graph.nodes() if node.startswith('test-')] 614 | edges_from_test_to_build = [edge for edge in graph.edges() if edge[0] in test_nodes and 615 | edge[1].startswith('build-')] 616 | 617 | # find any of their inverses. Entries here are of the form (test-A, build-B) 618 | circular_deps = [edge for edge in edges_from_test_to_build 619 | if (edge[1], edge[0]) in graph.edges()] 620 | 621 | for (testA, buildB) in circular_deps: 622 | # remove build B dependence on test A 623 | graph.remove_edge(testA, buildB) 624 | # remove test B dependence on build B 625 | testB = buildB.replace('build-', 'test-', 1) 626 | graph.remove_edge(buildB, testB) 627 | # Add test B dependence on test A 628 | graph.add_edge(testA, testB) 629 | # make sure that test A still depends on build B 630 | assert (buildB, testA) in graph.edges() 631 | # graph is modified in place. No return necessary. 632 | --------------------------------------------------------------------------------