├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── build-upload.yml │ ├── enforce-sha.yaml │ ├── pre-commit.yml │ ├── rust.yml │ ├── setup-dev-drive.ps1 │ └── testsuite.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Cargo.lock ├── Cargo.toml ├── README.md ├── RELEASE.md ├── crates ├── pixi-build-backend │ ├── Cargo.toml │ └── src │ │ ├── cache.rs │ │ ├── cli.rs │ │ ├── common │ │ ├── configuration.rs │ │ ├── mod.rs │ │ ├── requirements.rs │ │ └── variants.rs │ │ ├── compilers.rs │ │ ├── consts.rs │ │ ├── dependencies.rs │ │ ├── lib.rs │ │ ├── project.rs │ │ ├── protocol.rs │ │ ├── server.rs │ │ ├── source.rs │ │ ├── tools.rs │ │ ├── traits │ │ ├── mod.rs │ │ ├── package_spec.rs │ │ ├── project.rs │ │ └── targets.rs │ │ ├── utils │ │ ├── mod.rs │ │ └── temporary_recipe.rs │ │ └── variants.rs ├── pixi-build-cmake │ ├── Cargo.toml │ └── src │ │ ├── build_script.j2 │ │ ├── build_script.rs │ │ ├── cmake.rs │ │ ├── config.rs │ │ ├── main.rs │ │ ├── protocol.rs │ │ └── snapshots │ │ └── pixi_build_cmake__cmake__tests__setting_host_and_build_requirements.snap ├── pixi-build-python │ ├── Cargo.toml │ └── src │ │ ├── build_script.j2 │ │ ├── build_script.rs │ │ ├── config.rs │ │ ├── main.rs │ │ ├── protocol.rs │ │ ├── python.rs │ │ └── snapshots │ │ ├── pixi_build_python__python__tests__entry_point_nested.snap │ │ ├── pixi_build_python__python__tests__entry_point_not_a_module.snap │ │ ├── pixi_build_python__python__tests__entry_point_not_string.snap │ │ ├── pixi_build_python__python__tests__entry_points_are_read.snap │ │ ├── pixi_build_python__python__tests__noarch_none.snap │ │ ├── pixi_build_python__python__tests__noarch_python.snap │ │ ├── pixi_build_python__python__tests__recipe_from_pyproject_toml.snap │ │ ├── pixi_build_python__python__tests__scripts_are_respected.snap │ │ ├── pixi_build_python__python__tests__setting_host_and_build_requirements-2.snap │ │ ├── pixi_build_python__python__tests__setting_host_and_build_requirements.snap │ │ └── 
pixi_build_python__python__tests__wrong_entry_points.snap ├── pixi-build-rattler-build │ ├── Cargo.toml │ └── src │ │ ├── config.rs │ │ ├── main.rs │ │ ├── protocol.rs │ │ └── rattler_build.rs └── pixi-build-rust │ ├── Cargo.toml │ ├── pixi.lock │ └── src │ ├── build_script.j2 │ ├── build_script.rs │ ├── config.rs │ ├── main.rs │ ├── protocol.rs │ ├── rust.rs │ └── snapshots │ ├── pixi_build_rust__build_script__test__build_script@bash.snap │ ├── pixi_build_rust__build_script__test__build_script@cmdexe.snap │ ├── pixi_build_rust__build_script__test__openssl@bash.snap │ ├── pixi_build_rust__build_script__test__openssl@cmdexe.snap │ ├── pixi_build_rust__build_script__test__sccache@bash.snap │ ├── pixi_build_rust__build_script__test__sccache@cmdexe.snap │ └── pixi_build_rust__rust__tests__rust_is_in_build_requirements.snap ├── intra-doc-links.bash ├── pixi.lock ├── pixi.toml ├── recipe ├── pixi-build-api-version.yaml ├── pixi-build-cmake.yaml ├── pixi-build-python.yaml ├── pixi-build-rattler-build.yaml └── pixi-build-rust.yaml ├── renovate.json5 ├── rust-toolchain ├── schema ├── model.py └── schema.json ├── scripts ├── activate.sh └── generate-matrix.py └── tests └── recipe └── boltons └── recipe.yaml /.gitattributes: -------------------------------------------------------------------------------- 1 | # GitHub syntax highlighting 2 | pixi.lock linguist-language=YAML linguist-generated=true 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | labels: 8 | - "dependencies" 9 | commit-message: 10 | prefix: "chore(ci)" 11 | - package-ecosystem: "cargo" 12 | directory: "/" 13 | schedule: 14 | interval: "weekly" 15 | labels: 16 | - "dependencies" 17 | commit-message: 18 | prefix: "bump" 19 | 
open-pull-requests-limit: 10 20 | -------------------------------------------------------------------------------- /.github/workflows/build-upload.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | # Run full workflow on tags 4 | tags: 5 | - "pixi-build-cmake-v[0-9]+.[0-9]+.[0-9]+" 6 | - "pixi-build-python-v[0-9]+.[0-9]+.[0-9]+" 7 | - "pixi-build-rattler-build-v[0-9]+.[0-9]+.[0-9]+" 8 | - "pixi-build-rust-v[0-9]+.[0-9]+.[0-9]+" 9 | # Build all backends on main branch 10 | branches: [main] 11 | workflow_dispatch: 12 | 13 | 14 | name: "Conda Packages" 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.ref_name }} 18 | cancel-in-progress: true 19 | 20 | permissions: 21 | id-token: write 22 | contents: read 23 | 24 | jobs: 25 | generate-matrix: 26 | runs-on: ubuntu-latest 27 | outputs: 28 | matrix: ${{ steps.set_version.outputs.matrix }} 29 | steps: 30 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 31 | - uses: prefix-dev/setup-pixi@19eac09b398e3d0c747adc7921926a6d802df4da # v0.8.8 32 | - name: Extract versions 33 | id: set_version 34 | run: | 35 | # extract names and versions from cargo metadata 36 | # and generate a matrix entries for the build job 37 | MATRIX_JSON=$(pixi run generate-matrix) 38 | 39 | echo "Generated matrix: $MATRIX_JSON" 40 | echo "matrix=$MATRIX_JSON" >> $GITHUB_OUTPUT 41 | 42 | build: 43 | needs: generate-matrix 44 | env: 45 | REPO_NAME: "prefix-dev/pixi-build-backends" 46 | strategy: 47 | matrix: 48 | bins: ${{ fromJSON(needs.generate-matrix.outputs.matrix) }} 49 | fail-fast: false 50 | runs-on: ${{ matrix.bins.os }} 51 | steps: 52 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 53 | - uses: prefix-dev/setup-pixi@19eac09b398e3d0c747adc7921926a6d802df4da # v0.8.8 54 | with: 55 | environments: build 56 | - name: Enable long paths (Windows) 57 | if: ${{ matrix.bins.os == 'windows-latest' }} 58 | run: | 59 | git config 
--global core.longpaths true 60 | shell: bash 61 | - name: Set environment variable for recipe version 62 | shell: bash 63 | run: | 64 | echo "${{ matrix.bins.env_name }}=${{ matrix.bins.version }}" >> $GITHUB_ENV 65 | - name: Build ${{ matrix.bins.bin }} 66 | shell: bash 67 | env: 68 | RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION: "true" 69 | RATTLER_BUILD_COLOR: "always" 70 | run: | 71 | pixi run build-recipe-ci $RUNNER_TEMP recipe/${{ matrix.bins.bin }}.yaml ${{ matrix.bins.target }} 72 | - name: Upload build artifacts 73 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 74 | with: 75 | name: conda-packages-${{ matrix.bins.bin }}-${{ matrix.bins.target }} 76 | path: ${{ runner.temp }}/**/*.conda 77 | 78 | aggregate: 79 | needs: build 80 | runs-on: ubuntu-latest 81 | strategy: 82 | matrix: 83 | target: [linux-64, linux-aarch64, linux-ppc64le, win-64, osx-64, osx-arm64] 84 | steps: 85 | - name: Download conda package artifacts for ${{ matrix.target }} 86 | uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 87 | with: 88 | pattern: conda-packages-*-${{ matrix.target }} 89 | path: conda-artifacts-${{ matrix.target }} 90 | merge-multiple: true 91 | - name: Upload aggregated conda packages for ${{ matrix.target }} 92 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 93 | with: 94 | name: conda-packages-${{ matrix.target }} 95 | path: conda-artifacts-${{ matrix.target }}/**/*.conda 96 | 97 | upload: 98 | needs: aggregate 99 | runs-on: ubuntu-latest 100 | if: ${{ startsWith(github.ref, 'refs/tags') && github.repository == 'prefix-dev/pixi-build-backends' }} 101 | steps: 102 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 103 | - name: Download all conda packages 104 | uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 105 | with: 106 | pattern: conda-packages-* 107 | path: conda-packages 108 | merge-multiple: true 109 | run-id: ${{ 
github.run_id }} 110 | - uses: prefix-dev/setup-pixi@19eac09b398e3d0c747adc7921926a6d802df4da # v0.8.8 111 | with: 112 | environments: build 113 | - name: Upload packages 114 | shell: bash 115 | run: | 116 | for file in conda-packages/**/*.conda; do 117 | echo "Uploading ${file}" 118 | pixi run -e build rattler-build upload prefix -c pixi-build-backends "$file" 119 | done 120 | -------------------------------------------------------------------------------- /.github/workflows/enforce-sha.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: [main] 4 | pull_request: 5 | 6 | name: Security 7 | 8 | jobs: 9 | ensure-pinned-actions: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout code 13 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 14 | - name: Ensure SHA pinned actions 15 | uses: zgosalvez/github-actions-ensure-sha-pinned-actions@fc87bb5b5a97953d987372e74478de634726b3e5 # v3 16 | with: 17 | allowlist: | 18 | prefix-dev/ 19 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: Pre-commit 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | 8 | jobs: 9 | pre-commit: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 13 | - name: Set up pixi 14 | uses: prefix-dev/setup-pixi@v0.8.8 15 | with: 16 | environments: lint 17 | - name: pre-commit 18 | run: pixi run pre-commit-run --color=always --show-diff-on-failure 19 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - "main" 5 | pull_request: 6 | 7 | name: Rust 8 | 9 | concurrency: 10 | group: ${{ github.workflow 
}}-${{ github.ref }} 11 | cancel-in-progress: true 12 | 13 | env: 14 | RUST_LOG: info 15 | RUST_BACKTRACE: 1 16 | RUSTFLAGS: "-D warnings" 17 | CARGO_TERM_COLOR: always 18 | 19 | jobs: 20 | check-rustdoc-links: 21 | name: Check intra-doc links 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 25 | - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9 # v1 26 | - shell: bash 27 | run: > 28 | ./intra-doc-links.bash 29 | format_and_lint: 30 | name: Format and Lint 31 | runs-on: ubuntu-latest 32 | steps: 33 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 34 | - uses: prefix-dev/setup-pixi@main 35 | with: 36 | cache: ${{ github.ref == 'refs/heads/main' }} 37 | - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 38 | with: 39 | workspaces: ". -> target/pixi" 40 | key: ${{ hashFiles('pixi.lock') }} 41 | save-if: ${{ github.ref == 'refs/heads/main' }} 42 | - name: Run rustfmt 43 | run: pixi run cargo-fmt 44 | - name: Run clippy 45 | run: pixi run cargo-clippy 46 | 47 | test: 48 | name: Test 49 | strategy: 50 | fail-fast: false 51 | matrix: 52 | os: [ubuntu-latest, macos-latest, windows-latest] 53 | runs-on: ${{ matrix.os }} 54 | needs: [format_and_lint] 55 | steps: 56 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 57 | - uses: prefix-dev/setup-pixi@main 58 | with: 59 | cache: ${{ github.ref == 'refs/heads/main' }} 60 | - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 61 | with: 62 | workspaces: ". 
-> target/pixi" 63 | key: ${{ hashFiles('pixi.lock') }} 64 | save-if: ${{ github.ref == 'refs/heads/main' }} 65 | - name: Run Rust tests 66 | run: pixi run nextest 67 | 68 | - name: Run doctests 69 | run: pixi run doctest 70 | -------------------------------------------------------------------------------- /.github/workflows/setup-dev-drive.ps1: -------------------------------------------------------------------------------- 1 | # Configures a drive for testing in CI. 2 | # Credits to astral-sh/uv: https://github.com/astral-sh/uv/blob/d2b9ffdc9e3f336e46b0af18a8554de560bfbefc/.github/workflows/setup-dev-drive.ps1 3 | 4 | # When not using a GitHub Actions "larger runner", the `D:` drive is present and 5 | # has similar or better performance characteristics than a ReFS dev drive. 6 | # Sometimes using a larger runner is still more performant (e.g., when running 7 | # the test suite) and we need to create a dev drive. This script automatically 8 | # configures the appropriate drive. 9 | if (Test-Path "D:\") { 10 | Write-Output "Using existing drive at D:" 11 | $Drive = "D:" 12 | } else { 13 | # The size (20 GB) is chosen empirically to be large enough for our 14 | # workflows; larger drives can take longer to set up. 
15 | $Volume = New-VHD -Path C:/pixi_dev_drive.vhdx -SizeBytes 20GB | 16 | Mount-VHD -Passthru | 17 | Initialize-Disk -Passthru | 18 | New-Partition -AssignDriveLetter -UseMaximumSize | 19 | Format-Volume -DevDrive -Confirm:$false -Force 20 | 21 | $Drive = "$($Volume.DriveLetter):" 22 | 23 | # Set the drive as trusted 24 | # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted 25 | fsutil devdrv trust $Drive 26 | 27 | # Disable antivirus filtering on dev drives 28 | # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive 29 | fsutil devdrv enable /disallowAv 30 | 31 | # Remount so the changes take effect 32 | Dismount-VHD -Path C:/pixi_dev_drive.vhdx 33 | Mount-VHD -Path C:/pixi_dev_drive.vhdx 34 | 35 | # Show some debug information 36 | Write-Output $Volume 37 | fsutil devdrv query $Drive 38 | 39 | Write-Output "Using Dev Drive at $Volume" 40 | } 41 | 42 | $Tmp = "$($Drive)/pixi-tmp" 43 | 44 | # Create the directory ahead of time in an attempt to avoid race-conditions 45 | New-Item $Tmp -ItemType Directory 46 | 47 | # Move Cargo to the dev drive 48 | New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force 49 | if (Test-Path "C:/Users/runneradmin/.cargo") { 50 | Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force 51 | } 52 | 53 | Write-Output ` 54 | "DEV_DRIVE=$($Drive)" ` 55 | "TMP=$($Tmp)" ` 56 | "TEMP=$($Tmp)" ` 57 | "RATTLER_CACHE_DIR=$($Drive)/rattler-cache" ` 58 | "RUSTUP_HOME=$($Drive)/.rustup" ` 59 | "CARGO_HOME=$($Drive)/.cargo" ` 60 | "PIXI_HOME=$($Drive)/.pixi" ` 61 | "PIXI_WORKSPACE=$($Drive)/pixi" ` 62 | >> $env:GITHUB_ENV 63 | -------------------------------------------------------------------------------- /.github/workflows/testsuite.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - "main" 5 | pull_request: 6 | 7 | name: 
Testsuite 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref }} 11 | cancel-in-progress: true 12 | 13 | env: 14 | RUST_LOG: info 15 | RUST_BACKTRACE: 1 16 | RUSTFLAGS: "-D warnings" 17 | CARGO_TERM_COLOR: always 18 | 19 | jobs: 20 | build-binary-linux-x86_64: 21 | name: Build Ubuntu 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 25 | - uses: prefix-dev/setup-pixi@main 26 | with: 27 | cache: ${{ github.ref == 'refs/heads/main' }} 28 | - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 29 | with: 30 | workspaces: ". -> target/pixi" 31 | key: ${{ hashFiles('pixi.lock') }} 32 | save-if: ${{ github.ref == 'refs/heads/main' }} 33 | - name: Build backends 34 | run: pixi run build-release 35 | - name: Upload executables as artifacts 36 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 37 | with: 38 | name: pixi-build-backends-linux-x86_64 39 | path: | 40 | target/pixi/release/pixi-build-* 41 | !target/pixi/release/pixi-build-*.d 42 | if-no-files-found: error 43 | retention-days: 60 44 | 45 | build-binary-windows-x86_64: 46 | name: Build Windows 47 | runs-on: windows-latest 48 | steps: 49 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 50 | - name: Create Dev Drive 51 | run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 52 | - name: Copy Git Repo to Dev Drive 53 | run: | 54 | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.PIXI_WORKSPACE }}" -Recurse 55 | - uses: prefix-dev/setup-pixi@main 56 | with: 57 | manifest-path: ${{ env.PIXI_WORKSPACE }}/pixi.toml 58 | cache: ${{ github.ref == 'refs/heads/main' }} 59 | - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 60 | with: 61 | workspaces: ". 
-> ${{ env.PIXI_WORKSPACE }}/target/pixi" 62 | key: ${{ hashFiles('pixi.lock') }} 63 | save-if: ${{ github.ref == 'refs/heads/main' }} 64 | - name: Build backends 65 | run: pixi run build-release 66 | working-directory: ${{ env.PIXI_WORKSPACE }} 67 | - name: Upload executables as artifacts 68 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 69 | with: 70 | name: pixi-build-backends-windows-x86_64 71 | path: | 72 | ${{ env.PIXI_WORKSPACE }}/target/pixi/release/pixi-build-* 73 | !${{ env.PIXI_WORKSPACE }}/target/pixi/release/pixi-build-*.d 74 | if-no-files-found: error 75 | retention-days: 60 76 | 77 | build-binary-macos-aarch64: 78 | name: Build macOS 79 | runs-on: macos-latest 80 | steps: 81 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 82 | - uses: prefix-dev/setup-pixi@main 83 | with: 84 | cache: ${{ github.ref == 'refs/heads/main' }} 85 | - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 86 | with: 87 | workspaces: ". 
-> target/pixi" 88 | key: ${{ hashFiles('pixi.lock') }} 89 | save-if: ${{ github.ref == 'refs/heads/main' }} 90 | - name: Build backends 91 | run: pixi run build-release 92 | - name: Upload executables as artifacts 93 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 94 | with: 95 | name: pixi-build-backends-macos-aarch64 96 | path: | 97 | target/pixi/release/pixi-build-* 98 | !target/pixi/release/pixi-build-*.d 99 | if-no-files-found: error 100 | retention-days: 60 101 | 102 | test-linux-x86_64: 103 | timeout-minutes: 10 104 | name: Test Linux x86_64 105 | runs-on: ubuntu-latest 106 | needs: build-binary-linux-x86_64 107 | steps: 108 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 109 | with: 110 | repository: prefix-dev/pixi-build-testsuite 111 | 112 | - name: Set up pixi 113 | uses: prefix-dev/setup-pixi@main 114 | with: 115 | cache: true 116 | cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} 117 | 118 | - name: Download pixi artifacts 119 | run: pixi run download-artifacts pixi 120 | env: 121 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 122 | 123 | - name: Download pixi-build-backends artifacts 124 | run: pixi run download-artifacts pixi-build-backends --run-id ${{ github.run_id }} 125 | env: 126 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 127 | 128 | - name: Setup binary permissions 129 | run: chmod a+x artifacts/pixi* 130 | 131 | - name: Run integration tests 132 | run: pixi run --locked test-slow 133 | env: 134 | PIXI_BIN_DIR: ${{ github.workspace }}/artifacts 135 | BUILD_BACKENDS_BIN_DIR: ${{ github.workspace }}/artifacts 136 | 137 | test-windows-x86_64: 138 | timeout-minutes: 10 139 | name: Test Windows x86_64 140 | runs-on: windows-latest 141 | needs: build-binary-windows-x86_64 142 | steps: 143 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 144 | with: 145 | repository: prefix-dev/pixi-build-testsuite 146 | 147 | - name: Create Dev Drive 148 | run: 
${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 149 | 150 | - name: Copy Git Repo to Dev Drive 151 | run: Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.PIXI_WORKSPACE }}" -Recurse 152 | 153 | - name: Set up pixi 154 | uses: prefix-dev/setup-pixi@main 155 | with: 156 | manifest-path: ${{ env.PIXI_WORKSPACE }}/pixi.toml 157 | cache: true 158 | cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} 159 | 160 | - name: Download pixi artifacts 161 | working-directory: ${{ env.PIXI_WORKSPACE }} 162 | run: pixi run download-artifacts pixi 163 | env: 164 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 165 | 166 | - name: Download pixi-build-backends artifacts 167 | working-directory: ${{ env.PIXI_WORKSPACE }} 168 | run: pixi run download-artifacts pixi-build-backends --run-id ${{ github.run_id }} 169 | env: 170 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 171 | 172 | - name: Run integration tests 173 | run: pixi run --locked test-slow 174 | working-directory: ${{ env.PIXI_WORKSPACE }} 175 | env: 176 | PIXI_BIN_DIR: ${{ env.PIXI_WORKSPACE }}/artifacts 177 | BUILD_BACKENDS_BIN_DIR: ${{ env.PIXI_WORKSPACE }}/artifacts 178 | 179 | test-macos-aarch64: 180 | timeout-minutes: 10 181 | name: Test macOS aarch64 182 | runs-on: macos-14 183 | needs: build-binary-macos-aarch64 184 | steps: 185 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 186 | with: 187 | repository: prefix-dev/pixi-build-testsuite 188 | 189 | - name: Set up pixi 190 | uses: prefix-dev/setup-pixi@main 191 | with: 192 | cache: true 193 | cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }} 194 | 195 | - name: Download pixi artifacts 196 | run: pixi run download-artifacts pixi 197 | env: 198 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 199 | 200 | - name: Download pixi-build-backends artifacts 201 | run: pixi run download-artifacts pixi-build-backends --run-id ${{ github.run_id }} 202 | env: 203 | GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} 204 | 205 | - name: Setup binary permissions 206 | run: chmod a+x artifacts/pixi* 207 | 208 | - name: Run integration tests 209 | run: pixi run --locked test-slow 210 | env: 211 | PIXI_BIN_DIR: ${{ github.workspace }}/artifacts 212 | BUILD_BACKENDS_BIN_DIR: ${{ github.workspace }}/artifacts 213 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | 3 | .vscode/ 4 | 5 | # Generated by Cargo 6 | # will have compiled files and executables 7 | debug/ 8 | target/ 9 | 10 | # If running rattler-build locally 11 | output/ 12 | 13 | # These are backup files generated by rustfmt 14 | **/*.rs.bk 15 | 16 | # MSVC Windows builds of rustc generate these, which store debugging information 17 | *.pdb 18 | 19 | **/.DS_Store 20 | 21 | # rattler 22 | .prefix 23 | 24 | # pixi 25 | .pixi/ 26 | 27 | # Visual studio files 28 | .vs/ 29 | 30 | # pixi environments 31 | .pixi 32 | *.egg-info 33 | 34 | # Output directory 35 | pixi-build-python-output/ 36 | **/repodata.json 37 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: (^.pixi/|.snap) 2 | repos: 3 | - repo: local 4 | hooks: 5 | - id: check-yaml 6 | name: check-yaml 7 | entry: pixi run -e lint check-yaml 8 | language: system 9 | types: [yaml] 10 | - id: end-of-file 11 | name: end-of-file 12 | entry: pixi run -e lint end-of-file-fixer 13 | language: system 14 | types: [text] 15 | stages: [pre-commit, pre-push, manual] 16 | - id: trailing-whitespace 17 | name: trailing-whitespace 18 | entry: pixi run -e lint trailing-whitespace-fixer 19 | language: system 20 | types: [text] 21 | stages: [pre-commit, pre-push, manual] 22 | # GitHub Actions 23 | - id: actionlint 24 | name: Lint GitHub Actions workflow files 25 | language: system 26 | 
entry: pixi run -e lint actionlint 27 | types: [yaml] 28 | files: ^\.github/workflows/ 29 | # typos 30 | - id: typos 31 | name: typos 32 | entry: pixi run -e lint typos --write-changes --force-exclude 33 | language: system 34 | types: [text] 35 | # TOML format 36 | - id: taplo 37 | name: taplo 38 | entry: pixi run -e lint toml-format 39 | language: system 40 | types: [file, toml] 41 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["crates/*"] 3 | resolver = "2" 4 | 5 | [workspace.package] 6 | edition = "2024" 7 | 8 | [workspace.dependencies] 9 | async-trait = "0.1.86" 10 | chrono = "0.4.39" 11 | clap = "4.5.29" 12 | clap-verbosity-flag = "3.0.2" 13 | fs-err = "3.1.0" 14 | indexmap = "2.7.1" 15 | itertools = "0.14.0" 16 | log = "0.4.25" 17 | miette = "7.5.0" 18 | minijinja = "2.7.0" 19 | parking_lot = "0.12.3" 20 | reqwest = { version = "0.12.12", default-features = false, features = [ 21 | "rustls-tls", 22 | "rustls-tls-native-roots", 23 | ] } 24 | reqwest-middleware = "0.4.0" 25 | rstest = "0.25.0" 26 | serde = "1.0" 27 | serde_yaml = "0.9" 28 | serde_json = "1.0" 29 | tempfile = "3.16.0" 30 | toml_edit = "0.22.24" 31 | tokio = "1.43.0" 32 | tracing-subscriber = "0.3.19" 33 | url = "2.5.4" 34 | pyproject-toml = "0.13.4" 35 | dirs = "6.0.0" 36 | pathdiff = "0.2.3" 37 | 38 | jsonrpc-stdio-server = "18.0.0" 39 | jsonrpc-http-server = "18.0.0" 40 | jsonrpc-core = "18.0.0" 41 | 42 | rattler-build = { version = "*", default-features = false, features = [ 43 | "rustls-tls", 44 | ] } 45 | 46 | rattler_conda_types = { version = "0.32.0", default-features = false } 47 | rattler_package_streaming = { version = "0.22.35", default-features = false } 48 | rattler_virtual_packages = { version = "2.0.9", default-features = false } 49 | 50 | pixi_build_types = { version = "*" } 51 | pixi_consts = { version = "*" } 52 | 
pixi_manifest = { version = "*" } 53 | pixi_spec = { version = "*" } 54 | pixi_build_type_conversions = { version = "*" } 55 | 56 | pixi-build-backend = { path = "crates/pixi-build-backend" } 57 | 58 | [patch.crates-io] 59 | pixi_build_types = { git = "https://github.com/prefix-dev/pixi", branch = "main" } 60 | pixi_consts = { git = "https://github.com/prefix-dev/pixi", branch = "main" } 61 | pixi_manifest = { git = "https://github.com/prefix-dev/pixi", branch = "main" } 62 | pixi_spec = { git = "https://github.com/prefix-dev/pixi", branch = "main" } 63 | pixi_build_type_conversions = { git = "https://github.com/prefix-dev/pixi", branch = "main" } 64 | 65 | #pixi_build_types = { path = "../pixi/crates/pixi_build_types" } 66 | #pixi_consts = { path = "../pixi/crates/pixi_consts" } 67 | #pixi_manifest = { path = "../pixi/crates/pixi_manifest" } 68 | #pixi_spec = { path = "../pixi/crates/pixi_spec" } 69 | #pixi_build_type_conversions = { path = "../pixi/crates/pixi_build_type_conversions" } 70 | 71 | #rattler_cache = { path = "../rattler/crates/rattler_cache" } 72 | #rattler_conda_types = { path = "../rattler/crates/rattler_conda_types" } 73 | #rattler_package_streaming = { path = "../rattler/crates/rattler_package_streaming" } 74 | #rattler_virtual_packages = { path = "../rattler/crates/rattler_virtual_packages" } 75 | #rattler_repodata_gateway = { path = "../rattler/crates/rattler_repodata_gateway" } 76 | #simple_spawn_blocking = { path = "../rattler/crates/simple_spawn_blocking" } 77 | 78 | rattler-build = { git = "https://github.com/prefix-dev/rattler-build", branch = "main" } 79 | #rattler-build = { path = "../rattler-build" } 80 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pixi Build Backends 2 | 3 | **Work in Progress: Backend Implementations for Building Pixi Projects from Source** 4 | 5 | This repository contains backend 
implementations designed to facilitate the building of pixi projects directly from their source code. These backends aim to enhance the functionality of Pixi, a cross-platform, multi-language package manager and workflow tool built on the foundation of the conda ecosystem. 6 | 7 | ## Available Build Backends 8 | The idea is that a backend should be able to build a certain type of source project. 9 | The repository provides the following build backends: 10 | 11 | 1. **pixi-build-python**: A backend tailored for building Python-based projects. 12 | 2. **pixi-build-cmake**: A backend designed for projects utilizing CMake as their build system. 13 | 3. **pixi-build-rattler-build**: A backend for building [`recipe.yaml`](https://rattler.build/latest/) directly 14 | 4. **pixi-build-rust**: A backend for building Rust projects. 15 | 16 | 17 | These backends are located in the `crates/*` directory of the repository. 18 | 19 | ## Features 20 | * **Backend Implementations**: Provides the necessary components to build Pixi projects from source, integrating seamlessly with the Pixi ecosystem. 21 | * **Schema Definitions**: Includes schema definitions to standardize and validate project configurations. 22 | 23 | ## Getting Started 24 | 25 | **Note**: This project is currently a work in progress. Functionality and documentation are under active development. 26 | All of these backends are directly uploaded to the [Pixi Build Backends](https://prefix.dev/channels/pixi-build-backends). 27 | So they will be used in pixi directly. We want to move these to conda-forge eventually. 28 | 29 | For example, this `build-section` will use the python backend to build a python project: 30 | 31 | ```toml 32 | [build-system] 33 | # The name of the build backend to use. This name refers both to the name of 34 | # the package that provides the build backend and the name of the executable 35 | # inside the package that is invoked. 36 | # 37 | # The `build-backend` key also functions as a dependency declaration.
At least 38 | # a version specifier must be added. 39 | build-backend = { name = "pixi-build-python", version = "*" } 40 | # These are the conda channels that are used to resolve the dependencies of the 41 | # build backend package. 42 | channels = [ 43 | "https://prefix.dev/pixi-build-backends", 44 | "https://prefix.dev/conda-forge", 45 | ] 46 | ``` 47 | 48 | 49 | ### Developing on Backends 50 | 51 | Even though binary versions are available on the prefix channels, it's also quite easy to get started on developing a new backend or work on an existing one. 52 | To start development make sure you have installed [pixi](https://pixi.sh). After which, a number of commands should be available: 53 | 54 | ```bash 55 | # To build the backends 56 | pixi run build 57 | # .. to install a backend, for example the python one: 58 | pixi r install-pixi-build-python 59 | ``` 60 | 61 | You can make use of these backends to overwrite any existing backend in pixi. 62 | This is described in the [pixi docs](https://pixi.sh/dev/build/backends/). 63 | 64 | ## Contributing 65 | Contributions are welcome! Please refer to the contributing guidelines for more information. 66 | ## License 67 | 68 | This project is licensed under the BSD-3-Clause License. See the LICENSE file for details. 69 | Acknowledgements 70 | 71 | ## Acknowledgements 72 | Developed by prefix.dev. 73 | For more information about Pixi and related projects, visit the [prefix-dev](https://github.com/prefix-dev) organization on GitHub. 74 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release Notes 2 | 3 | ## Overview 4 | The `rattler-build.yml` workflow automates the process of building and publishing pixi build backends as conda packages.
 5 | The workflow is triggered by: 6 | 7 | - A `push` event with tags matching: 8 | - `pixi-build-cmake-vX.Y.Z` 9 | - `pixi-build-python-vX.Y.Z` 10 | - `pixi-build-rattler-build-vX.Y.Z` 11 | - A `pull_request` event 12 | 13 | 14 | Note: Actual releases are only triggered by git tags matching the patterns above. 15 | Pull requests will build the packages but not publish them. 16 | 17 | 18 | ## Usage Instructions 19 | 20 | ### Triggering a Release 21 | - Bump the version in the `Cargo.toml` for the backend you want to release. 22 | - Open a pull request. 23 | - After the pull request is merged, create a new tag following the pattern `pixi-build-<backend>-vX.Y.Z` (e.g., `pixi-build-cmake-v1.2.3`) 24 | - Push the tag to the repository: 25 | ```sh 26 | git tag pixi-build-cmake-v1.2.3 27 | git push origin pixi-build-cmake-v1.2.3 28 | ``` 29 | - The workflow will automatically build and upload the package. 30 | 31 | ### Adding a new backend 32 | When adding a new backend, you will need to add a new backend tag to the `rattler-build.yml` workflow.
33 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pixi-build-backend" 3 | version = "0.1.3" 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | async-trait = { workspace = true } 8 | chrono = { workspace = true } 9 | clap = { workspace = true, features = ["derive", "env"] } 10 | clap-verbosity-flag = { workspace = true } 11 | fs-err = { workspace = true } 12 | indexmap = { workspace = true } 13 | itertools = { workspace = true } 14 | log = { workspace = true } 15 | miette = { workspace = true } 16 | minijinja = { workspace = true } 17 | parking_lot = { workspace = true } 18 | rattler_conda_types = { workspace = true } 19 | rattler_package_streaming = { workspace = true } 20 | rattler_virtual_packages = { workspace = true } 21 | rattler-build = { workspace = true } 22 | reqwest = { workspace = true } 23 | reqwest-middleware = { workspace = true } 24 | serde = { workspace = true, features = ["derive"] } 25 | serde_yaml = { workspace = true } 26 | serde_json = { workspace = true } 27 | toml_edit = { workspace = true } 28 | tempfile = { workspace = true } 29 | tokio = { workspace = true, features = ["macros"] } 30 | tracing-subscriber = { workspace = true } 31 | url = { workspace = true } 32 | pyproject-toml = { workspace = true } 33 | dirs = { workspace = true } 34 | pathdiff = { workspace = true } 35 | 36 | pixi_build_types = { workspace = true } 37 | pixi_consts = { workspace = true } 38 | pixi_manifest = { workspace = true } 39 | pixi_spec = { workspace = true } 40 | pixi_build_type_conversions = { workspace = true } 41 | 42 | jsonrpc-stdio-server = { workspace = true } 43 | jsonrpc-http-server = { workspace = true } 44 | jsonrpc-core = { workspace = true } 45 | 46 | [dev-dependencies] 47 | insta = { version = "1.42.1", features = ["yaml", "redactions", "filters"] } 48 | toml_edit = 
{ version = "0.22.24" } 49 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/cache.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use crate::{ProjectModel, Targets, traits::Dependencies}; 4 | 5 | pub fn sccache_tools() -> Vec { 6 | vec!["sccache".to_string()] 7 | } 8 | 9 | pub fn enable_sccache(env: HashMap) -> bool { 10 | env.keys().any(|k| k.starts_with("SCCACHE")) 11 | } 12 | 13 | pub fn add_sccache<'a, P: ProjectModel>( 14 | dependencies: &mut Dependencies<'a, ::Spec>, 15 | sccache_tools: &'a [String], 16 | empty_spec: &'a <

::Targets as Targets>::Spec, 17 | ) { 18 | for cache_tool in sccache_tools { 19 | if !dependencies.build.contains_key(&cache_tool) { 20 | dependencies.build.insert(cache_tool, empty_spec); 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/cli.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use clap::{Parser, Subcommand}; 4 | use clap_verbosity_flag::{InfoLevel, Verbosity}; 5 | use miette::{Context, IntoDiagnostic}; 6 | use pixi_build_types::{ 7 | BackendCapabilities, ChannelConfiguration, FrontendCapabilities, PlatformAndVirtualPackages, 8 | procedures::{ 9 | conda_build::CondaBuildParams, 10 | conda_metadata::{CondaMetadataParams, CondaMetadataResult}, 11 | initialize::InitializeParams, 12 | negotiate_capabilities::NegotiateCapabilitiesParams, 13 | }, 14 | }; 15 | use rattler_build::console_utils::{LoggingOutputHandler, get_default_env_filter}; 16 | use rattler_conda_types::{ChannelConfig, GenericVirtualPackage, Platform}; 17 | use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; 18 | use tempfile::TempDir; 19 | use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; 20 | 21 | use crate::{ 22 | consts, 23 | project::to_project_model, 24 | protocol::{Protocol, ProtocolInstantiator}, 25 | server::Server, 26 | }; 27 | 28 | #[allow(missing_docs)] 29 | #[derive(Parser)] 30 | pub struct App { 31 | /// The subcommand to run. 32 | #[clap(subcommand)] 33 | command: Option, 34 | 35 | /// The port to expose the json-rpc server on. If not specified will 36 | /// communicate with stdin/stdout. 37 | #[clap(long)] 38 | http_port: Option, 39 | 40 | /// Enable verbose logging. 41 | #[command(flatten)] 42 | verbose: Verbosity, 43 | } 44 | 45 | #[derive(Subcommand)] 46 | pub enum Commands { 47 | /// Get conda metadata for a recipe. 
48 | GetCondaMetadata { 49 | #[clap(env, long, env = "PIXI_PROJECT_MANIFEST", default_value = consts::WORKSPACE_MANIFEST)] 50 | manifest_path: PathBuf, 51 | 52 | #[clap(long)] 53 | host_platform: Option, 54 | }, 55 | /// Build a conda package. 56 | CondaBuild { 57 | #[clap(env, long, env = "PIXI_PROJECT_MANIFEST", default_value = consts::WORKSPACE_MANIFEST)] 58 | manifest_path: PathBuf, 59 | }, 60 | /// Get the capabilities of the backend. 61 | Capabilities, 62 | } 63 | 64 | /// Run the sever on the specified port or over stdin/stdout. 65 | async fn run_server(port: Option, protocol: T) -> miette::Result<()> { 66 | let server = Server::new(protocol); 67 | if let Some(port) = port { 68 | server.run_over_http(port) 69 | } else { 70 | // running over stdin/stdout 71 | server.run().await 72 | } 73 | } 74 | 75 | /// Run the main CLI. 76 | pub async fn main T>( 77 | factory: F, 78 | ) -> miette::Result<()> { 79 | let args = App::parse(); 80 | 81 | // Setup logging 82 | let log_handler = LoggingOutputHandler::default(); 83 | 84 | let registry = tracing_subscriber::registry() 85 | .with(get_default_env_filter(args.verbose.log_level_filter()).into_diagnostic()?); 86 | 87 | registry.with(log_handler.clone()).init(); 88 | 89 | let factory = factory(log_handler); 90 | 91 | match args.command { 92 | None => run_server(args.http_port, factory).await, 93 | Some(Commands::Capabilities) => { 94 | let backend_capabilities = capabilities::().await?; 95 | eprintln!( 96 | "Supports conda metadata: {}", 97 | backend_capabilities 98 | .provides_conda_metadata 99 | .unwrap_or_default() 100 | ); 101 | eprintln!( 102 | "Supports conda build: {}", 103 | backend_capabilities 104 | .provides_conda_build 105 | .unwrap_or_default() 106 | ); 107 | eprintln!( 108 | "Highest project model: {}", 109 | backend_capabilities 110 | .highest_supported_project_model 111 | .map(|v| v.to_string()) 112 | .unwrap_or_else(|| String::from("None")) 113 | ); 114 | Ok(()) 115 | } 116 | Some(Commands::CondaBuild { 
manifest_path }) => build(factory, &manifest_path).await, 117 | Some(Commands::GetCondaMetadata { 118 | manifest_path, 119 | host_platform, 120 | }) => { 121 | let metadata = conda_get_metadata(factory, &manifest_path, host_platform).await?; 122 | println!("{}", serde_yaml::to_string(&metadata).unwrap()); 123 | Ok(()) 124 | } 125 | } 126 | } 127 | 128 | /// Negotiate the capabilities of the backend and initialize the backend. 129 | async fn initialize( 130 | factory: T, 131 | manifest_path: &Path, 132 | ) -> miette::Result> { 133 | // Negotiate the capabilities of the backend. 134 | let capabilities = capabilities::().await?; 135 | let channel_config = ChannelConfig::default_with_root_dir( 136 | manifest_path 137 | .parent() 138 | .expect("manifest should always reside in a directory") 139 | .to_path_buf(), 140 | ); 141 | let project_model = to_project_model( 142 | manifest_path, 143 | &channel_config, 144 | capabilities.highest_supported_project_model, 145 | )?; 146 | 147 | // Check if the project model is required 148 | // and if it is not present, return an error. 149 | if capabilities.highest_supported_project_model.is_some() && project_model.is_none() { 150 | miette::bail!( 151 | "Could not extract 'project_model' from: {}, while it is required", 152 | manifest_path.display() 153 | ); 154 | } 155 | 156 | // Initialize the backend 157 | let (protocol, _initialize_result) = factory 158 | .initialize(InitializeParams { 159 | manifest_path: manifest_path.to_path_buf(), 160 | project_model, 161 | cache_directory: None, 162 | configuration: None, 163 | }) 164 | .await?; 165 | Ok(protocol) 166 | } 167 | 168 | /// Frontend implementation for getting conda metadata. 
169 | async fn conda_get_metadata( 170 | factory: T, 171 | manifest_path: &Path, 172 | host_platform: Option, 173 | ) -> miette::Result { 174 | let channel_config = ChannelConfig::default_with_root_dir( 175 | manifest_path 176 | .parent() 177 | .expect("manifest should always reside in a directory") 178 | .to_path_buf(), 179 | ); 180 | 181 | let protocol = initialize(factory, manifest_path).await?; 182 | 183 | let virtual_packages: Vec<_> = VirtualPackage::detect(&VirtualPackageOverrides::from_env()) 184 | .into_diagnostic()? 185 | .into_iter() 186 | .map(GenericVirtualPackage::from) 187 | .collect(); 188 | 189 | let tempdir = TempDir::new_in(".") 190 | .into_diagnostic() 191 | .context("failed to create a temporary directory in the current directory")?; 192 | 193 | protocol 194 | .conda_get_metadata(CondaMetadataParams { 195 | build_platform: None, 196 | host_platform: host_platform.map(|platform| PlatformAndVirtualPackages { 197 | platform, 198 | virtual_packages: Some(virtual_packages.clone()), 199 | }), 200 | channel_base_urls: None, 201 | channel_configuration: ChannelConfiguration { 202 | base_url: channel_config.channel_alias, 203 | }, 204 | work_directory: tempdir.path().to_path_buf(), 205 | variant_configuration: None, 206 | }) 207 | .await 208 | } 209 | 210 | /// Returns the capabilities of the backend. 211 | async fn capabilities() -> miette::Result { 212 | let result = Factory::negotiate_capabilities(NegotiateCapabilitiesParams { 213 | capabilities: FrontendCapabilities {}, 214 | }) 215 | .await?; 216 | 217 | Ok(result.capabilities) 218 | } 219 | 220 | /// Frontend implementation for building a conda package. 
221 | async fn build(factory: T, manifest_path: &Path) -> miette::Result<()> { 222 | let channel_config = ChannelConfig::default_with_root_dir( 223 | manifest_path 224 | .parent() 225 | .expect("manifest should always reside in a directory") 226 | .to_path_buf(), 227 | ); 228 | 229 | let protocol = initialize(factory, manifest_path).await?; 230 | let work_dir = TempDir::new_in(".") 231 | .into_diagnostic() 232 | .context("failed to create a temporary directory in the current directory")?; 233 | 234 | let result = protocol 235 | .conda_build(CondaBuildParams { 236 | host_platform: None, 237 | build_platform_virtual_packages: None, 238 | channel_base_urls: None, 239 | channel_configuration: ChannelConfiguration { 240 | base_url: channel_config.channel_alias, 241 | }, 242 | outputs: None, 243 | work_directory: work_dir.path().to_path_buf(), 244 | variant_configuration: None, 245 | editable: false, 246 | }) 247 | .await?; 248 | 249 | for package in result.packages { 250 | eprintln!("Successfully build '{}'", package.output_file.display()); 251 | eprintln!("Use following globs to revalidate: "); 252 | for glob in package.input_globs { 253 | eprintln!(" - {}", glob); 254 | } 255 | } 256 | 257 | Ok(()) 258 | } 259 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/common/configuration.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use miette::IntoDiagnostic; 4 | use pixi_build_types::PlatformAndVirtualPackages; 5 | use rattler_build::{ 6 | NormalizedKey, 7 | metadata::{Directories, PlatformWithVirtualPackages}, 8 | recipe::variable::Variable, 9 | }; 10 | use rattler_conda_types::ChannelUrl; 11 | use rattler_virtual_packages::VirtualPackageOverrides; 12 | use url::Url; 13 | 14 | /// Returns the [`BuildConfigurationParams`] that will be used to construct a BuildConfiguration 15 | pub fn build_configuration( 16 | channels: Vec, 17 | 
build_platform: Option, 18 | host_platform: Option, 19 | variant: BTreeMap, 20 | directories: Directories, 21 | ) -> miette::Result { 22 | let build_platform = build_platform.map(|p| PlatformWithVirtualPackages { 23 | platform: p.platform, 24 | virtual_packages: p.virtual_packages.unwrap_or_default(), 25 | }); 26 | 27 | let host_platform = host_platform.map(|p| PlatformWithVirtualPackages { 28 | platform: p.platform, 29 | virtual_packages: p.virtual_packages.unwrap_or_default(), 30 | }); 31 | 32 | let (build_platform, host_platform) = match (build_platform, host_platform) { 33 | (Some(build_platform), Some(host_platform)) => (build_platform, host_platform), 34 | (build_platform, host_platform) => { 35 | let current_platform = rattler_build::metadata::PlatformWithVirtualPackages::detect( 36 | &VirtualPackageOverrides::from_env(), 37 | ) 38 | .into_diagnostic()?; 39 | ( 40 | build_platform.unwrap_or_else(|| current_platform.clone()), 41 | host_platform.unwrap_or(current_platform), 42 | ) 43 | } 44 | }; 45 | 46 | let channels = channels.into_iter().map(Into::into).collect(); 47 | 48 | let params = BuildConfigurationParams { 49 | channels, 50 | build_platform, 51 | host_platform, 52 | variant, 53 | directories, 54 | }; 55 | 56 | Ok(params) 57 | } 58 | 59 | /// The parameters used to construct a BuildConfiguration 60 | #[derive(Debug)] 61 | pub struct BuildConfigurationParams { 62 | pub channels: Vec, 63 | pub build_platform: PlatformWithVirtualPackages, 64 | pub host_platform: PlatformWithVirtualPackages, 65 | pub variant: BTreeMap, 66 | pub directories: Directories, 67 | } 68 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/common/mod.rs: -------------------------------------------------------------------------------- 1 | //! Common utilities that are shared between the different build backends. 
2 | mod configuration; 3 | mod requirements; 4 | mod variants; 5 | 6 | pub use configuration::{BuildConfigurationParams, build_configuration}; 7 | pub use requirements::{PackageRequirements, SourceRequirements, requirements}; 8 | pub use variants::compute_variants; 9 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/common/requirements.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{BTreeMap, HashMap}; 2 | 3 | use rattler_build::{ 4 | NormalizedKey, 5 | recipe::{parser::Requirements, variable::Variable}, 6 | }; 7 | use serde::Serialize; 8 | 9 | use crate::{ 10 | PackageSpec, ProjectModel, Targets, dependencies::ExtractedDependencies, traits::Dependencies, 11 | }; 12 | 13 | pub struct PackageRequirements { 14 | /// Requirements for rattler-build 15 | pub requirements: Requirements, 16 | 17 | /// The source requirements 18 | pub source: SourceRequirements

, 19 | } 20 | 21 | #[derive(Debug, Serialize)] 22 | #[serde(bound( 23 | serialize = "<::Spec as PackageSpec>::SourceSpec: Serialize" 24 | ))] 25 | pub struct SourceRequirements { 26 | /// Source package specification for build dependencies 27 | pub build: HashMap::Spec as PackageSpec>::SourceSpec>, 28 | 29 | /// Source package specification for host dependencies 30 | pub host: HashMap::Spec as PackageSpec>::SourceSpec>, 31 | 32 | /// Source package specification for runtime dependencies 33 | pub run: HashMap::Spec as PackageSpec>::SourceSpec>, 34 | } 35 | 36 | /// Return requirements for the given project model 37 | pub fn requirements( 38 | dependencies: Dependencies<::Spec>, 39 | variant: &BTreeMap, 40 | ) -> miette::Result> { 41 | let build = ExtractedDependencies::from_dependencies(dependencies.build, variant)?; 42 | let host = ExtractedDependencies::from_dependencies(dependencies.host, variant)?; 43 | let run = ExtractedDependencies::from_dependencies(dependencies.run, variant)?; 44 | 45 | Ok(PackageRequirements { 46 | requirements: Requirements { 47 | build: build.dependencies, 48 | host: host.dependencies, 49 | run: run.dependencies, 50 | ..Default::default() 51 | }, 52 | source: SourceRequirements { 53 | build: build.sources, 54 | host: host.sources, 55 | run: run.sources, 56 | }, 57 | }) 58 | } 59 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/common/variants.rs: -------------------------------------------------------------------------------- 1 | //! Variants trait 2 | //! 
3 | use std::collections::BTreeMap; 4 | 5 | use miette::IntoDiagnostic; 6 | use rattler_build::{NormalizedKey, recipe::variable::Variable, variant_config::VariantConfig}; 7 | use rattler_conda_types::Platform; 8 | 9 | use crate::ProjectModel; 10 | 11 | /// Return variants for the given project model 12 | pub fn compute_variants( 13 | project_model: &P, 14 | input_variant_configuration: Option>>, 15 | host_platform: Platform, 16 | ) -> miette::Result>> { 17 | // Create a variant config from the variant configuration in the parameters. 18 | let variant_config = VariantConfig { 19 | variants: input_variant_configuration.unwrap_or_default(), 20 | pin_run_as_build: None, 21 | zip_keys: None, 22 | }; 23 | 24 | // Determine the variant keys that are used in the recipe. 25 | let used_variants = project_model.used_variants(Some(host_platform)); 26 | 27 | // Determine the combinations of the used variants. 28 | variant_config 29 | .combinations(&used_variants, None) 30 | .into_diagnostic() 31 | } 32 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/compilers.rs: -------------------------------------------------------------------------------- 1 | //! 
We could expose the `default_compiler` function from the `rattler-build` crate 2 | 3 | use rattler_conda_types::Platform; 4 | 5 | pub fn default_compiler(platform: Platform, language: &str) -> Option { 6 | Some( 7 | match language { 8 | // Platform agnostic compilers 9 | "fortran" => "gfortran", 10 | lang if !["c", "cxx"].contains(&lang) => lang, 11 | // Platform specific compilers 12 | _ => { 13 | if platform.is_windows() { 14 | match language { 15 | "c" => "vs2019", 16 | "cxx" => "vs2019", 17 | _ => unreachable!(), 18 | } 19 | } else if platform.is_osx() { 20 | match language { 21 | "c" => "clang", 22 | "cxx" => "clangxx", 23 | _ => unreachable!(), 24 | } 25 | } else if matches!(platform, Platform::EmscriptenWasm32) { 26 | match language { 27 | "c" => "emscripten", 28 | "cxx" => "emscripten", 29 | _ => unreachable!(), 30 | } 31 | } else { 32 | match language { 33 | "c" => "gcc", 34 | "cxx" => "gxx", 35 | _ => unreachable!(), 36 | } 37 | } 38 | } 39 | } 40 | .to_string(), 41 | ) 42 | } 43 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/consts.rs: -------------------------------------------------------------------------------- 1 | pub use pixi_consts::consts::*; 2 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/dependencies.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::{BTreeMap, HashMap}, 3 | str::FromStr, 4 | }; 5 | 6 | use miette::{Context, IntoDiagnostic}; 7 | use pixi_build_types as pbt; 8 | use rattler_build::{ 9 | NormalizedKey, 10 | recipe::{parser::Dependency, variable::Variable}, 11 | }; 12 | use rattler_conda_types::{MatchSpec, NamelessMatchSpec, PackageName, ParseStrictness::Strict}; 13 | 14 | use crate::traits::PackageSpec; 15 | 16 | /// A helper struct to extract match specs from a manifest. 
17 | #[derive(Default)] 18 | pub struct MatchspecExtractor<'a> { 19 | variant: Option<&'a BTreeMap>, 20 | } 21 | 22 | pub struct ExtractedMatchSpecs { 23 | pub specs: Vec, 24 | pub sources: HashMap, 25 | } 26 | 27 | impl<'a> MatchspecExtractor<'a> { 28 | pub fn new() -> Self { 29 | Self::default() 30 | } 31 | 32 | /// Sets the variant to use for the match specs. 33 | pub fn with_variant(self, variant: &'a BTreeMap) -> Self { 34 | Self { 35 | variant: Some(variant), 36 | } 37 | } 38 | 39 | /// Extracts match specs from the given set of dependencies. 40 | pub fn extract<'b, S>( 41 | &self, 42 | dependencies: impl IntoIterator, 43 | ) -> miette::Result> 44 | where 45 | S: PackageSpec + 'b, 46 | { 47 | let mut specs = Vec::new(); 48 | let mut source_specs = HashMap::new(); 49 | for (name, spec) in dependencies.into_iter() { 50 | let name = PackageName::from_str(name.as_str()).into_diagnostic()?; 51 | // If we have a variant override, we should use that instead of the spec. 52 | if spec.can_be_used_as_variant() { 53 | if let Some(variant_value) = self 54 | .variant 55 | .as_ref() 56 | .and_then(|variant| variant.get(&NormalizedKey::from(&name))) 57 | { 58 | let spec = NamelessMatchSpec::from_str( 59 | variant_value.as_ref().as_str().wrap_err_with(|| { 60 | miette::miette!("Variant {variant_value} needs to be a string") 61 | })?, 62 | Strict, 63 | ) 64 | .into_diagnostic() 65 | .context("failed to convert variant to matchspec")?; 66 | specs.push(MatchSpec::from_nameless(spec, Some(name))); 67 | continue; 68 | } 69 | } 70 | 71 | // Match on supported packages 72 | let (match_spec, source_spec) = spec.to_match_spec(name.clone())?; 73 | 74 | specs.push(match_spec); 75 | if let Some(source_spec) = source_spec { 76 | source_specs.insert(name.as_normalized().to_owned(), source_spec); 77 | } 78 | } 79 | 80 | Ok(ExtractedMatchSpecs { 81 | specs, 82 | sources: source_specs, 83 | }) 84 | } 85 | } 86 | 87 | pub struct ExtractedDependencies { 88 | pub dependencies: Vec, 89 | pub 
sources: HashMap, 90 | } 91 | 92 | impl ExtractedDependencies { 93 | pub fn from_dependencies<'a>( 94 | dependencies: impl IntoIterator, 95 | variant: &BTreeMap, 96 | ) -> miette::Result 97 | where 98 | T: 'a, 99 | { 100 | let extracted_specs = MatchspecExtractor::new() 101 | .with_variant(variant) 102 | .extract(dependencies)?; 103 | 104 | Ok(Self { 105 | dependencies: extracted_specs 106 | .specs 107 | .into_iter() 108 | .map(Dependency::Spec) 109 | .collect(), 110 | sources: extracted_specs.sources, 111 | }) 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod cli; 2 | pub mod protocol; 3 | pub mod server; 4 | 5 | pub mod cache; 6 | pub mod common; 7 | pub mod compilers; 8 | mod consts; 9 | pub mod dependencies; 10 | pub mod project; 11 | pub mod source; 12 | pub mod tools; 13 | pub mod traits; 14 | pub mod utils; 15 | pub mod variants; 16 | 17 | pub use traits::{PackageSourceSpec, PackageSpec, ProjectModel, TargetSelector, Targets}; 18 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/project.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use miette::IntoDiagnostic; 4 | use pixi_build_type_conversions::to_project_model_v1; 5 | use pixi_build_types::VersionedProjectModel; 6 | use rattler_conda_types::ChannelConfig; 7 | 8 | /// Convert manifest to project model 9 | pub fn to_project_model( 10 | manifest_path: &Path, 11 | channel_config: &ChannelConfig, 12 | highest_supported_project_model: Option, 13 | ) -> miette::Result> { 14 | // Load the manifest 15 | let manifest = 16 | pixi_manifest::Manifests::from_workspace_manifest_path(manifest_path.to_path_buf())?; 17 | let package = manifest.value.package.as_ref(); 18 | 19 | // Determine which project model 
version to use 20 | let version_to_use = match highest_supported_project_model { 21 | // If a specific version is requested, use it (or fail if it's higher than what we support) 22 | Some(requested_version) => { 23 | let our_highest = VersionedProjectModel::highest_version(); 24 | if requested_version > our_highest { 25 | miette::bail!( 26 | "Requested project model version {} is higher than our highest supported version {}", 27 | requested_version, 28 | our_highest 29 | ); 30 | } 31 | // Use the requested version 32 | requested_version 33 | } 34 | // If no specific version is requested, use our highest supported version 35 | None => VersionedProjectModel::highest_version(), 36 | }; 37 | 38 | // This can be null in the rattler-build backend 39 | let versioned = package 40 | .map(|manifest| { 41 | let result = match version_to_use { 42 | 1 => to_project_model_v1(&manifest.value, channel_config).into_diagnostic()?, 43 | _ => { 44 | miette::bail!( 45 | "Unsupported project model version: {}", 46 | VersionedProjectModel::highest_version() 47 | ); 48 | } 49 | }; 50 | Ok(VersionedProjectModel::from(result)) 51 | }) 52 | .transpose()?; 53 | 54 | Ok(versioned) 55 | } 56 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/protocol.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use pixi_build_types::procedures::{ 4 | conda_build::{CondaBuildParams, CondaBuildResult}, 5 | conda_metadata::{CondaMetadataParams, CondaMetadataResult}, 6 | initialize::{InitializeParams, InitializeResult}, 7 | negotiate_capabilities::{NegotiateCapabilitiesParams, NegotiateCapabilitiesResult}, 8 | }; 9 | 10 | /// A trait that is used to instantiate a new protocol connection 11 | /// and endpoint that can handle the RPC calls. 
12 | #[async_trait::async_trait] 13 | pub trait ProtocolInstantiator: Send + Sync + 'static { 14 | /// Get the debug directory 15 | /// If set, internal state will be logged as files in that directory 16 | fn debug_dir(configuration: Option) -> Option; 17 | 18 | /// Called when negotiating capabilities with the client. 19 | /// This is determine how the rest of the initialization will proceed. 20 | async fn negotiate_capabilities( 21 | params: NegotiateCapabilitiesParams, 22 | ) -> miette::Result; 23 | 24 | /// Called when the client requests initialization. 25 | /// Returns the protocol endpoint and the result of the initialization. 26 | async fn initialize( 27 | &self, 28 | params: InitializeParams, 29 | ) -> miette::Result<(Box, InitializeResult)>; 30 | } 31 | 32 | /// A trait that defines the protocol for a pixi build backend. 33 | /// These are implemented by the different backends. Which 34 | /// server as an endpoint for the RPC calls. 35 | #[async_trait::async_trait] 36 | pub trait Protocol { 37 | /// Get the debug directory 38 | /// If set, internal state will be logged as files in that directory 39 | fn debug_dir(&self) -> Option<&Path>; 40 | 41 | /// Called when the client requests metadata for a Conda package. 42 | async fn conda_get_metadata( 43 | &self, 44 | _params: CondaMetadataParams, 45 | ) -> miette::Result { 46 | unimplemented!("conda_get_metadata not implemented"); 47 | } 48 | 49 | /// Called when the client requests to build a Conda package. 
50 | async fn conda_build(&self, _params: CondaBuildParams) -> miette::Result { 51 | unimplemented!("conda_build not implemented"); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /crates/pixi-build-backend/src/server.rs: -------------------------------------------------------------------------------- 1 | use std::{net::SocketAddr, path::Path, sync::Arc}; 2 | 3 | use fs_err::tokio as tokio_fs; 4 | use jsonrpc_core::{Error, IoHandler, Params, serde_json, to_value}; 5 | use miette::{Context, IntoDiagnostic, JSONReportHandler}; 6 | use pixi_build_types::VersionedProjectModel; 7 | use pixi_build_types::procedures::{ 8 | self, conda_build::CondaBuildParams, conda_metadata::CondaMetadataParams, 9 | initialize::InitializeParams, negotiate_capabilities::NegotiateCapabilitiesParams, 10 | }; 11 | 12 | use tokio::sync::RwLock; 13 | 14 | use crate::protocol::{Protocol, ProtocolInstantiator}; 15 | 16 | /// A JSONRPC server that can be used to communicate with a client. 17 | pub struct Server { 18 | instatiator: T, 19 | } 20 | 21 | enum ServerState { 22 | /// Server has not been initialized yet. 23 | Uninitialized(T), 24 | /// Server has been initialized, with a protocol. 25 | Initialized(Box), 26 | } 27 | 28 | impl ServerState { 29 | /// Convert to a protocol, if the server has been initialized. 30 | pub fn as_endpoint( 31 | &self, 32 | ) -> Result<&(dyn Protocol + Send + Sync + 'static), jsonrpc_core::Error> { 33 | match self { 34 | Self::Initialized(protocol) => Ok(protocol.as_ref()), 35 | _ => Err(Error::invalid_request()), 36 | } 37 | } 38 | } 39 | 40 | impl Server { 41 | pub fn new(instatiator: T) -> Self { 42 | Self { instatiator } 43 | } 44 | 45 | /// Run the server, communicating over stdin/stdout. 46 | pub async fn run(self) -> miette::Result<()> { 47 | let io = self.setup_io(); 48 | jsonrpc_stdio_server::ServerBuilder::new(io).build().await; 49 | Ok(()) 50 | } 51 | 52 | /// Run the server, communicating over HTTP. 
    /// Runs the JSON-RPC server over HTTP, bound to localhost on the given
    /// port. Blocks until the server shuts down.
    pub fn run_over_http(self, port: u16) -> miette::Result<()> {
        let io = self.setup_io();
        jsonrpc_http_server::ServerBuilder::new(io)
            // Bind the loopback interface only: the backend serves a local
            // frontend process, not the network.
            .start_http(&SocketAddr::from(([127, 0, 0, 1], port)))
            .into_diagnostic()?
            .wait();
        Ok(())
    }

    /// Setup the IO inner handler.
    ///
    /// Registers one JSON-RPC method per backend procedure:
    /// negotiate-capabilities, initialize, conda-metadata and conda-build.
    /// All but the first share a single `ServerState` behind an async
    /// `RwLock`.
    fn setup_io(self) -> IoHandler {
        // Construct a server
        let mut io = IoHandler::new();
        // Capability negotiation is stateless and answered by the protocol
        // type `T` directly, without touching the server state.
        io.add_method(
            procedures::negotiate_capabilities::METHOD_NAME,
            move |params: Params| async move {
                let params: NegotiateCapabilitiesParams = params.parse()?;
                let result = T::negotiate_capabilities(params)
                    .await
                    .map_err(convert_error)?;
                Ok(to_value(result).expect("failed to convert to json"))
            },
        );

        // Server state: `Uninitialized` until a successful `initialize` call
        // swaps in the protocol endpoint.
        let state = Arc::new(RwLock::new(ServerState::Uninitialized(self.instatiator)));
        let initialize_state = state.clone();
        io.add_method(
            procedures::initialize::METHOD_NAME,
            move |params: Params| {
                let state = initialize_state.clone();

                async move {
                    let params: InitializeParams = params.parse()?;
                    let mut state = state.write().await;
                    // Initializing twice is an invalid request.
                    let ServerState::Uninitialized(initializer) = &mut *state else {
                        return Err(Error::invalid_request());
                    };

                    let debug_dir = T::debug_dir(params.configuration.clone());
                    // Best effort: a failure to write the debug log must not
                    // fail initialization (result intentionally discarded).
                    let _ =
                        log_initialize(debug_dir.as_deref(), params.project_model.clone()).await;

                    let (protocol_endpoint, result) = initializer
                        .initialize(params)
                        .await
                        .map_err(convert_error)?;
                    *state = ServerState::Initialized(protocol_endpoint);

                    Ok(to_value(result).expect("failed to convert to json"))
                }
            },
        );

        let conda_get_metadata = state.clone();
        io.add_method(
            procedures::conda_metadata::METHOD_NAME,
            move |params: Params| {
                let state = conda_get_metadata.clone();

                async move {
                    let params: CondaMetadataParams = params.parse()?;
                    let state = state.read().await;
                    // Errors until `initialize` has installed the endpoint.
                    let endpoint = state.as_endpoint()?;

                    let debug_dir = endpoint.debug_dir();
                    // NOTE(review): unlike `initialize`, a logging failure
                    // here aborts the whole request — confirm that asymmetry
                    // is intentional. Also, `¶ms` below appears to be a
                    // mis-encoded `&params` (extraction artifact) — confirm
                    // against the upstream source.
                    log_conda_get_metadata(debug_dir, ¶ms)
                        .await
                        .map_err(convert_error)?;

                    endpoint
                        .conda_get_metadata(params)
                        .await
                        .map(|value| to_value(value).expect("failed to convert to json"))
                        .map_err(convert_error)
                }
            },
        );

        let conda_build = state.clone();
        io.add_method(
            procedures::conda_build::METHOD_NAME,
            move |params: Params| {
                let state = conda_build.clone();

                async move {
                    let params: CondaBuildParams = params.parse()?;
                    let state = state.read().await;
                    let endpoint = state.as_endpoint()?;

                    let debug_dir = endpoint.debug_dir();
                    log_conda_build(debug_dir, ¶ms)
                        .await
                        .map_err(convert_error)?;

                    endpoint
                        .conda_build(params)
                        .await
                        .map(|value| to_value(value).expect("failed to convert to json"))
                        .map_err(convert_error)
                }
            },
        );

        io
    }
}

/// Converts a `miette::Report` into a JSON-RPC error.
///
/// The full diagnostic is rendered to JSON and attached as the error `data`
/// payload so callers can surface rich error information.
fn convert_error(err: miette::Report) -> jsonrpc_core::Error {
    let rendered = JSONReportHandler::new();
    let mut json_str = String::new();
    rendered
        .render_report(&mut json_str, err.as_ref())
        .expect("failed to convert error to json");
    let data = serde_json::from_str(&json_str).expect("failed to parse json error");
    jsonrpc_core::Error {
        // -32000 lies in the implementation-defined server-error range of
        // the JSON-RPC 2.0 specification.
        code: jsonrpc_core::ErrorCode::ServerError(-32000),
        message: err.to_string(),
        data: Some(data),
    }
}

/// Writes the project model received by `initialize` to
/// `<debug_dir>/project_model.json`. No-op when `debug_dir` is `None`.
// NOTE(review): the generic argument of the `project_model` `Option` appears
// stripped by extraction (the `.into_v1()` call below suggests a versioned
// project-model type) — confirm against upstream. Also note: unlike the other
// `log_*` helpers this one does not `create_dir_all` the debug directory
// first — confirm whether that is intentional.
async fn log_initialize(
    debug_dir: Option<&Path>,
    project_model: Option,
) -> miette::Result<()> {
    let Some(debug_dir) = debug_dir else {
        return Ok(());
    };

    // A debug dir without a project model is considered a caller error.
    let project_model = project_model
        .ok_or_else(|| miette::miette!("project model is required if debug_dir is given"))?
        .into_v1()
        .ok_or_else(|| miette::miette!("project model needs to be v1"))?;

    let project_model_json = serde_json::to_string_pretty(&project_model)
        .into_diagnostic()
        .context("failed to serialize project model to JSON")?;

    let project_model_path = debug_dir.join("project_model.json");
    tokio_fs::write(&project_model_path, project_model_json)
        .await
        .into_diagnostic()
        .context("failed to write project model JSON to file")?;
    Ok(())
}

/// Dumps the conda-metadata request parameters to
/// `<debug_dir>/conda_metadata_params.json`. No-op when `debug_dir` is `None`.
async fn log_conda_get_metadata(
    debug_dir: Option<&Path>,
    params: &CondaMetadataParams,
) -> miette::Result<()> {
    let Some(debug_dir) = debug_dir else {
        return Ok(());
    };

    // NOTE(review): `¶ms` appears to be a mis-encoded `&params` — confirm.
    let json = serde_json::to_string_pretty(¶ms)
        .into_diagnostic()
        .context("failed to serialize parameters to JSON")?;

    // Ensure the debug directory exists before writing into it.
    tokio_fs::create_dir_all(&debug_dir)
        .await
        .into_diagnostic()
        .context("failed to create data directory")?;

    let path = debug_dir.join("conda_metadata_params.json");
    tokio_fs::write(&path, json)
        .await
        .into_diagnostic()
        .context("failed to write JSON to file")?;
    Ok(())
}

/// Dumps the conda-build request parameters to
/// `<debug_dir>/conda_build_params.json`. No-op when `debug_dir` is `None`.
async fn log_conda_build(
    debug_dir: Option<&Path>,
    params: &CondaBuildParams,
) -> miette::Result<()> {
    let Some(debug_dir) = debug_dir else {
        return Ok(());
    };

    let json = serde_json::to_string_pretty(¶ms)
        .into_diagnostic()
        .context("failed to serialize parameters to JSON")?;

    tokio_fs::create_dir_all(&debug_dir)
        .await
        .into_diagnostic()
        .context("failed to create data directory")?;

    let path = debug_dir.join("conda_build_params.json");
    tokio_fs::write(&path, json)
        .await
        .into_diagnostic()
        .context("failed to write JSON to file")?;
    Ok(())
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/source.rs: --------------------------------------------------------------------------------
use miette::{MietteError, MietteSpanContents, SourceCode, SourceSpan, SpanContents};
use std::path::PathBuf;
use std::{path::Path, sync::Arc};

/// The contents of a specific source file together with the name of the source
/// file.
///
/// The name of the source file is used to identify the source file in error
/// messages.
#[derive(Debug, Clone)]
pub struct Source {
    /// The name of the source.
    pub name: String,
    /// The source code.
    // NOTE(review): the generic parameter of `Arc` appears stripped by
    // extraction; `Arc::from(contents.as_str())` below suggests `Arc<str>` —
    // confirm against upstream.
    pub code: Arc,
    /// The actual path to the source file.
    pub path: PathBuf,
}

impl Source {
    /// Constructs a new instance by loading the source code from a file.
    ///
    /// The root directory is used to calculate the relative path of the source
    /// which is then used as the name of the source.
    pub fn from_rooted_path(root_dir: &Path, path: PathBuf) -> std::io::Result {
        // Prefer the path relative to `root_dir`; fall back to the bare file
        // name when no relative path can be computed, and to an empty name as
        // a last resort.
        let relative_path = pathdiff::diff_paths(&path, root_dir);
        let name = relative_path
            .as_deref()
            .map(|path| path.as_os_str())
            .or_else(|| path.file_name())
            .map(|p| p.to_string_lossy())
            .unwrap_or_default()
            .into_owned();

        let contents = fs_err::read_to_string(&path)?;
        Ok(Self {
            name,
            code: Arc::from(contents.as_str()),
            path,
        })
    }
}

impl AsRef for Source {
    fn as_ref(&self) -> &str {
        self.code.as_ref()
    }
}

/// Implements miette's `SourceCode` so that a `Source` can be used directly in
/// diagnostics: spans are resolved against the inner code and then re-labelled
/// with the source name.
impl SourceCode for Source {
    fn read_span<'a>(
        &'a self,
        span: &SourceSpan,
        context_lines_before: usize,
        context_lines_after: usize,
    ) -> Result + 'a>, MietteError> {
        // Delegate span resolution to the underlying string...
        let inner_contents =
            self.as_ref()
                .read_span(span, context_lines_before, context_lines_after)?;
        // ...then attach the source name so error snippets are labelled.
        let contents = MietteSpanContents::new_named(
            self.name.clone(),
            inner_contents.data(),
            *inner_contents.span(),
            inner_contents.line(),
            inner_contents.column(),
            inner_contents.line_count(),
        );
        Ok(Box::new(contents))
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/tools.rs: --------------------------------------------------------------------------------
use std::{
    collections::{BTreeMap, HashMap},
    path::PathBuf,
};

use chrono::Utc;
use indexmap::IndexSet;
use itertools::Itertools;
use miette::IntoDiagnostic;
use pixi_build_types::procedures::conda_metadata::CondaMetadataParams;
use rattler_build::{
    hash::HashInfo,
    metadata::{
        BuildConfiguration, Directories, Output, PackageIdentifier, PackagingSettings,
        PlatformWithVirtualPackages,
    },
    recipe::{
        Jinja, ParsingError, Recipe,
        parser::{GlobVec, find_outputs_from_src},
        variable::Variable,
    },
    selectors::SelectorConfig,
    system_tools::SystemTools,
    variant_config::{DiscoveredOutput, ParseErrors, VariantConfig},
};
use rattler_build::{metadata::Debug, recipe::parser::BuildString};
use rattler_conda_types::{GenericVirtualPackage, Platform, package::ArchiveType};
use rattler_package_streaming::write::CompressionLevel;
use rattler_virtual_packages::VirtualPackageOverrides;
use url::Url;

use crate::source::Source;

/// A `recipe.yaml` file might be accompanied by a `variants.yaml` file from
/// which we can read variant configuration for that specific recipe.
pub const VARIANTS_CONFIG_FILE: &str = "variants.yaml";

/// A struct that contains all the configuration needed
/// for `rattler-build` in order to build a recipe.
/// The principal concept is that all rattler-build concepts
/// should be hidden behind this struct and all pixi-build-backends
/// should only interact with this struct.
pub struct RattlerBuild {
    /// The source of the recipe
    pub recipe_source: Source,
    /// The selector configuration.
    pub selector_config: SelectorConfig,
    /// The directory where the build should happen.
    pub work_directory: PathBuf,
}

impl RattlerBuild {
    /// Create a new `RattlerBuild` instance.
    pub fn new(source: Source, selector_config: SelectorConfig, work_directory: PathBuf) -> Self {
        Self {
            recipe_source: source,
            selector_config,
            work_directory,
        }
    }

    /// Create a `SelectorConfig` from the given `CondaMetadataParams`.
    ///
    /// Any platform not supplied by the caller defaults to the current
    /// platform.
    // NOTE(review): `target_platform` is derived from `params.build_platform`
    // rather than `params.host_platform` — confirm this is intentional.
    pub fn selector_config_from(params: &CondaMetadataParams) -> SelectorConfig {
        SelectorConfig {
            target_platform: params
                .build_platform
                .as_ref()
                .map(|p| p.platform)
                .unwrap_or(Platform::current()),
            host_platform: params
                .host_platform
                .as_ref()
                .map(|p| p.platform)
                .unwrap_or(Platform::current()),
            build_platform: params
                .build_platform
                .as_ref()
                .map(|p| p.platform)
                .unwrap_or(Platform::current()),
            hash: None,
            variant: Default::default(),
            experimental: true,
            allow_undefined: false,
        }
    }

    /// Discover the outputs from the recipe.
    // NOTE(review): generic arguments in the signature appear stripped by
    // extraction (parameter likely a map of variant-name to values, return
    // likely an `IndexSet<DiscoveredOutput>`) — confirm against upstream.
    pub fn discover_outputs(
        &self,
        variant_config_input: &Option>>,
    ) -> miette::Result> {
        // First find all outputs from the recipe
        let outputs = find_outputs_from_src(self.recipe_source.clone())?;

        // Check if there is a `variants.yaml` file next to the recipe that we should
        // potentially use.
        let mut variant_configs = None;
        if let Some(variant_path) = self
            .recipe_source
            .path
            .parent()
            .map(|parent| parent.join(VARIANTS_CONFIG_FILE))
        {
            if variant_path.is_file() {
                variant_configs = Some(vec![variant_path]);
            }
        };

        let variant_configs = variant_configs.unwrap_or_default();

        let mut variant_config =
            VariantConfig::from_files(&variant_configs, &self.selector_config)?;

        // Variants supplied by the caller override entries read from the
        // `variants.yaml` file (same key is replaced).
        if let Some(variant_config_input) = variant_config_input {
            for (k, v) in variant_config_input.iter() {
                let variables = v.iter().map(|v| Variable::from_string(v)).collect();
                variant_config.variants.insert(k.as_str().into(), variables);
            }
        }

        Ok(variant_config.find_variants(
            &outputs,
            self.recipe_source.clone(),
            &self.selector_config,
        )?)
    }

    /// Get the outputs from the recipe.
    // NOTE(review): several generic arguments in this signature appear
    // stripped by extraction (`Vec`, `IndexSet`, `Result`) — confirm against
    // upstream.
    pub fn get_outputs(
        &self,
        discovered_outputs: &IndexSet,
        channels: Vec,
        build_vpkgs: Vec,
        host_vpkgs: Vec,
        host_platform: Platform,
        build_platform: Platform,
    ) -> miette::Result> {
        let mut outputs = Vec::new();

        // Outputs seen so far are recorded here so later outputs can refer to
        // them as subpackages.
        let mut subpackages = BTreeMap::new();

        let channels = channels.into_iter().map(Into::into).collect_vec();
        for discovered_output in discovered_outputs {
            // The build-string hash is derived from the variant variables and
            // the noarch type.
            let hash = HashInfo::from_variant(
                &discovered_output.used_vars,
                &discovered_output.noarch_type,
            );

            // Per-output selector config: same platforms as `self`, but
            // carrying this output's variant and hash.
            let selector_config = SelectorConfig {
                variant: discovered_output.used_vars.clone(),
                hash: Some(hash.clone()),
                target_platform: self.selector_config.target_platform,
                host_platform: self.selector_config.host_platform,
                build_platform: self.selector_config.build_platform,
                experimental: true,
                allow_undefined: false,
            };

            let mut recipe = Recipe::from_node(&discovered_output.node, selector_config.clone())
                .map_err(|err| {
                    // Attach the recipe source to each partial parse error so
                    // diagnostics point at the right file.
                    let errs: ParseErrors<_> = err
                        .into_iter()
                        .map(|err| ParsingError::from_partial(self.recipe_source.clone(), err))
                        .collect::>()
                        .into();
                    errs
                })?;

            recipe.build.string = BuildString::Resolved(BuildString::compute(
                &discovered_output.hash,
                recipe.build.number,
            ));

            // Always append `.pixi` to the exclude globs of path sources so
            // the local pixi environment directory is never packaged.
            for source in &mut recipe.source {
                if let rattler_build::recipe::parser::Source::Path(path_source) = source {
                    let include = path_source
                        .filter
                        .include_globs()
                        .iter()
                        .map(|g| g.source())
                        .collect();
                    let exclude = path_source
                        .filter
                        .exclude_globs()
                        .iter()
                        .map(|g| g.source())
                        .chain([".pixi"])
                        .collect();
                    path_source.filter = GlobVec::from_vec(include, Some(exclude));
                }
            }

            // Honour `build.skip` selectors: skipped variants produce no
            // output.
            if recipe.build().skip() {
                eprintln!(
                    "Skipping build for variant: {:#?}",
                    discovered_output.used_vars
                );
                continue;
            }

            let jinja = Jinja::new(selector_config);

            subpackages.insert(
                recipe.package().name().clone(),
                PackageIdentifier {
                    name: recipe.package().name().clone(),
                    version: recipe.package().version().version().clone().into(),
                    build_string: recipe
                        .build()
                        .string()
                        .resolve(&hash, recipe.build().number(), &jinja)
                        .into_owned(),
                },
            );

            let name = recipe.package().name().clone();

            outputs.push(Output {
                recipe,
                build_configuration: BuildConfiguration {
                    target_platform: discovered_output.target_platform,
                    host_platform: PlatformWithVirtualPackages {
                        platform: host_platform,
                        virtual_packages: host_vpkgs.clone(),
                    },
                    build_platform: PlatformWithVirtualPackages {
                        platform: build_platform,
                        virtual_packages: build_vpkgs.clone(),
                    },
                    hash,
                    variant: discovered_output.used_vars.clone(),
                    directories: Directories::setup(
                        name.as_normalized(),
                        &self.recipe_source.path,
                        &self.work_directory,
                        true,
                        &Utc::now(),
                    )
                    .into_diagnostic()?,
                    channels: channels.clone(),
                    channel_priority: Default::default(),
                    solve_strategy: Default::default(),
                    timestamp: chrono::Utc::now(),
                    subpackages: subpackages.clone(),
                    // Always produce `.conda` archives at the default
                    // compression level.
                    packaging_settings: PackagingSettings::from_args(
                        ArchiveType::Conda,
                        CompressionLevel::default(),
                    ),
                    store_recipe: false,
                    force_colors: true,
                    sandbox_config: None,
                    debug: Debug::new(false),
                },
                finalized_dependencies: None,
                finalized_cache_dependencies: None,
                finalized_cache_sources: None,
                finalized_sources: None,
                system_tools: SystemTools::new(),
                build_summary: Default::default(),
                extra_meta: None,
            });
        }

        Ok(outputs)
    }

    /// Detect the virtual packages.
    ///
    /// Uses the provided list when given; otherwise detects the current
    /// platform's virtual packages, honouring environment overrides.
    pub fn detect_virtual_packages(
        vpkgs: Option>,
    ) -> miette::Result> {
        let vpkgs = match vpkgs {
            Some(vpkgs) => vpkgs,
            None => {
                PlatformWithVirtualPackages::detect(&VirtualPackageOverrides::from_env())
                    .into_diagnostic()?
                    .virtual_packages
            }
        };
        Ok(vpkgs)
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/traits/mod.rs: --------------------------------------------------------------------------------
//! Collection of traits for package specifications and project models
//!
//! The main entry point is the [`ProjectModel`] trait which defines the core
//! interface for a project model.
//!
//! Any backend that will deal with Project (from pixi frontend as example)
//! should implement this.
#![deny(missing_docs)]

pub mod package_spec;
pub mod project;
pub mod targets;

pub use package_spec::{AnyVersion, BinarySpecExt, PackageSourceSpec, PackageSpec};
pub use project::ProjectModel;
pub use targets::{Dependencies, TargetSelector, Targets};
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/traits/package_spec.rs: --------------------------------------------------------------------------------
//! Package specification traits
//!
//! # Key components
//!
//! * [`PackageSpec`] - Core trait for package specification behavior
//! * [`AnyVersion`] - Trait for creating wildcard version specifications that
//!   can match any version
//! * [`BinarySpecExt`] - Extension for converting binary specs to nameless
//!   match specs

use std::fmt::Debug;
use std::sync::Arc;

use miette::IntoDiagnostic;
use pixi_build_types::{self as pbt};
use rattler_conda_types::{Channel, MatchSpec, NamelessMatchSpec, PackageName};

/// Get the * version for the version type, that is currently being used
pub trait AnyVersion {
    /// Get the * version for the version type, that is currently being used
    fn any() -> Self;
}

/// Convert a binary spec to a nameless match spec
pub trait BinarySpecExt {
    /// Return a NamelessMatchSpec from the binary spec
    fn to_nameless(&self) -> NamelessMatchSpec;
}

/// A trait that define the package spec interface
pub trait PackageSpec: Send {
    /// Source representation of a package
    type SourceSpec: PackageSourceSpec;

    /// Returns true if the specified [`PackageSpec`] is a valid variant spec.
    fn can_be_used_as_variant(&self) -> bool;

    /// Converts the package spec to a match spec.
    // NOTE(review): the generic argument of `Option` in the return type
    // appears stripped by extraction (likely `Option<Self::SourceSpec>`,
    // matching the implementation below) — confirm against upstream.
    fn to_match_spec(
        &self,
        name: PackageName,
    ) -> miette::Result<(MatchSpec, Option)>;
}

/// A trait that defines the package source spec interface
pub trait PackageSourceSpec: Debug + Send {
    /// Convert this instance into a v1 instance.
    fn to_v1(self) -> pbt::SourcePackageSpecV1;
}

impl PackageSpec for pbt::PackageSpecV1 {
    type SourceSpec = pbt::SourcePackageSpecV1;

    fn can_be_used_as_variant(&self) -> bool {
        match self {
            pbt::PackageSpecV1::Binary(boxed_spec) => {
                // Exhaustive destructuring: adding a field to the spec type
                // forces this check to be revisited at compile time.
                let pbt::BinaryPackageSpecV1 {
                    version,
                    build,
                    build_number,
                    file_name,
                    channel,
                    subdir,
                    md5,
                    sha256,
                } = &**boxed_spec;

                // Only a fully unconstrained `*` binary spec can act as a
                // variant.
                version == &Some(rattler_conda_types::VersionSpec::Any)
                    && build.is_none()
                    && build_number.is_none()
                    && file_name.is_none()
                    && channel.is_none()
                    && subdir.is_none()
                    && md5.is_none()
                    && sha256.is_none()
            }
            _ => false,
        }
    }

    fn to_match_spec(
        &self,
        name: PackageName,
    ) -> miette::Result<(MatchSpec, Option)> {
        match self {
            pbt::PackageSpecV1::Binary(binary_spec) => {
                let match_spec = if binary_spec.version == Some("*".parse().unwrap()) {
                    // Skip dependencies with wildcard versions.
                    // NOTE(review): the turbofish argument appears stripped by
                    // extraction (likely `parse::<MatchSpec>()`) — confirm.
                    name.as_normalized()
                        .to_string()
                        .parse::()
                        .into_diagnostic()?
                } else {
                    MatchSpec::from_nameless(binary_spec.to_nameless(), Some(name))
                };
                Ok((match_spec, None))
            }
            // Source specs match by name only; the source spec itself is
            // returned alongside so the caller can resolve it.
            pbt::PackageSpecV1::Source(source_spec) => Ok((
                MatchSpec {
                    name: Some(name),
                    ..MatchSpec::default()
                },
                Some(source_spec.clone()),
            )),
        }
    }
}

impl AnyVersion for pbt::PackageSpecV1 {
    fn any() -> Self {
        pbt::PackageSpecV1::Binary(Box::new(rattler_conda_types::VersionSpec::Any.into()))
    }
}

impl BinarySpecExt for pbt::BinaryPackageSpecV1 {
    fn to_nameless(&self) -> NamelessMatchSpec {
        NamelessMatchSpec {
            version: self.version.clone(),
            build: self.build.clone(),
            build_number: self.build_number.clone(),
            file_name: self.file_name.clone(),
            channel: self
                .channel
                .as_ref()
                .map(|url| Arc::new(Channel::from_url(url.clone()))),
            subdir: self.subdir.clone(),
            md5: self.md5.as_ref().map(|m| m.0),
            sha256: self.sha256.as_ref().map(|s| s.0),
            // Fields with no counterpart in the binary spec are left unset.
            namespace: None,
            url: None,
            extras: None,
            license: None,
        }
    }
}

impl PackageSourceSpec for pbt::SourcePackageSpecV1 {
    // Already v1, so the conversion is the identity.
    fn to_v1(self) -> pbt::SourcePackageSpecV1 {
        self
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/traits/project.rs: --------------------------------------------------------------------------------
//! Project behaviour traits.
//!
//! # Key components
//!
//! * [`ProjectModel`] - Core trait for project model interface

use std::collections::HashSet;

use itertools::Itertools;
use pixi_build_types::{self as pbt};
use rattler_build::NormalizedKey;
use rattler_conda_types::{Platform, Version};

use super::{Dependencies, PackageSpec, targets::Targets};

/// A trait that defines the project model interface
// NOTE(review): generic arguments throughout this trait appear stripped by
// extraction (e.g. `Option<Platform>` parameters, `HashSet<NormalizedKey>`,
// `&Option<Version>`) — confirm against upstream.
pub trait ProjectModel {
    /// The targets type of the project model
    type Targets: Targets;

    /// Return the targets of the project model
    fn targets(&self) -> Option<&Self::Targets>;

    /// Return the dependencies of the project model
    // Default implementation: delegate to the targets, or an empty set when
    // the project declares no targets at all.
    fn dependencies(
        &self,
        platform: Option,
    ) -> Dependencies<<::Targets as Targets>::Spec> {
        self.targets()
            .map(|t| t.dependencies(platform))
            .unwrap_or_default()
    }

    /// Return the used variants of the project model
    fn used_variants(&self, platform: Option) -> HashSet;

    /// Return the name of the project model
    fn name(&self) -> &str;

    /// Return the version of the project model
    fn version(&self) -> &Option;
}

impl ProjectModel for pbt::ProjectModelV1 {
    type Targets = pbt::TargetsV1;

    fn targets(&self) -> Option<&Self::Targets> {
        self.targets.as_ref()
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn version(&self) -> &Option {
        &self.version
    }

    fn used_variants(&self, platform: Option) -> HashSet {
        // Gather build, host and run dependencies for the platform...
        let build_dependencies = self
            .targets()
            .iter()
            .flat_map(|target| target.build_dependencies(platform))
            .collect_vec();

        let host_dependencies = self
            .targets()
            .iter()
            .flat_map(|target| target.host_dependencies(platform))
            .collect_vec();

        let run_dependencies = self
            .targets()
            .iter()
            .flat_map(|target| target.run_dependencies(platform))
            .collect_vec();

        // ...and keep the names of those whose spec is unconstrained (`*`),
        // since only those can act as build variants.
        let used_variants = build_dependencies
            .iter()
            .chain(host_dependencies.iter())
            .chain(run_dependencies.iter())
            .filter(|(_, spec)| spec.can_be_used_as_variant())
            .map(|(name, _)| name.as_str().into())
            .collect();

        used_variants
    }
}

/// Return a spec of a project model that matches any version
// NOTE(review): the generic parameter list appears stripped by extraction
// (the `P::Targets::empty_spec()` body suggests `new_spec<P: ProjectModel>()`)
// — confirm against upstream.
pub fn new_spec() -> <
::Targets as Targets>::Spec {
    P::Targets::empty_spec()
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/traits/targets.rs: --------------------------------------------------------------------------------
//! Targets behaviour traits.
//!
//! # Key components
//!
//! * [`Targets`] - A project target trait.
//! * [`TargetSelector`] - An extension trait that extends the target selector with additional functionality.
//! * [`Dependencies`] - A wrapper struct that contains all dependencies for a target.
use indexmap::IndexMap;
use itertools::{Either, Itertools};
use pixi_build_types::{PackageSpecV1, SourcePackageName};
use rattler_conda_types::Platform;

use crate::PackageSpec;
use pixi_build_types::{self as pbt};

/// A trait that extend the target selector with additional functionality.
pub trait TargetSelector {
    /// Does the target selector match the platform?
    fn matches(&self, platform: Platform) -> bool;
}

#[derive(Debug)]
/// A wrapper struct that contains all dependencies for a target
pub struct Dependencies<'a, S> {
    /// The run dependencies
    pub run: IndexMap<&'a SourcePackageName, &'a S>,
    /// The host dependencies
    pub host: IndexMap<&'a SourcePackageName, &'a S>,
    /// The build dependencies
    pub build: IndexMap<&'a SourcePackageName, &'a S>,
}

// NOTE(review): the `impl` generic parameter list appears stripped by
// extraction (likely `impl<S> Default ...`) — confirm against upstream.
impl Default for Dependencies<'_, S> {
    fn default() -> Self {
        Self::empty()
    }
}

impl<'a, S> Dependencies<'a, S> {
    /// Create a new Dependencies
    pub fn new(
        run: IndexMap<&'a SourcePackageName, &'a S>,
        host: IndexMap<&'a SourcePackageName, &'a S>,
        build: IndexMap<&'a SourcePackageName, &'a S>,
    ) -> Self {
        Self { run, host, build }
    }

    /// Return an empty Dependencies
    pub fn empty() -> Self {
        Self {
            run: IndexMap::new(),
            host: IndexMap::new(),
            build: IndexMap::new(),
        }
    }

    /// Return true if the dependencies contains the given package name
    /// in any of the run, host or build sections.
    pub fn contains(&self, name: &SourcePackageName) -> bool {
        self.run.contains_key(name) || self.host.contains_key(name) || self.build.contains_key(name)
    }
}

/// A trait that represent a project target.
// NOTE(review): several generic arguments below appear stripped by extraction
// (`impl Iterator` item types, `Option<Platform>` parameters) — confirm
// against upstream.
pub trait Targets {
    /// The selector, in pixi this is something like `[target.linux-64]`
    type Selector: TargetSelector;
    /// The target it is resolving to
    type Target;

    /// The Spec type that is used in the package spec
    type Spec: PackageSpec;

    /// Returns the default target.
    fn default_target(&self) -> Option<&Self::Target>;

    /// Return a spec that matches any version
    fn empty_spec() -> Self::Spec;

    /// Returns all targets
    fn targets(&self) -> impl Iterator;

    /// Return all dependencies for the given platform
    fn dependencies(&self, platform: Option) -> Dependencies;

    /// Return the run dependencies for the given platform
    fn run_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &Self::Spec>;

    /// Return the host dependencies for the given platform
    fn host_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &Self::Spec>;

    /// Return the build dependencies for the given platform
    fn build_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &Self::Spec>;

    /// Resolve the target for the given platform.
    ///
    /// The default target is always included; platform-specific targets are
    /// appended when their selector matches. Without a platform only the
    /// default target is yielded.
    fn resolve(&self, platform: Option) -> impl Iterator {
        if let Some(platform) = platform {
            let iter = self
                .default_target()
                .into_iter()
                .chain(self.targets().filter_map(move |(selector, target)| {
                    if selector.matches(platform) {
                        Some(target)
                    } else {
                        None
                    }
                }));
            Either::Right(iter)
        } else {
            Either::Left(self.default_target().into_iter())
        }
    }
}

// === Below here are the implementations for v1 ===
impl TargetSelector for pbt::TargetSelectorV1 {
    fn matches(&self, platform: Platform) -> bool {
        match self {
            // An explicit platform selector must match the platform name
            // exactly; the family selectors match any member of the family.
            pbt::TargetSelectorV1::Platform(p) => p == &platform.to_string(),
            pbt::TargetSelectorV1::Linux => platform.is_linux(),
            pbt::TargetSelectorV1::Unix => platform.is_unix(),
            pbt::TargetSelectorV1::Win => platform.is_windows(),
            pbt::TargetSelectorV1::MacOs => platform.is_osx(),
        }
    }
}

impl Targets for pbt::TargetsV1 {
    type Selector = pbt::TargetSelectorV1;
    type Target = pbt::TargetV1;

    type Spec = pbt::PackageSpecV1;

    fn default_target(&self) -> Option<&pbt::TargetV1> {
        self.default_target.as_ref()
    }

    fn targets(&self) -> impl Iterator {
        self.targets.iter().flatten()
    }

    fn empty_spec() -> PackageSpecV1 {
        pbt::PackageSpecV1::Binary(Box::new(rattler_conda_types::VersionSpec::Any.into()))
    }

    fn run_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &PackageSpecV1> {
        // Merge run dependencies from every target that applies to the
        // platform (default target first, then matching selectors).
        let targets = self.resolve(platform).collect_vec();

        targets
            .iter()
            .flat_map(|t| t.run_dependencies.iter())
            .flatten()
            .collect::>()
    }

    fn host_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &PackageSpecV1> {
        let targets = self.resolve(platform).collect_vec();

        targets
            .iter()
            .flat_map(|t| t.host_dependencies.iter())
            .flatten()
            .collect::>()
    }

    fn build_dependencies(
        &self,
        platform: Option,
    ) -> IndexMap<&SourcePackageName, &PackageSpecV1> {
        let targets = self.resolve(platform).collect_vec();

        targets
            .iter()
            .flat_map(|t| t.build_dependencies.iter())
            .flatten()
            .collect::>()
    }

    fn dependencies(&self, platform: Option) -> Dependencies {
        let build_deps = self.build_dependencies(platform);
        let host_deps = self.host_dependencies(platform);
        let run_deps = self.run_dependencies(platform);

        Dependencies::new(run_deps, host_deps, build_deps)
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/utils/mod.rs: --------------------------------------------------------------------------------
mod temporary_recipe;

pub use
temporary_recipe::TemporaryRenderedRecipe;
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/utils/temporary_recipe.rs: --------------------------------------------------------------------------------
use miette::{IntoDiagnostic, WrapErr};
use rattler_build::metadata::Output;
use std::future::Future;
use std::io::BufWriter;
use std::path::PathBuf;

/// A helper struct that owns a temporary file containing a rendered recipe.
/// If `finish` is not called, the temporary file will stay on disk for
/// debugging purposes.
// NOTE(review): no `finish` method exists in this file; cleanup happens in
// `within_context_async` — the doc comment above may be outdated. Confirm.
pub struct TemporaryRenderedRecipe {
    // Path of the persisted temporary recipe file.
    file: PathBuf,
}

impl TemporaryRenderedRecipe {
    /// Serializes the recipe of `output` to a uniquely named
    /// `.rendered-recipe*.yaml` file inside the output directory.
    // NOTE(review): the `Result` generic argument appears stripped by
    // extraction (likely `miette::Result<Self>`) — confirm against upstream.
    pub fn from_output(output: &Output) -> miette::Result {
        // Ensure that the output directory exists
        std::fs::create_dir_all(&output.build_configuration.directories.output_dir)
            .into_diagnostic()
            .context("failed to create output directory")?;

        let (recipe_file, recipe_path) = tempfile::Builder::new()
            .prefix(".rendered-recipe")
            .suffix(".yaml")
            .tempfile_in(&output.build_configuration.directories.output_dir)
            .into_diagnostic()
            .context("failed to create temporary file for recipe")?
            .into_parts();

        // Write the recipe back to a file
        serde_yaml::to_writer(BufWriter::new(recipe_file), &output.recipe)
            .into_diagnostic()
            .context("failed to write recipe to temporary file")?;

        Ok(Self {
            // `keep` disarms tempfile's auto-delete so the rendered recipe
            // survives until explicitly removed below (or indefinitely if the
            // operation fails).
            file: recipe_path.keep().unwrap(),
        })
    }

    /// Runs `operation` and removes the temporary recipe file afterwards.
    ///
    /// The file is removed only after the operation succeeds; on failure it
    /// is left on disk so it can be inspected.
    pub async fn within_context_async<
        R,
        Fut: Future>,
        F: FnOnce() -> Fut,
    >(
        self,
        operation: F,
    ) -> miette::Result {
        let result = operation().await?;
        std::fs::remove_file(self.file)
            .into_diagnostic()
            .context("failed to remove temporary recipe file")?;
        Ok(result)
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-backend/src/variants.rs: --------------------------------------------------------------------------------
use pixi_build_types as pbt;
use rattler_conda_types::VersionSpec;

/// Returns true if the specified [`pbt::PackageSpecV1`] is a valid variant spec.
///
/// At the moment, a spec that allows any version is considered a variant spec.
// NOTE(review): this free function duplicates the logic of
// `PackageSpecV1::can_be_used_as_variant` in `traits/package_spec.rs`;
// consider consolidating on one implementation.
pub fn can_be_used_as_variant(spec: &pbt::PackageSpecV1) -> bool {
    match spec {
        pbt::PackageSpecV1::Binary(boxed_spec) => {
            // Exhaustive destructuring: adding a field to the spec type
            // forces this check to be revisited at compile time.
            let pbt::BinaryPackageSpecV1 {
                version,
                build,
                build_number,
                file_name,
                channel,
                subdir,
                md5,
                sha256,
            } = &**boxed_spec;

            // Only a fully unconstrained `*` binary spec qualifies.
            version == &Some(VersionSpec::Any)
                && build.is_none()
                && build_number.is_none()
                && file_name.is_none()
                && channel.is_none()
                && subdir.is_none()
                && md5.is_none()
                && sha256.is_none()
        }
        _ => false,
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-cmake/Cargo.toml: --------------------------------------------------------------------------------
[package]
name = "pixi-build-cmake"
version = "0.1.10"
edition.workspace = true

[dependencies]
async-trait = { workspace = true }
chrono = { workspace = true }
indexmap = { workspace = true }
miette = { workspace = true }
minijinja = { workspace = true }
rattler_conda_types = { workspace = true }
rattler_package_streaming = { workspace = true }
rattler-build = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["macros"] }

pixi-build-backend = { workspace = true }

pixi_build_types = { workspace = true }
pixi_manifest = { workspace = true }
pixi_build_type_conversions = { workspace = true }

[dev-dependencies]
insta = { version = "1.42.1", features = ["yaml", "redactions", "filters"] }
-------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/build_script.j2: --------------------------------------------------------------------------------
ninja --version
cmake --version

{# Windows #}
{% if build_platform == "windows" -%}
if not exist %SRC_DIR%\..\build\build.ninja (
    cmake %CMAKE_ARGS% ^
        -GNinja ^
        -DCMAKE_BUILD_TYPE=Release ^
        -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^
        -DCMAKE_EXPORT_COMPILE_COMMANDS=ON ^
        -DBUILD_SHARED_LIBS=ON ^
        {{ extra_args | join(" ") }} ^
        -B %SRC_DIR%\..\build ^
        -S "{{ source_dir }}"
    @if errorlevel 1 exit 1
)
cmake --build %SRC_DIR%\..\build --target install
@if errorlevel 1 exit 1

{# Non Windows #}
{% else -%}
if [ ! -f "$SRC_DIR/../build/build.ninja" ]; then
    cmake $CMAKE_ARGS \
        -GNinja \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_INSTALL_PREFIX=$PREFIX \
        -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
        -DBUILD_SHARED_LIBS=ON \
        {{ extra_args | join(" ") }} \
        -B $SRC_DIR/../build \
        -S "{{ source_dir }}"
fi
cmake --build $SRC_DIR/../build --target install
{% endif -%}

{% if build_platform == "windows" -%}
@if errorlevel 1 exit 1
{% endif %}
-------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/build_script.rs: --------------------------------------------------------------------------------
use minijinja::Environment;
use serde::Serialize;

/// Template context rendered into `build_script.j2`.
#[derive(Serialize)]
pub struct BuildScriptContext {
    // Selects the cmd.exe vs POSIX-shell branch of the template.
    pub build_platform: BuildPlatform,
    // Passed to `cmake -S` as the source directory.
    pub source_dir: String,
    // Extra arguments appended to the `cmake` configure invocation.
    // NOTE(review): the generic argument of `Vec` appears stripped by
    // extraction (the template `join(" ")` suggests `Vec<String>`) — confirm.
    pub extra_args: Vec,
}

/// Platform family of the build machine, serialized in kebab-case so the
/// template can compare against the string "windows".
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum BuildPlatform {
    Windows,
    Unix,
}

impl BuildScriptContext {
    /// Renders the embedded build-script template with this context and
    /// returns the script split into individual lines.
    pub fn render(&self) -> Vec {
        let env = Environment::new();
        let template = env
            .template_from_str(include_str!("build_script.j2"))
            .unwrap();
        let rendered = template.render(self).unwrap().to_string();
        rendered.lines().map(|s| s.to_string()).collect()
    }
}
-------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/cmake.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::BTreeMap, 3 | path::{Path, PathBuf}, 4 | str::FromStr, 5 | }; 6 | 7 | use crate::{ 8 | build_script::{BuildPlatform, BuildScriptContext}, 9 | config::CMakeBackendConfig, 10 | }; 11 | use miette::IntoDiagnostic; 12 | use pixi_build_backend::common::{PackageRequirements, SourceRequirements}; 13 | use pixi_build_backend::{ProjectModel, Targets}; 14 | use pixi_build_backend::{ 15 | common::{BuildConfigurationParams, requirements}, 16 | compilers::default_compiler, 17 | traits::{Dependencies, project::new_spec}, 18 | }; 19 | use rattler_build::metadata::Debug; 20 | use rattler_build::recipe::parser::{BuildString, ScriptContent}; 21 | use rattler_build::{ 22 | NormalizedKey, 23 | console_utils::LoggingOutputHandler, 24 | hash::HashInfo, 25 | metadata::{BuildConfiguration, PackagingSettings}, 26 | recipe::{ 27 | Recipe, 28 | parser::{Build, Dependency, Package, Script}, 29 | variable::Variable, 30 | }, 31 | }; 32 | use rattler_conda_types::{MatchSpec, NoArchType, PackageName, Platform, package::ArchiveType}; 33 | use rattler_package_streaming::write::CompressionLevel; 34 | 35 | pub struct CMakeBuildBackend { 36 | pub(crate) logging_output_handler: LoggingOutputHandler, 37 | pub(crate) manifest_path: PathBuf, 38 | pub(crate) manifest_root: PathBuf, 39 | pub(crate) project_model: P, 40 | pub(crate) config: CMakeBackendConfig, 41 | pub(crate) cache_dir: Option, 42 | } 43 | 44 | impl CMakeBuildBackend

{ 45 | /// Returns a new instance of [`CMakeBuildBackend`] by reading the manifest 46 | /// at the given path. 47 | pub fn new( 48 | manifest_path: &Path, 49 | project_model: P, 50 | config: CMakeBackendConfig, 51 | logging_output_handler: LoggingOutputHandler, 52 | cache_dir: Option, 53 | ) -> miette::Result { 54 | // Determine the root directory of the manifest 55 | let manifest_root = manifest_path 56 | .parent() 57 | .ok_or_else(|| miette::miette!("the project manifest must reside in a directory"))? 58 | .to_path_buf(); 59 | 60 | Ok(Self { 61 | manifest_path: manifest_path.to_path_buf(), 62 | manifest_root, 63 | project_model, 64 | config, 65 | logging_output_handler, 66 | cache_dir, 67 | }) 68 | } 69 | 70 | /// Returns the matchspecs for the compiler packages. That should be 71 | /// included in the build section of the recipe. 72 | fn compiler_packages(&self, target_platform: Platform) -> Vec { 73 | let mut compilers = vec![]; 74 | 75 | for lang in self.languages() { 76 | if let Some(name) = default_compiler(target_platform, &lang) { 77 | // TODO: Read this from variants 78 | // TODO: Read the version specification from variants 79 | let compiler_package = 80 | PackageName::new_unchecked(format!("{name}_{target_platform}")); 81 | compilers.push(MatchSpec::from(compiler_package)); 82 | } 83 | 84 | // TODO: stdlib?? 85 | } 86 | 87 | compilers 88 | } 89 | 90 | /// Returns the languages that are used in the cmake project. These define 91 | /// which compilers are required to build the project. 92 | fn languages(&self) -> Vec { 93 | // TODO: Can we figure this out from looking at the CMake? 94 | vec!["cxx".to_string()] 95 | } 96 | 97 | /// Constructs a [`Recipe`] from the current manifest. The constructed 98 | /// recipe will invoke CMake to build and install the package. 99 | pub(crate) fn recipe( 100 | &self, 101 | host_platform: Platform, 102 | variant: &BTreeMap, 103 | ) -> miette::Result<(Recipe, SourceRequirements

)> { 104 | // Parse the package name from the manifest 105 | let project_model = &self.project_model; 106 | let name = PackageName::from_str(project_model.name()).into_diagnostic()?; 107 | let version = self.project_model.version().clone().ok_or_else(|| { 108 | miette::miette!("a version is missing from the package but it is required") 109 | })?; 110 | 111 | let noarch_type = NoArchType::none(); 112 | 113 | let requirements = self.requirements(host_platform, variant)?; 114 | 115 | let build_platform = Platform::current(); 116 | let build_number = 0; 117 | 118 | let build_script = BuildScriptContext { 119 | build_platform: if build_platform.is_windows() { 120 | BuildPlatform::Windows 121 | } else { 122 | BuildPlatform::Unix 123 | }, 124 | source_dir: self.manifest_root.display().to_string(), 125 | extra_args: self.config.extra_args.clone(), 126 | } 127 | .render(); 128 | 129 | let hash_info = HashInfo::from_variant(variant, &noarch_type); 130 | 131 | Ok(( 132 | Recipe { 133 | schema_version: 1, 134 | context: Default::default(), 135 | package: Package { 136 | version: version.into(), 137 | name, 138 | }, 139 | cache: None, 140 | // source: vec![Source::Path(PathSource { 141 | // // TODO: How can we use a git source? 
142 | // path: manifest_root.to_path_buf(), 143 | // sha256: None, 144 | // md5: None, 145 | // patches: vec![], 146 | // target_directory: None, 147 | // file_name: None, 148 | // use_gitignore: true, 149 | // })], 150 | // We hack the source location 151 | source: vec![], 152 | build: Build { 153 | number: build_number, 154 | string: BuildString::Resolved(BuildString::compute(&hash_info, build_number)), 155 | 156 | // skip: Default::default(), 157 | script: Script { 158 | content: ScriptContent::Commands(build_script), 159 | env: self.config.env.clone(), 160 | ..Default::default() 161 | }, 162 | noarch: noarch_type, 163 | 164 | // TODO: Python is not exposed properly 165 | //python: Default::default(), 166 | // dynamic_linking: Default::default(), 167 | // always_copy_files: Default::default(), 168 | // always_include_files: Default::default(), 169 | // merge_build_and_host_envs: false, 170 | // variant: Default::default(), 171 | // prefix_detection: Default::default(), 172 | // post_process: vec![], 173 | // files: Default::default(), 174 | ..Build::default() 175 | }, 176 | // TODO read from manifest 177 | requirements: requirements.requirements, 178 | tests: vec![], 179 | about: Default::default(), 180 | extra: Default::default(), 181 | }, 182 | requirements.source, 183 | )) 184 | } 185 | 186 | pub(crate) fn requirements( 187 | &self, 188 | host_platform: Platform, 189 | variant: &BTreeMap, 190 | ) -> miette::Result> { 191 | let project_model = &self.project_model; 192 | let dependencies = project_model.dependencies(Some(host_platform)); 193 | 194 | let build_tools = build_tools(); 195 | let empty_spec = new_spec::

(); 196 | let dependencies = add_build_tools::

(dependencies, &build_tools, &empty_spec); 197 | 198 | let mut package_requirements = requirements::

(dependencies, variant)?; 199 | 200 | package_requirements.requirements.build.extend( 201 | self.compiler_packages(host_platform) 202 | .into_iter() 203 | .map(Dependency::Spec), 204 | ); 205 | 206 | Ok(package_requirements) 207 | } 208 | } 209 | 210 | /// Returns the build tools that are required to build the project. 211 | pub(crate) fn build_tools() -> Vec { 212 | vec!["cmake".to_string(), "ninja".to_string()] 213 | } 214 | 215 | /// Adds the build tools to the dependencies. 216 | pub(crate) fn add_build_tools<'a, P: ProjectModel>( 217 | mut dependencies: Dependencies<'a, ::Spec>, 218 | build_tools: &'a [String], 219 | empty_spec: &'a <

::Targets as Targets>::Spec, 220 | ) -> Dependencies<'a, <

::Targets as Targets>::Spec> { 221 | for pkg_name in build_tools.iter() { 222 | if dependencies.build.contains_key(pkg_name) { 223 | // If the host dependencies already contain the package, we don't need to add it 224 | // again. 225 | continue; 226 | } 227 | 228 | dependencies.build.insert(pkg_name, empty_spec); 229 | } 230 | 231 | dependencies 232 | } 233 | 234 | pub(crate) fn construct_configuration( 235 | recipe: &Recipe, 236 | params: BuildConfigurationParams, 237 | ) -> BuildConfiguration { 238 | BuildConfiguration { 239 | target_platform: params.host_platform.platform, 240 | host_platform: params.host_platform, 241 | build_platform: params.build_platform, 242 | hash: HashInfo::from_variant(¶ms.variant, &recipe.build.noarch), 243 | variant: params.variant, 244 | directories: params.directories, 245 | channels: params.channels, 246 | channel_priority: Default::default(), 247 | solve_strategy: Default::default(), 248 | timestamp: chrono::Utc::now(), 249 | subpackages: Default::default(), // TODO: ??? 
250 | packaging_settings: PackagingSettings::from_args( 251 | ArchiveType::Conda, 252 | CompressionLevel::default(), 253 | ), 254 | store_recipe: false, 255 | force_colors: true, 256 | sandbox_config: None, 257 | debug: Debug::new(false), 258 | } 259 | } 260 | 261 | #[cfg(test)] 262 | mod tests { 263 | use std::collections::BTreeMap; 264 | 265 | use indexmap::IndexMap; 266 | use pixi_build_type_conversions::to_project_model_v1; 267 | use pixi_manifest::Manifests; 268 | use rattler_build::{console_utils::LoggingOutputHandler, recipe::Recipe}; 269 | use rattler_conda_types::{ChannelConfig, Platform}; 270 | use tempfile::tempdir; 271 | 272 | use crate::{cmake::CMakeBuildBackend, config::CMakeBackendConfig}; 273 | 274 | fn recipe(manifest_source: &str, config: CMakeBackendConfig) -> Recipe { 275 | let tmp_dir = tempdir().unwrap(); 276 | let tmp_manifest = tmp_dir.path().join("pixi.toml"); 277 | std::fs::write(&tmp_manifest, manifest_source).unwrap(); 278 | let manifest = Manifests::from_workspace_manifest_path(tmp_manifest.clone()).unwrap(); 279 | let package = manifest.value.package.unwrap(); 280 | let channel_config = ChannelConfig::default_with_root_dir(tmp_dir.path().to_path_buf()); 281 | let project_model = to_project_model_v1(&package.value, &channel_config).unwrap(); 282 | 283 | let cmake_backend = CMakeBuildBackend::new( 284 | &tmp_manifest, 285 | project_model, 286 | config, 287 | LoggingOutputHandler::default(), 288 | None, 289 | ) 290 | .unwrap(); 291 | 292 | cmake_backend 293 | .recipe(Platform::current(), &BTreeMap::new()) 294 | .unwrap() 295 | .0 296 | } 297 | 298 | #[tokio::test] 299 | async fn test_setting_host_and_build_requirements() { 300 | // get cargo manifest dir 301 | 302 | let package_with_host_and_build_deps = r#" 303 | [workspace] 304 | name = "test-reqs" 305 | channels = ["conda-forge"] 306 | platforms = ["osx-arm64"] 307 | preview = ["pixi-build"] 308 | 309 | [package] 310 | name = "test-reqs" 311 | version = "1.0" 312 | 313 | 
[package.host-dependencies] 314 | hatchling = "*" 315 | 316 | [package.build-dependencies] 317 | boltons = "*" 318 | 319 | [package.run-dependencies] 320 | foobar = "==3.2.1" 321 | 322 | [package.build] 323 | backend = { name = "pixi-build-python", version = "*" } 324 | "#; 325 | 326 | let tmp_dir = tempdir().unwrap(); 327 | let tmp_manifest = tmp_dir.path().join("pixi.toml"); 328 | 329 | // write the raw string into the file 330 | std::fs::write(&tmp_manifest, package_with_host_and_build_deps).unwrap(); 331 | 332 | let manifest = Manifests::from_workspace_manifest_path(tmp_manifest).unwrap(); 333 | let package = manifest.value.package.unwrap(); 334 | let channel_config = ChannelConfig::default_with_root_dir(tmp_dir.path().to_path_buf()); 335 | let project_model = to_project_model_v1(&package.value, &channel_config).unwrap(); 336 | let cmake_backend = CMakeBuildBackend::new( 337 | &package.provenance.path, 338 | project_model, 339 | CMakeBackendConfig::default(), 340 | LoggingOutputHandler::default(), 341 | None, 342 | ) 343 | .unwrap(); 344 | 345 | let host_platform = Platform::current(); 346 | 347 | let (recipe, _source_requirements) = cmake_backend 348 | .recipe(host_platform, &BTreeMap::new()) 349 | .unwrap(); 350 | insta::with_settings!({ 351 | filters => vec![ 352 | ("(vs2017|vs2019|gxx|clang).*", "\"[ ... compiler ... ]\""), 353 | ] 354 | }, { 355 | insta::assert_yaml_snapshot!(recipe, { 356 | ".build.script" => "[ ... script ... 
]", 357 | }); 358 | }); 359 | } 360 | 361 | #[tokio::test] 362 | async fn test_parsing_subdirectory() { 363 | // a manifest with subdir 364 | 365 | let package_with_git_and_subdir = r#" 366 | [workspace] 367 | name = "test-reqs" 368 | channels = ["conda-forge"] 369 | platforms = ["osx-arm64"] 370 | preview = ["pixi-build"] 371 | 372 | [package] 373 | name = "test-reqs" 374 | version = "1.0" 375 | 376 | [package.build] 377 | backend = { name = "pixi-build-python", version = "*" } 378 | 379 | [package.host-dependencies] 380 | hatchling = { git = "git+https://github.com/hatchling/hatchling.git", subdirectory = "src" } 381 | "#; 382 | 383 | let tmp_dir = tempdir().unwrap(); 384 | let tmp_manifest = tmp_dir.path().join("pixi.toml"); 385 | 386 | // write the raw string into the file 387 | std::fs::write(&tmp_manifest, package_with_git_and_subdir).unwrap(); 388 | 389 | Manifests::from_workspace_manifest_path(tmp_manifest).unwrap(); 390 | } 391 | 392 | #[test] 393 | fn test_env_vars_are_set() { 394 | let manifest_source = r#" 395 | [workspace] 396 | platforms = [] 397 | channels = [] 398 | preview = ["pixi-build"] 399 | 400 | [package] 401 | name = "foobar" 402 | version = "0.1.0" 403 | 404 | [package.build] 405 | backend = { name = "pixi-build-rust", version = "*" } 406 | "#; 407 | 408 | let env = IndexMap::from([("foo".to_string(), "bar".to_string())]); 409 | 410 | let recipe = recipe( 411 | manifest_source, 412 | CMakeBackendConfig { 413 | env: env.clone(), 414 | ..Default::default() 415 | }, 416 | ); 417 | 418 | assert_eq!(recipe.build.script.env, env); 419 | } 420 | } 421 | -------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/config.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use indexmap::IndexMap; 4 | use serde::Deserialize; 5 | 6 | #[derive(Debug, Default, Deserialize)] 7 | #[serde(rename_all = "kebab-case")] 8 | pub struct 
CMakeBackendConfig { 9 | /// Extra args for CMake invocation 10 | #[serde(default)] 11 | pub extra_args: Vec, 12 | /// Environment Variables 13 | #[serde(default)] 14 | pub env: IndexMap, 15 | /// If set, internal state will be logged as files in that directory 16 | pub debug_dir: Option, 17 | } 18 | 19 | #[cfg(test)] 20 | mod tests { 21 | use serde_json::json; 22 | 23 | use super::CMakeBackendConfig; 24 | 25 | #[test] 26 | fn test_ensure_deseralize_from_empty() { 27 | let json_data = json!({}); 28 | serde_json::from_value::(json_data).unwrap(); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/main.rs: -------------------------------------------------------------------------------- 1 | mod build_script; 2 | mod cmake; 3 | mod config; 4 | mod protocol; 5 | 6 | use protocol::CMakeBuildBackendInstantiator; 7 | 8 | #[tokio::main] 9 | pub async fn main() { 10 | if let Err(err) = pixi_build_backend::cli::main(CMakeBuildBackendInstantiator::new).await { 11 | eprintln!("{err:?}"); 12 | std::process::exit(1); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/protocol.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | path::{Path, PathBuf}, 3 | str::FromStr, 4 | sync::Arc, 5 | }; 6 | 7 | use miette::{Context, IntoDiagnostic}; 8 | use pixi_build_backend::{ 9 | PackageSourceSpec, 10 | common::{build_configuration, compute_variants}, 11 | protocol::{Protocol, ProtocolInstantiator}, 12 | utils::TemporaryRenderedRecipe, 13 | }; 14 | use pixi_build_types::{ 15 | BackendCapabilities, CondaPackageMetadata, PlatformAndVirtualPackages, ProjectModelV1, 16 | procedures::{ 17 | conda_build::{ 18 | CondaBuildParams, CondaBuildResult, CondaBuiltPackage, CondaOutputIdentifier, 19 | }, 20 | conda_metadata::{CondaMetadataParams, CondaMetadataResult}, 21 | 
initialize::{InitializeParams, InitializeResult}, 22 | negotiate_capabilities::{NegotiateCapabilitiesParams, NegotiateCapabilitiesResult}, 23 | }, 24 | }; 25 | use rattler_build::{ 26 | build::run_build, 27 | console_utils::LoggingOutputHandler, 28 | hash::HashInfo, 29 | metadata::{Directories, Output}, 30 | recipe::{Jinja, parser::BuildString, variable::Variable}, 31 | render::resolved_dependencies::DependencyInfo, 32 | tool_configuration::Configuration, 33 | }; 34 | use rattler_conda_types::{ChannelConfig, MatchSpec, PackageName, Platform}; 35 | 36 | use crate::{ 37 | cmake::{CMakeBuildBackend, construct_configuration}, 38 | config::CMakeBackendConfig, 39 | }; 40 | 41 | fn input_globs() -> Vec { 42 | [ 43 | // Source files 44 | "**/*.{c,cc,cxx,cpp,h,hpp,hxx}", 45 | // CMake files 46 | "**/*.{cmake,cmake.in}", 47 | "**/CMakeFiles.txt", 48 | ] 49 | .iter() 50 | .map(|s| s.to_string()) 51 | .collect() 52 | } 53 | 54 | pub struct CMakeBuildBackendInstantiator { 55 | logging_output_handler: LoggingOutputHandler, 56 | } 57 | 58 | impl CMakeBuildBackendInstantiator { 59 | pub fn new(logging_output_handler: LoggingOutputHandler) -> Self { 60 | Self { 61 | logging_output_handler, 62 | } 63 | } 64 | } 65 | #[async_trait::async_trait] 66 | impl Protocol for CMakeBuildBackend { 67 | fn debug_dir(&self) -> Option<&Path> { 68 | self.config.debug_dir.as_deref() 69 | } 70 | 71 | async fn conda_get_metadata( 72 | &self, 73 | params: CondaMetadataParams, 74 | ) -> miette::Result { 75 | let channel_config = ChannelConfig { 76 | channel_alias: params.channel_configuration.base_url, 77 | root_dir: self.manifest_root.to_path_buf(), 78 | }; 79 | let channels = params.channel_base_urls.unwrap_or_default(); 80 | 81 | let host_platform = params 82 | .host_platform 83 | .as_ref() 84 | .map(|p| p.platform) 85 | .unwrap_or(Platform::current()); 86 | 87 | // Build the tool configuration 88 | let tool_config = Arc::new( 89 | Configuration::builder() 90 | 
.with_opt_cache_dir(self.cache_dir.clone()) 91 | .with_logging_output_handler(self.logging_output_handler.clone()) 92 | .with_channel_config(channel_config.clone()) 93 | .with_testing(false) 94 | .with_keep_build(true) 95 | .finish(), 96 | ); 97 | 98 | let package_name = PackageName::from_str(&self.project_model.name) 99 | .into_diagnostic() 100 | .context("`{name}` is not a valid package name")?; 101 | 102 | let directories = Directories::setup( 103 | package_name.as_normalized(), 104 | &self.manifest_path, 105 | ¶ms.work_directory, 106 | true, 107 | &chrono::Utc::now(), 108 | ) 109 | .into_diagnostic() 110 | .context("failed to setup build directories")?; 111 | 112 | // Create a variant config from the variant configuration in the parameters. 113 | let input_variant_configuration = params.variant_configuration.map(|v| { 114 | v.into_iter() 115 | .map(|(k, v)| { 116 | ( 117 | k.into(), 118 | v.into_iter().map(|v| Variable::from_string(&v)).collect(), 119 | ) 120 | }) 121 | .collect() 122 | }); 123 | let variant_combinations = compute_variants( 124 | &self.project_model, 125 | input_variant_configuration, 126 | host_platform, 127 | )?; 128 | 129 | // Construct the different outputs 130 | let mut packages = Vec::new(); 131 | for variant in variant_combinations { 132 | // TODO: Determine how and if we can determine this from the manifest. 
133 | let (recipe, source_requirements) = self.recipe(host_platform, &variant)?; 134 | let build_configuration_params = build_configuration( 135 | channels.clone(), 136 | params.build_platform.clone(), 137 | params.host_platform.clone(), 138 | variant.clone(), 139 | directories.clone(), 140 | )?; 141 | let build_configuration = construct_configuration(&recipe, build_configuration_params); 142 | let output = Output { 143 | build_configuration, 144 | recipe, 145 | finalized_dependencies: None, 146 | finalized_cache_dependencies: None, 147 | finalized_cache_sources: None, 148 | finalized_sources: None, 149 | build_summary: Arc::default(), 150 | system_tools: Default::default(), 151 | extra_meta: None, 152 | }; 153 | 154 | let temp_recipe = TemporaryRenderedRecipe::from_output(&output)?; 155 | let tool_config = tool_config.clone(); 156 | let output = temp_recipe 157 | .within_context_async(move || async move { 158 | output 159 | .resolve_dependencies(&tool_config) 160 | .await 161 | .into_diagnostic() 162 | }) 163 | .await?; 164 | 165 | let selector_config = output.build_configuration.selector_config(); 166 | 167 | let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context); 168 | 169 | let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch()); 170 | let build_string = output.recipe.build().string().resolve( 171 | &hash, 172 | output.recipe.build().number(), 173 | &jinja, 174 | ); 175 | 176 | let finalized_deps = &output 177 | .finalized_dependencies 178 | .as_ref() 179 | .expect("dependencies should be resolved at this point") 180 | .run; 181 | 182 | packages.push(CondaPackageMetadata { 183 | name: output.name().clone(), 184 | version: output.version().clone(), 185 | build: build_string.to_string(), 186 | build_number: output.recipe.build.number, 187 | subdir: output.build_configuration.target_platform, 188 | depends: finalized_deps 189 | .depends 190 | .iter() 191 | .map(DependencyInfo::spec) 192 | 
.map(MatchSpec::to_string) 193 | .collect(), 194 | constraints: finalized_deps 195 | .constraints 196 | .iter() 197 | .map(DependencyInfo::spec) 198 | .map(MatchSpec::to_string) 199 | .collect(), 200 | license: output.recipe.about.license.map(|l| l.to_string()), 201 | license_family: output.recipe.about.license_family, 202 | noarch: output.recipe.build.noarch, 203 | sources: source_requirements 204 | .run 205 | .into_iter() 206 | .map(|(name, spec)| (name, spec.to_v1())) 207 | .collect(), 208 | }); 209 | } 210 | 211 | Ok(CondaMetadataResult { 212 | packages, 213 | input_globs: None, 214 | }) 215 | } 216 | 217 | async fn conda_build(&self, params: CondaBuildParams) -> miette::Result { 218 | let channel_config = ChannelConfig { 219 | channel_alias: params.channel_configuration.base_url, 220 | root_dir: self.manifest_root.to_path_buf(), 221 | }; 222 | let channels = params.channel_base_urls.unwrap_or_default(); 223 | let host_platform = params 224 | .host_platform 225 | .as_ref() 226 | .map(|p| p.platform) 227 | .unwrap_or_else(Platform::current); 228 | 229 | let package_name = PackageName::from_str(&self.project_model.name) 230 | .into_diagnostic() 231 | .context("`{name}` is not a valid package name")?; 232 | 233 | let directories = Directories::setup( 234 | package_name.as_normalized(), 235 | &self.manifest_path, 236 | ¶ms.work_directory, 237 | true, 238 | &chrono::Utc::now(), 239 | ) 240 | .into_diagnostic() 241 | .context("failed to setup build directories")?; 242 | 243 | // Recompute all the variant combinations 244 | let input_variant_configuration = params.variant_configuration.map(|v| { 245 | v.into_iter() 246 | .map(|(k, v)| { 247 | ( 248 | k.into(), 249 | v.into_iter().map(|v| Variable::from_string(&v)).collect(), 250 | ) 251 | }) 252 | .collect() 253 | }); 254 | let variant_combinations = compute_variants( 255 | &self.project_model, 256 | input_variant_configuration, 257 | host_platform, 258 | )?; 259 | 260 | // Compute outputs for each variant 261 | let 
mut outputs = Vec::with_capacity(variant_combinations.len()); 262 | for variant in variant_combinations { 263 | let (recipe, _source_requirements) = self.recipe(host_platform, &variant)?; 264 | 265 | let build_configuration_params = build_configuration( 266 | channels.clone(), 267 | Some(PlatformAndVirtualPackages { 268 | platform: Platform::current(), 269 | virtual_packages: params.build_platform_virtual_packages.clone(), 270 | }), 271 | Some(PlatformAndVirtualPackages { 272 | platform: host_platform, 273 | virtual_packages: params 274 | .host_platform 275 | .clone() 276 | .and_then(|p| p.virtual_packages), 277 | }), 278 | variant.clone(), 279 | directories.clone(), 280 | )?; 281 | let build_configuration = construct_configuration(&recipe, build_configuration_params); 282 | 283 | let mut output = Output { 284 | build_configuration, 285 | recipe, 286 | finalized_dependencies: None, 287 | finalized_cache_dependencies: None, 288 | finalized_cache_sources: None, 289 | finalized_sources: None, 290 | build_summary: Arc::default(), 291 | system_tools: Default::default(), 292 | extra_meta: None, 293 | }; 294 | 295 | // Resolve the build string 296 | let selector_config = output.build_configuration.selector_config(); 297 | let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context); 298 | let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch()); 299 | let build_string = output 300 | .recipe 301 | .build() 302 | .string() 303 | .resolve(&hash, output.recipe.build().number(), &jinja) 304 | .into_owned(); 305 | output.recipe.build.string = BuildString::Resolved(build_string); 306 | 307 | outputs.push(output); 308 | } 309 | 310 | // Setup tool configuration 311 | let tool_config = Arc::new( 312 | Configuration::builder() 313 | .with_opt_cache_dir(self.cache_dir.clone()) 314 | .with_logging_output_handler(self.logging_output_handler.clone()) 315 | .with_channel_config(channel_config.clone()) 316 | .with_testing(false) 317 | 
.with_keep_build(true) 318 | .finish(), 319 | ); 320 | 321 | // Determine the outputs to build 322 | let selected_outputs = if let Some(output_identifiers) = params.outputs { 323 | output_identifiers 324 | .into_iter() 325 | .filter_map(|iden| { 326 | let pos = outputs.iter().position(|output| { 327 | let CondaOutputIdentifier { 328 | name, 329 | version, 330 | build, 331 | subdir, 332 | } = &iden; 333 | name.as_ref() 334 | .is_none_or(|n| output.name().as_normalized() == n) 335 | && version 336 | .as_ref() 337 | .is_none_or(|v| output.version().to_string() == *v) 338 | && build 339 | .as_ref() 340 | .is_none_or(|b| output.build_string() == b.as_str()) 341 | && subdir 342 | .as_ref() 343 | .is_none_or(|s| output.target_platform().as_str() == s) 344 | })?; 345 | Some(outputs.remove(pos)) 346 | }) 347 | .collect() 348 | } else { 349 | outputs 350 | }; 351 | 352 | let mut packages = Vec::with_capacity(selected_outputs.len()); 353 | for output in selected_outputs { 354 | let temp_recipe = TemporaryRenderedRecipe::from_output(&output)?; 355 | let build_string = output 356 | .recipe 357 | .build 358 | .string 359 | .as_resolved() 360 | .expect("build string must have already been resolved") 361 | .to_string(); 362 | let tool_config = tool_config.clone(); 363 | let (output, package) = temp_recipe 364 | .within_context_async(move || async move { run_build(output, &tool_config).await }) 365 | .await?; 366 | let built_package = CondaBuiltPackage { 367 | output_file: package, 368 | input_globs: input_globs(), 369 | name: output.name().as_normalized().to_string(), 370 | version: output.version().to_string(), 371 | build: build_string.to_string(), 372 | subdir: output.target_platform().to_string(), 373 | }; 374 | packages.push(built_package); 375 | } 376 | 377 | Ok(CondaBuildResult { packages }) 378 | } 379 | } 380 | 381 | #[async_trait::async_trait] 382 | impl ProtocolInstantiator for CMakeBuildBackendInstantiator { 383 | fn debug_dir(configuration: Option) -> Option { 384 | 
configuration 385 | .and_then(|config| serde_json::from_value::(config).ok()) 386 | .and_then(|config| config.debug_dir) 387 | } 388 | 389 | async fn initialize( 390 | &self, 391 | params: InitializeParams, 392 | ) -> miette::Result<(Box, InitializeResult)> { 393 | let project_model = params 394 | .project_model 395 | .ok_or_else(|| miette::miette!("project model is required"))?; 396 | 397 | let project_model = project_model 398 | .into_v1() 399 | .ok_or_else(|| miette::miette!("project model v1 is required"))?; 400 | 401 | let config = if let Some(config) = params.configuration { 402 | serde_json::from_value(config) 403 | .into_diagnostic() 404 | .context("failed to parse configuration")? 405 | } else { 406 | CMakeBackendConfig::default() 407 | }; 408 | 409 | let instance = CMakeBuildBackend::new( 410 | params.manifest_path.as_path(), 411 | project_model, 412 | config, 413 | self.logging_output_handler.clone(), 414 | params.cache_directory, 415 | )?; 416 | 417 | Ok((Box::new(instance), InitializeResult {})) 418 | } 419 | 420 | async fn negotiate_capabilities( 421 | _params: NegotiateCapabilitiesParams, 422 | ) -> miette::Result { 423 | Ok(NegotiateCapabilitiesResult { 424 | capabilities: default_capabilities(), 425 | }) 426 | } 427 | } 428 | 429 | /// Returns the capabilities for this backend 430 | fn default_capabilities() -> BackendCapabilities { 431 | BackendCapabilities { 432 | provides_conda_metadata: Some(true), 433 | provides_conda_build: Some(true), 434 | highest_supported_project_model: Some( 435 | pixi_build_types::VersionedProjectModel::highest_version(), 436 | ), 437 | } 438 | } 439 | -------------------------------------------------------------------------------- /crates/pixi-build-cmake/src/snapshots/pixi_build_cmake__cmake__tests__setting_host_and_build_requirements.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-cmake/src/cmake.rs 3 | expression: recipe 4 | --- 5 | 
schema_version: 1 6 | context: {} 7 | package: 8 | name: test-reqs 9 | version: "1.0" 10 | build: 11 | number: 0 12 | string: hbf21a9e_0 13 | script: "[ ... script ... ]" 14 | requirements: 15 | build: 16 | - boltons 17 | - cmake 18 | - ninja 19 | - "[ ... compiler ... ]" 20 | host: 21 | - hatchling 22 | run: 23 | - foobar ==3.2.1 24 | -------------------------------------------------------------------------------- /crates/pixi-build-python/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pixi-build-python" 3 | version = "0.1.10" 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | async-trait = { workspace = true } 8 | chrono = { workspace = true } 9 | clap = { workspace = true, features = ["derive", "env"] } 10 | clap-verbosity-flag = { workspace = true } 11 | fs-err = { workspace = true } 12 | indexmap = { workspace = true } 13 | itertools = { workspace = true } 14 | log = { workspace = true } 15 | miette = { workspace = true } 16 | minijinja = { workspace = true } 17 | parking_lot = { workspace = true } 18 | rattler_conda_types = { workspace = true } 19 | rattler_package_streaming = { workspace = true } 20 | rattler_virtual_packages = { workspace = true } 21 | rattler-build = { workspace = true } 22 | reqwest = { workspace = true } 23 | reqwest-middleware = { workspace = true } 24 | serde = { workspace = true, features = ["derive"] } 25 | serde_yaml = { workspace = true } 26 | serde_json = { workspace = true } 27 | toml_edit = { workspace = true } 28 | tempfile = { workspace = true } 29 | tokio = { workspace = true, features = ["macros"] } 30 | tracing-subscriber = { workspace = true } 31 | url = { workspace = true } 32 | pyproject-toml = { workspace = true } 33 | dirs = { workspace = true } 34 | pathdiff = { workspace = true } 35 | 36 | pixi-build-backend = { workspace = true } 37 | 38 | pixi_build_types = { workspace = true } 39 | pixi_consts = { workspace = true } 40 | pixi_manifest = { 
workspace = true } 41 | pixi_spec = { workspace = true } 42 | pixi_build_type_conversions = { workspace = true } 43 | 44 | jsonrpc-stdio-server = { workspace = true } 45 | jsonrpc-http-server = { workspace = true } 46 | jsonrpc-core = { workspace = true } 47 | 48 | [dev-dependencies] 49 | insta = { version = "1.42.1", features = ["yaml", "redactions", "filters"] } 50 | toml_edit = { version = "0.22.24" } 51 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/build_script.j2: -------------------------------------------------------------------------------- 1 | {% set PYTHON="%PYTHON%" if build_platform == "windows" else "$PYTHON" -%} 2 | {% set SRC_DIR = manifest_root if editable else ("%SRC_DIR%" if build_platform == "windows" else "$SRC_DIR") -%} 3 | {% set EDITABLE_OPTION = " --editable" if editable else "" -%} 4 | {% set COMMON_OPTIONS = "-vv --no-deps --no-build-isolation" + EDITABLE_OPTION -%} 5 | 6 | {% if installer == "uv" -%} 7 | uv pip install --python {{ PYTHON }} {{ COMMON_OPTIONS }} {{ SRC_DIR }} 8 | {% else %} 9 | {{ PYTHON }} -m pip install --ignore-installed {{ COMMON_OPTIONS }} {{ SRC_DIR }} 10 | {% endif -%} 11 | 12 | {% if build_platform == "windows" -%} 13 | if errorlevel 1 exit 1 14 | {% endif %} 15 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/build_script.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use minijinja::Environment; 4 | use pixi_build_backend::{ProjectModel, Targets, traits::Dependencies}; 5 | use serde::Serialize; 6 | 7 | const UV: &str = "uv"; 8 | #[derive(Serialize)] 9 | pub struct BuildScriptContext { 10 | pub installer: Installer, 11 | pub build_platform: BuildPlatform, 12 | pub editable: bool, 13 | pub manifest_root: PathBuf, 14 | } 15 | 16 | #[derive(Default, Serialize)] 17 | #[serde(rename_all = "kebab-case")] 18 | pub enum 
Installer { 19 | Uv, 20 | #[default] 21 | Pip, 22 | } 23 | 24 | impl Installer { 25 | pub fn package_name(&self) -> &str { 26 | match self { 27 | Installer::Uv => "uv", 28 | Installer::Pip => "pip", 29 | } 30 | } 31 | 32 | pub fn determine_installer( 33 | dependencies: &Dependencies<<
P as ProjectModel>
::Targets as Targets>::Spec>, 34 | ) -> Installer { 35 | // Determine the installer to use 36 | let uv = UV.to_string(); 37 | if dependencies.host.contains_key(&uv) 38 | || dependencies.run.contains_key(&uv) 39 | || dependencies.build.contains_key(&uv) 40 | { 41 | Installer::Uv 42 | } else { 43 | Installer::Pip 44 | } 45 | } 46 | } 47 | 48 | #[derive(Serialize)] 49 | #[serde(rename_all = "kebab-case")] 50 | pub enum BuildPlatform { 51 | Windows, 52 | Unix, 53 | } 54 | 55 | impl BuildScriptContext { 56 | pub fn render(&self) -> Vec { 57 | let env = Environment::new(); 58 | let template = env 59 | .template_from_str(include_str!("build_script.j2")) 60 | .unwrap(); 61 | let rendered = template.render(self).unwrap().to_string(); 62 | rendered.lines().map(|s| s.to_string()).collect() 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/config.rs: -------------------------------------------------------------------------------- 1 | use indexmap::IndexMap; 2 | use serde::Deserialize; 3 | use std::{convert::identity, path::PathBuf}; 4 | 5 | #[derive(Debug, Default, Deserialize)] 6 | #[serde(rename_all = "kebab-case")] 7 | pub struct PythonBackendConfig { 8 | /// True if the package should be build as a python noarch package. Defaults 9 | /// to `true`. 10 | #[serde(default)] 11 | pub noarch: Option, 12 | /// Environment Variables 13 | #[serde(default)] 14 | pub env: IndexMap, 15 | /// If set, internal state will be logged as files in that directory 16 | pub debug_dir: Option, 17 | } 18 | 19 | impl PythonBackendConfig { 20 | /// Whether to build a noarch package or a platform-specific package. 
21 | pub fn noarch(&self) -> bool { 22 | self.noarch.is_none_or(identity) 23 | } 24 | } 25 | 26 | #[cfg(test)] 27 | mod tests { 28 | use super::PythonBackendConfig; 29 | use serde_json::json; 30 | 31 | #[test] 32 | fn test_ensure_deseralize_from_empty() { 33 | let json_data = json!({}); 34 | serde_json::from_value::(json_data).unwrap(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/main.rs: -------------------------------------------------------------------------------- 1 | mod build_script; 2 | mod config; 3 | mod protocol; 4 | mod python; 5 | 6 | use protocol::PythonBuildBackendInstantiator; 7 | 8 | #[tokio::main] 9 | pub async fn main() { 10 | if let Err(err) = pixi_build_backend::cli::main(PythonBuildBackendInstantiator::new).await { 11 | eprintln!("{err:?}"); 12 | std::process::exit(1); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__entry_point_nested.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build/src/bin/pixi-build-python/python.rs 3 | expression: entry_points.to_string() 4 | --- 5 | entry points should be a single key 6 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__entry_point_not_a_module.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build/src/bin/pixi-build-python/python.rs 3 | expression: entry_points.to_string() 4 | --- 5 | "failed to parse entry point spam-cli = blablabla: missing module and function separator" 6 | -------------------------------------------------------------------------------- 
/crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__entry_point_not_string.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build/src/bin/pixi-build-python/python.rs 3 | expression: entry_points.to_string() 4 | --- 5 | entry point value 1 should be a string 6 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__entry_points_are_read.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build/src/bin/pixi-build-python/python.rs 3 | expression: entry_points 4 | --- 5 | - "spam-cli = spam:main_cli" 6 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__noarch_none.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: "recipe(r#\"\n [workspace]\n platforms = []\n channels = []\n preview = [\"pixi-build\"]\n\n [package]\n name = \"foobar\"\n version = \"0.1.0\"\n\n [package.build]\n backend = { name = \"pixi-build-python\", version = \"*\" }\n \"#,\nPythonBackendConfig { noarch: Some(false), ..Default::default() })" 4 | --- 5 | schema_version: 1 6 | context: {} 7 | package: 8 | name: foobar 9 | version: 0.1.0 10 | source: 11 | - path: "[ ... path ... ]" 12 | filter: 13 | include: [] 14 | exclude: 15 | - ".pixi" 16 | build: 17 | number: 0 18 | string: hbf21a9e_0 19 | script: "[ ... script ... 
]" 20 | requirements: 21 | host: 22 | - pip 23 | - python 24 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__noarch_python.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: "recipe(r#\"\n [workspace]\n platforms = []\n channels = []\n preview = [\"pixi-build\"]\n\n [package]\n name = \"foobar\"\n version = \"0.1.0\"\n\n [package.build]\n backend = { name = \"pixi-build-python\", version = \"*\" }\n \"#,\nPythonBackendConfig::default())" 4 | --- 5 | schema_version: 1 6 | context: {} 7 | package: 8 | name: foobar 9 | version: 0.1.0 10 | source: 11 | - path: "[ ... path ... ]" 12 | filter: 13 | include: [] 14 | exclude: 15 | - ".pixi" 16 | build: 17 | number: 0 18 | string: pyhbf21a9e_0 19 | script: "[ ... script ... ]" 20 | noarch: python 21 | requirements: 22 | host: 23 | - pip 24 | - python 25 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__recipe_from_pyproject_toml.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: recipe 4 | --- 5 | schema_version: 1 6 | context: {} 7 | package: 8 | name: test-scripts 9 | version: 0.1.0 10 | source: 11 | - path: "[ ... path ... ]" 12 | filter: 13 | include: [] 14 | exclude: 15 | - ".pixi" 16 | build: 17 | number: 0 18 | string: pyhbf21a9e_0 19 | script: "[ ... script ... 
]" 20 | noarch: python 21 | python: 22 | entry_points: 23 | - "rich-example-main = rich_example:main" 24 | requirements: 25 | host: 26 | - pip 27 | - python 28 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__scripts_are_respected.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: recipe 4 | --- 5 | schema_version: 1 6 | context: {} 7 | package: 8 | name: test-scripts 9 | version: 1.2.3 10 | source: 11 | - path: "[ ... path ... ]" 12 | filter: 13 | include: [] 14 | exclude: 15 | - ".pixi" 16 | build: 17 | number: 0 18 | string: pyhbf21a9e_0 19 | script: "[ ... script ... ]" 20 | noarch: python 21 | python: 22 | entry_points: 23 | - "rich-example-main = rich_example:main" 24 | requirements: 25 | host: 26 | - pip 27 | - python 28 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__setting_host_and_build_requirements-2.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: source_requirements 4 | --- 5 | build: {} 6 | host: {} 7 | run: 8 | source: 9 | Path: 10 | path: src 11 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__setting_host_and_build_requirements.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-python/src/python.rs 3 | expression: recipe 4 | --- 5 | schema_version: 1 6 | context: {} 7 | package: 8 | name: test-reqs 9 | version: 1.2.3 10 | source: 11 | - path: "[ ... path ... 
]" 12 | filter: 13 | include: [] 14 | exclude: 15 | - ".pixi" 16 | build: 17 | number: 0 18 | string: pyhbf21a9e_0 19 | script: "[ ... script ... ]" 20 | noarch: python 21 | requirements: 22 | build: 23 | - boltons 24 | host: 25 | - hatchling 26 | - pip 27 | - python 28 | run: 29 | - foobar >=3.2.1 30 | - source 31 | -------------------------------------------------------------------------------- /crates/pixi-build-python/src/snapshots/pixi_build_python__python__tests__wrong_entry_points.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build/src/bin/pixi-build-python/python.rs 3 | expression: entry_points.to_string() 4 | --- 5 | entry points should be a single key 6 | -------------------------------------------------------------------------------- /crates/pixi-build-rattler-build/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pixi-build-rattler-build" 3 | version = "0.1.13" 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | async-trait = { workspace = true } 8 | fs-err = { workspace = true } 9 | miette = { workspace = true } 10 | rattler_conda_types = { workspace = true } 11 | rattler_virtual_packages = { workspace = true } 12 | rattler-build = { workspace = true } 13 | serde = { workspace = true, features = ["derive"] } 14 | serde_json = { workspace = true } 15 | tempfile = { workspace = true } 16 | tokio = { workspace = true, features = ["macros"] } 17 | url = { workspace = true } 18 | pathdiff = { workspace = true } 19 | 20 | pixi-build-backend = { workspace = true } 21 | 22 | pixi_build_types = { workspace = true } 23 | 24 | [dev-dependencies] 25 | -------------------------------------------------------------------------------- /crates/pixi-build-rattler-build/src/config.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use std::path::PathBuf; 3 | 4 | 
#[derive(Debug, Default, Deserialize)] 5 | #[serde(rename_all = "kebab-case")] 6 | pub struct RattlerBuildBackendConfig { 7 | /// If set, internal state will be logged as files in that directory 8 | pub debug_dir: Option, 9 | } 10 | 11 | #[cfg(test)] 12 | mod tests { 13 | use super::RattlerBuildBackendConfig; 14 | use serde_json::json; 15 | 16 | #[test] 17 | fn test_ensure_deseralize_from_empty() { 18 | let json_data = json!({}); 19 | serde_json::from_value::(json_data).unwrap(); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /crates/pixi-build-rattler-build/src/main.rs: -------------------------------------------------------------------------------- 1 | mod config; 2 | mod protocol; 3 | mod rattler_build; 4 | 5 | use protocol::RattlerBuildBackendInstantiator; 6 | 7 | #[tokio::main] 8 | pub async fn main() { 9 | if let Err(err) = pixi_build_backend::cli::main(RattlerBuildBackendInstantiator::new).await { 10 | eprintln!("{err:?}"); 11 | std::process::exit(1); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /crates/pixi-build-rattler-build/src/rattler_build.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ffi::OsStr, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | use miette::IntoDiagnostic; 7 | use pixi_build_backend::source::Source; 8 | use rattler_build::console_utils::LoggingOutputHandler; 9 | 10 | use crate::config::RattlerBuildBackendConfig; 11 | 12 | pub struct RattlerBuildBackend { 13 | pub(crate) logging_output_handler: LoggingOutputHandler, 14 | /// In case of rattler-build, manifest is the raw recipe 15 | /// We need to apply later the selectors to get the final recipe 16 | pub(crate) recipe_source: Source, 17 | pub(crate) manifest_root: PathBuf, 18 | pub(crate) cache_dir: Option, 19 | pub(crate) config: RattlerBuildBackendConfig, 20 | } 21 | 22 | impl RattlerBuildBackend { 23 | /// Returns a new instance 
of [`RattlerBuildBackend`] by reading the 24 | /// manifest at the given path. 25 | pub fn new( 26 | manifest_path: &Path, 27 | logging_output_handler: LoggingOutputHandler, 28 | cache_dir: Option, 29 | config: RattlerBuildBackendConfig, 30 | ) -> miette::Result { 31 | // Locate the recipe 32 | let manifest_file_name = manifest_path.file_name().and_then(OsStr::to_str); 33 | let recipe_path = match manifest_file_name { 34 | Some("recipe.yaml") | Some("recipe.yml") => manifest_path.to_path_buf(), 35 | _ => { 36 | // The manifest is not a recipe, so we need to find the recipe.yaml file. 37 | let recipe_path = manifest_path.parent().and_then(|manifest_dir| { 38 | [ 39 | "recipe.yaml", 40 | "recipe.yml", 41 | "recipe/recipe.yaml", 42 | "recipe/recipe.yml", 43 | ] 44 | .into_iter() 45 | .find_map(|relative_path| { 46 | let recipe_path = manifest_dir.join(relative_path); 47 | recipe_path.is_file().then_some(recipe_path) 48 | }) 49 | }); 50 | 51 | recipe_path.ok_or_else(|| miette::miette!("Could not find a recipe.yaml in the source directory to use as the recipe manifest."))? 
52 | } 53 | }; 54 | 55 | // Load the manifest from the source directory 56 | let manifest_root = manifest_path 57 | .parent() 58 | .expect("manifest must have a root") 59 | .to_path_buf(); 60 | let recipe_source = 61 | Source::from_rooted_path(&manifest_root, recipe_path).into_diagnostic()?; 62 | 63 | Ok(Self { 64 | logging_output_handler, 65 | recipe_source, 66 | manifest_root, 67 | cache_dir, 68 | config, 69 | }) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pixi-build-rust" 3 | version = "0.1.13" 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | async-trait = { workspace = true } 8 | chrono = { workspace = true } 9 | indexmap = { workspace = true } 10 | miette = { workspace = true } 11 | minijinja = { workspace = true, features = ["json"] } 12 | rattler_conda_types = { workspace = true } 13 | rattler_package_streaming = { workspace = true } 14 | rattler-build = { workspace = true } 15 | serde = { workspace = true, features = ["derive"] } 16 | serde_json = { workspace = true } 17 | tempfile = { workspace = true } 18 | tokio = { workspace = true, features = ["macros"] } 19 | 20 | pixi-build-backend = { workspace = true } 21 | 22 | pixi_build_types = { workspace = true } 23 | pixi_manifest = { workspace = true } 24 | pixi_build_type_conversions = { workspace = true } 25 | 26 | [dev-dependencies] 27 | insta = { version = "1.42.1", features = ["yaml", "redactions", "filters"] } 28 | rstest = { workspace = true } 29 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/pixi.lock: -------------------------------------------------------------------------------- 1 | version: 6 2 | environments: 3 | default: 4 | channels: 5 | - url: https://conda.anaconda.org/conda-forge/ 6 | packages: 7 | linux-64: 8 | - conda: . 
9 | osx-64: 10 | - conda: . 11 | osx-arm64: 12 | - conda: . 13 | win-64: 14 | - conda: . 15 | packages: 16 | - conda: . 17 | name: pixi-build-rust 18 | version: 0.1.0 19 | build: hbf21a9e_0 20 | subdir: noarch 21 | noarch: false 22 | input: 23 | hash: bbaea4afc16258b51b93235068288e0f3b0b07e502fc9342338977008bd63c7a 24 | globs: 25 | - pixi.toml 26 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/build_script.j2: -------------------------------------------------------------------------------- 1 | {% macro env(key) -%} 2 | {%- if is_bash %}{{ "$" ~key }}{% else %}{{ "%" ~ key ~ "%" }}{% endif -%} 3 | {% endmacro -%} 4 | {%- macro export(key, value) -%} 5 | {%- if is_bash -%} 6 | export {{ key }}={{ value }} 7 | {%- else -%} 8 | SET {{ key }}={{ value }} 9 | {%- endif -%} 10 | {%- endmacro -%} 11 | 12 | {%- if has_openssl %} 13 | {{ export("OPENSSL_DIR", env("PREFIX")|tojson) }} 14 | {%- endif %} 15 | {%- if has_sccache %} 16 | {{ export("RUSTC_WRAPPER", "sccache") }} 17 | {%- endif %} 18 | 19 | cargo install --locked --root "{{ env("PREFIX") }}" --path {{ source_dir }} --no-track {{ extra_args | join(" ") }} --force 20 | {%- if not is_bash %} 21 | if errorlevel 1 exit 1 22 | {%- endif %} 23 | 24 | {% if has_sccache %}sccache --show-stats{% endif %} 25 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/build_script.rs: -------------------------------------------------------------------------------- 1 | use minijinja::Environment; 2 | use serde::Serialize; 3 | 4 | #[derive(Serialize)] 5 | pub struct BuildScriptContext { 6 | /// The location of the source 7 | pub source_dir: String, 8 | 9 | /// Any additional args to pass to `cargo` 10 | pub extra_args: Vec, 11 | 12 | /// True if `openssl` is part of the build environment 13 | pub has_openssl: bool, 14 | 15 | /// True if `sccache` is available. 
16 | pub has_sccache: bool, 17 | 18 | /// The platform that is running the build. 19 | pub is_bash: bool, 20 | } 21 | 22 | impl BuildScriptContext { 23 | pub fn render(&self) -> Vec { 24 | let env = Environment::new(); 25 | let template = env 26 | .template_from_str(include_str!("build_script.j2")) 27 | .unwrap(); 28 | let rendered = template.render(self).unwrap().to_string(); 29 | rendered.lines().map(|s| s.to_string()).collect() 30 | } 31 | } 32 | 33 | #[cfg(test)] 34 | mod test { 35 | use rstest::*; 36 | 37 | #[rstest] 38 | fn test_build_script(#[values(true, false)] is_bash: bool) { 39 | let context = super::BuildScriptContext { 40 | source_dir: String::from("my-prefix-dir"), 41 | extra_args: vec![], 42 | has_openssl: false, 43 | has_sccache: false, 44 | is_bash, 45 | }; 46 | let script = context.render(); 47 | 48 | let mut settings = insta::Settings::clone_current(); 49 | settings.set_snapshot_suffix(if is_bash { "bash" } else { "cmdexe" }); 50 | settings.bind(|| { 51 | insta::assert_snapshot!(script.join("\n")); 52 | }); 53 | } 54 | 55 | #[rstest] 56 | fn test_sccache(#[values(true, false)] is_bash: bool) { 57 | let context = super::BuildScriptContext { 58 | source_dir: String::from("my-prefix-dir"), 59 | extra_args: vec![], 60 | has_openssl: false, 61 | has_sccache: true, 62 | is_bash, 63 | }; 64 | let script = context.render(); 65 | 66 | let mut settings = insta::Settings::clone_current(); 67 | settings.set_snapshot_suffix(if is_bash { "bash" } else { "cmdexe" }); 68 | settings.bind(|| { 69 | insta::assert_snapshot!(script.join("\n")); 70 | }); 71 | } 72 | 73 | #[rstest] 74 | fn test_openssl(#[values(true, false)] is_bash: bool) { 75 | let context = super::BuildScriptContext { 76 | source_dir: String::from("my-prefix-dir"), 77 | extra_args: vec![], 78 | has_openssl: true, 79 | has_sccache: false, 80 | is_bash, 81 | }; 82 | let script = context.render(); 83 | 84 | let mut settings = insta::Settings::clone_current(); 85 | settings.set_snapshot_suffix(if 
is_bash { "bash" } else { "cmdexe" }); 86 | settings.bind(|| { 87 | insta::assert_snapshot!(script.join("\n")); 88 | }); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/config.rs: -------------------------------------------------------------------------------- 1 | use indexmap::IndexMap; 2 | use std::path::PathBuf; 3 | 4 | use serde::Deserialize; 5 | 6 | #[derive(Debug, Default, Deserialize)] 7 | #[serde(rename_all = "kebab-case")] 8 | pub struct RustBackendConfig { 9 | /// Extra args to pass for cargo 10 | #[serde(default)] 11 | pub extra_args: Vec, 12 | /// Environment Variables 13 | #[serde(default)] 14 | pub env: IndexMap, 15 | /// If set, internal state will be logged as files in that directory 16 | pub debug_dir: Option, 17 | } 18 | 19 | #[cfg(test)] 20 | mod tests { 21 | use super::RustBackendConfig; 22 | use serde_json::json; 23 | 24 | #[test] 25 | fn test_ensure_deseralize_from_empty() { 26 | let json_data = json!({}); 27 | serde_json::from_value::(json_data).unwrap(); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/main.rs: -------------------------------------------------------------------------------- 1 | mod build_script; 2 | mod config; 3 | mod protocol; 4 | mod rust; 5 | 6 | use protocol::RustBackendInstantiator; 7 | 8 | #[tokio::main] 9 | pub async fn main() { 10 | if let Err(err) = pixi_build_backend::cli::main(RustBackendInstantiator::new).await { 11 | eprintln!("{err:?}"); 12 | std::process::exit(1); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/protocol.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | path::{Path, PathBuf}, 3 | str::FromStr, 4 | sync::Arc, 5 | }; 6 | 7 | use miette::{Context, IntoDiagnostic}; 8 | use pixi_build_backend::{ 9 | 
PackageSourceSpec, ProjectModel, 10 | common::{build_configuration, compute_variants}, 11 | protocol::{Protocol, ProtocolInstantiator}, 12 | utils::TemporaryRenderedRecipe, 13 | }; 14 | use pixi_build_types::{ 15 | BackendCapabilities, CondaPackageMetadata, PlatformAndVirtualPackages, 16 | procedures::{ 17 | conda_build::{ 18 | CondaBuildParams, CondaBuildResult, CondaBuiltPackage, CondaOutputIdentifier, 19 | }, 20 | conda_metadata::{CondaMetadataParams, CondaMetadataResult}, 21 | initialize::{InitializeParams, InitializeResult}, 22 | negotiate_capabilities::{NegotiateCapabilitiesParams, NegotiateCapabilitiesResult}, 23 | }, 24 | }; 25 | // use pixi_build_types as pbt; 26 | use rattler_build::{ 27 | build::run_build, 28 | console_utils::LoggingOutputHandler, 29 | hash::HashInfo, 30 | metadata::{Directories, Output}, 31 | recipe::{Jinja, parser::BuildString, variable::Variable}, 32 | render::resolved_dependencies::DependencyInfo, 33 | tool_configuration::Configuration, 34 | variant_config::VariantConfig, 35 | }; 36 | use rattler_conda_types::{ChannelConfig, MatchSpec, PackageName, Platform}; 37 | 38 | use crate::{ 39 | config::RustBackendConfig, 40 | rust::{RustBuildBackend, construct_configuration}, 41 | }; 42 | 43 | #[async_trait::async_trait] 44 | impl Protocol for RustBuildBackend
<P>
{ 45 | fn debug_dir(&self) -> Option<&Path> { 46 | self.config.debug_dir.as_deref() 47 | } 48 | 49 | async fn conda_get_metadata( 50 | &self, 51 | params: CondaMetadataParams, 52 | ) -> miette::Result { 53 | let channel_config = ChannelConfig { 54 | channel_alias: params.channel_configuration.base_url, 55 | root_dir: self.manifest_root.to_path_buf(), 56 | }; 57 | let channels = params.channel_base_urls.unwrap_or_default(); 58 | 59 | let host_platform = params 60 | .host_platform 61 | .as_ref() 62 | .map(|p| p.platform) 63 | .unwrap_or(Platform::current()); 64 | 65 | let package_name = PackageName::from_str(self.project_model.name()) 66 | .into_diagnostic() 67 | .context("`{name}` is not a valid package name")?; 68 | 69 | let directories = Directories::setup( 70 | package_name.as_normalized(), 71 | &self.manifest_path, 72 | ¶ms.work_directory, 73 | true, 74 | &chrono::Utc::now(), 75 | ) 76 | .into_diagnostic() 77 | .context("failed to setup build directories")?; 78 | 79 | // Build the tool configuration 80 | let tool_config = Arc::new( 81 | Configuration::builder() 82 | .with_opt_cache_dir(self.cache_dir.clone()) 83 | .with_logging_output_handler(self.logging_output_handler.clone()) 84 | .with_channel_config(channel_config.clone()) 85 | .with_testing(false) 86 | .with_keep_build(true) 87 | .finish(), 88 | ); 89 | 90 | // Create a variant config from the variant configuration in the parameters. 91 | let variants = params 92 | .variant_configuration 93 | .map(|v| { 94 | v.into_iter() 95 | .map(|(k, v)| { 96 | ( 97 | k.into(), 98 | v.into_iter().map(|v| Variable::from_string(&v)).collect(), 99 | ) 100 | }) 101 | .collect() 102 | }) 103 | .unwrap_or_default(); 104 | let variant_config = VariantConfig { 105 | variants, 106 | pin_run_as_build: None, 107 | zip_keys: None, 108 | }; 109 | 110 | // Determine the variant keys that are used in the recipe. 
111 | let used_variants = self.project_model.used_variants(Some(host_platform)); 112 | 113 | // Determine the combinations of the used variants. 114 | let combinations = variant_config 115 | .combinations(&used_variants, None) 116 | .into_diagnostic()?; 117 | 118 | // Construct the different outputs 119 | let mut packages = Vec::new(); 120 | for variant in combinations { 121 | // TODO: Determine how and if we can determine this from the manifest. 122 | let (recipe, source_requirements) = self.recipe(host_platform, &variant)?; 123 | let build_configuration_params = build_configuration( 124 | channels.clone(), 125 | params.build_platform.clone(), 126 | params.host_platform.clone(), 127 | variant.clone(), 128 | directories.clone(), 129 | )?; 130 | 131 | let build_configuration = construct_configuration(&recipe, build_configuration_params); 132 | 133 | let output = Output { 134 | build_configuration, 135 | recipe, 136 | finalized_dependencies: None, 137 | finalized_cache_dependencies: None, 138 | finalized_cache_sources: None, 139 | finalized_sources: None, 140 | build_summary: Arc::default(), 141 | system_tools: Default::default(), 142 | extra_meta: None, 143 | }; 144 | 145 | let temp_recipe = TemporaryRenderedRecipe::from_output(&output)?; 146 | let tool_config = tool_config.clone(); 147 | let output = temp_recipe 148 | .within_context_async(move || async move { 149 | output 150 | .resolve_dependencies(&tool_config) 151 | .await 152 | .into_diagnostic() 153 | }) 154 | .await?; 155 | 156 | let finalized_deps = &output 157 | .finalized_dependencies 158 | .as_ref() 159 | .expect("dependencies should be resolved at this point") 160 | .run; 161 | 162 | let selector_config = output.build_configuration.selector_config(); 163 | 164 | let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context); 165 | 166 | let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch()); 167 | let build_string = 
output.recipe.build().string().resolve( 168 | &hash, 169 | output.recipe.build().number(), 170 | &jinja, 171 | ); 172 | 173 | packages.push(CondaPackageMetadata { 174 | name: output.name().clone(), 175 | version: output.version().clone(), 176 | build: build_string.to_string(), 177 | build_number: output.recipe.build.number, 178 | subdir: output.build_configuration.target_platform, 179 | depends: finalized_deps 180 | .depends 181 | .iter() 182 | .map(DependencyInfo::spec) 183 | .map(MatchSpec::to_string) 184 | .collect(), 185 | constraints: finalized_deps 186 | .constraints 187 | .iter() 188 | .map(DependencyInfo::spec) 189 | .map(MatchSpec::to_string) 190 | .collect(), 191 | license: output.recipe.about.license.map(|l| l.to_string()), 192 | license_family: output.recipe.about.license_family, 193 | noarch: output.recipe.build.noarch, 194 | sources: source_requirements 195 | .run 196 | .into_iter() 197 | .map(|(k, spec)| (k, PackageSourceSpec::to_v1(spec))) 198 | .collect(), 199 | }); 200 | } 201 | 202 | Ok(CondaMetadataResult { 203 | packages, 204 | input_globs: None, 205 | }) 206 | } 207 | 208 | async fn conda_build(&self, params: CondaBuildParams) -> miette::Result { 209 | let channel_config = ChannelConfig { 210 | channel_alias: params.channel_configuration.base_url, 211 | root_dir: self.manifest_root.to_path_buf(), 212 | }; 213 | let channels = params.channel_base_urls.unwrap_or_default(); 214 | let host_platform = params 215 | .host_platform 216 | .as_ref() 217 | .map(|p| p.platform) 218 | .unwrap_or_else(Platform::current); 219 | 220 | let package_name = PackageName::from_str(self.project_model.name()) 221 | .into_diagnostic() 222 | .context("`{name}` is not a valid package name")?; 223 | 224 | let directories = Directories::setup( 225 | package_name.as_normalized(), 226 | &self.manifest_path, 227 | ¶ms.work_directory, 228 | true, 229 | &chrono::Utc::now(), 230 | ) 231 | .into_diagnostic() 232 | .context("failed to setup build directories")?; 233 | 234 | // 
Recompute all the variant combinations 235 | let input_variant_configuration = params.variant_configuration.map(|v| { 236 | v.into_iter() 237 | .map(|(k, v)| { 238 | ( 239 | k.into(), 240 | v.into_iter().map(|v| Variable::from_string(&v)).collect(), 241 | ) 242 | }) 243 | .collect() 244 | }); 245 | let variant_combinations = compute_variants( 246 | &self.project_model, 247 | input_variant_configuration, 248 | host_platform, 249 | )?; 250 | 251 | // Compute outputs for each variant 252 | let mut outputs = Vec::with_capacity(variant_combinations.len()); 253 | for variant in variant_combinations { 254 | let (recipe, _source_requirements) = self.recipe(host_platform, &variant)?; 255 | let build_configuration_params = build_configuration( 256 | channels.clone(), 257 | Some(PlatformAndVirtualPackages { 258 | platform: Platform::current(), 259 | virtual_packages: params.build_platform_virtual_packages.clone(), 260 | }), 261 | Some(PlatformAndVirtualPackages { 262 | platform: host_platform, 263 | virtual_packages: params 264 | .host_platform 265 | .clone() 266 | .and_then(|p| p.virtual_packages), 267 | }), 268 | variant.clone(), 269 | directories.clone(), 270 | )?; 271 | 272 | let build_configuration = construct_configuration(&recipe, build_configuration_params); 273 | 274 | let mut output = Output { 275 | build_configuration, 276 | recipe, 277 | finalized_dependencies: None, 278 | finalized_cache_dependencies: None, 279 | finalized_cache_sources: None, 280 | finalized_sources: None, 281 | build_summary: Arc::default(), 282 | system_tools: Default::default(), 283 | extra_meta: None, 284 | }; 285 | 286 | // Resolve the build string 287 | let selector_config = output.build_configuration.selector_config(); 288 | let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context); 289 | let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch()); 290 | let build_string = output 291 | .recipe 292 | .build() 293 | .string() 294 | 
.resolve(&hash, output.recipe.build().number(), &jinja) 295 | .into_owned(); 296 | output.recipe.build.string = BuildString::Resolved(build_string); 297 | 298 | outputs.push(output); 299 | } 300 | 301 | // Setup tool configuration 302 | let tool_config = Arc::new( 303 | Configuration::builder() 304 | .with_opt_cache_dir(self.cache_dir.clone()) 305 | .with_logging_output_handler(self.logging_output_handler.clone()) 306 | .with_channel_config(channel_config.clone()) 307 | .with_testing(false) 308 | .with_keep_build(true) 309 | .finish(), 310 | ); 311 | 312 | // Determine the outputs to build 313 | let selected_outputs = if let Some(output_identifiers) = params.outputs { 314 | output_identifiers 315 | .into_iter() 316 | .filter_map(|iden| { 317 | let pos = outputs.iter().position(|output| { 318 | let CondaOutputIdentifier { 319 | name, 320 | version, 321 | build, 322 | subdir, 323 | } = &iden; 324 | name.as_ref() 325 | .is_none_or(|n| output.name().as_normalized() == n) 326 | && version 327 | .as_ref() 328 | .is_none_or(|v| output.version().to_string() == *v) 329 | && build 330 | .as_ref() 331 | .is_none_or(|b| output.build_string() == b.as_str()) 332 | && subdir 333 | .as_ref() 334 | .is_none_or(|s| output.target_platform().as_str() == s) 335 | })?; 336 | Some(outputs.remove(pos)) 337 | }) 338 | .collect() 339 | } else { 340 | outputs 341 | }; 342 | 343 | let mut packages = Vec::with_capacity(selected_outputs.len()); 344 | for output in selected_outputs { 345 | let temp_recipe = TemporaryRenderedRecipe::from_output(&output)?; 346 | let build_string = output 347 | .recipe 348 | .build 349 | .string 350 | .as_resolved() 351 | .expect("build string must have already been resolved") 352 | .to_string(); 353 | let tool_config = tool_config.clone(); 354 | let (output, package) = temp_recipe 355 | .within_context_async(move || async move { run_build(output, &tool_config).await }) 356 | .await?; 357 | let built_package = CondaBuiltPackage { 358 | output_file: package, 359 | 
input_globs: input_globs(), 360 | name: output.name().as_normalized().to_string(), 361 | version: output.version().to_string(), 362 | build: build_string.to_string(), 363 | subdir: output.target_platform().to_string(), 364 | }; 365 | packages.push(built_package); 366 | } 367 | 368 | Ok(CondaBuildResult { packages }) 369 | } 370 | } 371 | 372 | /// Determines the build input globs for given python package 373 | /// even this will be probably backend specific, e.g setuptools 374 | /// has a different way of determining the input globs than hatch etc. 375 | /// 376 | /// However, lets take everything in the directory as input for now 377 | fn input_globs() -> Vec { 378 | [ 379 | "**/*.rs", 380 | // Cargo configuration files 381 | "Cargo.toml", 382 | "Cargo.lock", 383 | // Build scripts 384 | "build.rs", 385 | ] 386 | .iter() 387 | .map(|s| s.to_string()) 388 | .collect() 389 | } 390 | 391 | pub struct RustBackendInstantiator { 392 | logging_output_handler: LoggingOutputHandler, 393 | } 394 | 395 | impl RustBackendInstantiator { 396 | pub fn new(logging_output_handler: LoggingOutputHandler) -> Self { 397 | Self { 398 | logging_output_handler, 399 | } 400 | } 401 | } 402 | 403 | #[async_trait::async_trait] 404 | impl ProtocolInstantiator for RustBackendInstantiator { 405 | fn debug_dir(configuration: Option) -> Option { 406 | configuration 407 | .and_then(|config| serde_json::from_value::(config.clone()).ok()) 408 | .and_then(|config| config.debug_dir) 409 | } 410 | 411 | async fn initialize( 412 | &self, 413 | params: InitializeParams, 414 | ) -> miette::Result<(Box, InitializeResult)> { 415 | let project_model = params 416 | .project_model 417 | .ok_or_else(|| miette::miette!("project model is required"))?; 418 | 419 | let project_model = project_model 420 | .into_v1() 421 | .ok_or_else(|| miette::miette!("project model v1 is required"))?; 422 | 423 | let config = if let Some(config) = params.configuration { 424 | serde_json::from_value(config) 425 | 
.into_diagnostic() 426 | .context("failed to parse configuration")? 427 | } else { 428 | RustBackendConfig::default() 429 | }; 430 | 431 | let instance = RustBuildBackend::new( 432 | params.manifest_path, 433 | project_model, 434 | config, 435 | self.logging_output_handler.clone(), 436 | params.cache_directory, 437 | )?; 438 | 439 | Ok((Box::new(instance), InitializeResult {})) 440 | } 441 | 442 | async fn negotiate_capabilities( 443 | _params: NegotiateCapabilitiesParams, 444 | ) -> miette::Result { 445 | // Returns the capabilities of this backend based on the capabilities of 446 | // the frontend. 447 | Ok(NegotiateCapabilitiesResult { 448 | capabilities: default_capabilities(), 449 | }) 450 | } 451 | } 452 | 453 | fn default_capabilities() -> BackendCapabilities { 454 | BackendCapabilities { 455 | provides_conda_metadata: Some(true), 456 | provides_conda_build: Some(true), 457 | highest_supported_project_model: Some( 458 | pixi_build_types::VersionedProjectModel::highest_version(), 459 | ), 460 | } 461 | } 462 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/rust.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, path::PathBuf, str::FromStr}; 2 | 3 | use crate::{build_script::BuildScriptContext, config::RustBackendConfig}; 4 | use miette::IntoDiagnostic; 5 | use pixi_build_backend::common::{PackageRequirements, SourceRequirements}; 6 | use pixi_build_backend::{ 7 | ProjectModel, 8 | cache::{add_sccache, enable_sccache, sccache_tools}, 9 | common::{BuildConfigurationParams, requirements}, 10 | compilers::default_compiler, 11 | traits::project::new_spec, 12 | }; 13 | use rattler_build::metadata::Debug; 14 | use rattler_build::recipe::parser::BuildString; 15 | use rattler_build::{ 16 | NormalizedKey, 17 | console_utils::LoggingOutputHandler, 18 | hash::HashInfo, 19 | metadata::{BuildConfiguration, PackagingSettings}, 20 | recipe::{ 
21 | Recipe, 22 | parser::{Build, Dependency, Package, Script, ScriptContent}, 23 | variable::Variable, 24 | }, 25 | }; 26 | use rattler_conda_types::{MatchSpec, NoArchType, PackageName, Platform, package::ArchiveType}; 27 | use rattler_package_streaming::write::CompressionLevel; 28 | 29 | pub struct RustBuildBackend { 30 | pub(crate) logging_output_handler: LoggingOutputHandler, 31 | pub(crate) manifest_path: PathBuf, 32 | pub(crate) manifest_root: PathBuf, 33 | pub(crate) project_model: P, 34 | pub(crate) config: RustBackendConfig, 35 | pub(crate) cache_dir: Option, 36 | } 37 | 38 | impl RustBuildBackend

{ 39 | /// Returns a new instance of [`RustBuildBackend`]. 40 | pub fn new( 41 | manifest_path: PathBuf, 42 | project_model: P, 43 | config: RustBackendConfig, 44 | logging_output_handler: LoggingOutputHandler, 45 | cache_dir: Option, 46 | ) -> miette::Result { 47 | // Determine the root directory of the manifest 48 | let manifest_root = manifest_path 49 | .parent() 50 | .ok_or_else(|| miette::miette!("the project manifest must reside in a directory"))? 51 | .to_path_buf(); 52 | 53 | Ok(Self { 54 | manifest_path: manifest_path.to_path_buf(), 55 | manifest_root, 56 | project_model, 57 | config, 58 | logging_output_handler, 59 | cache_dir, 60 | }) 61 | } 62 | 63 | /// Returns the matchspecs for the compiler packages. 64 | /// That should be included in the build section of the recipe. 65 | /// TODO: Should we also take into account other compilers like 66 | /// c or cxx? 67 | fn compiler_packages(&self, target_platform: Platform) -> Vec { 68 | let mut compilers = vec![]; 69 | 70 | if let Some(name) = default_compiler(target_platform, "rust") { 71 | // TODO: Read this from variants 72 | // TODO: Read the version specification from variants 73 | let compiler_package = PackageName::new_unchecked(format!("{name}_{target_platform}")); 74 | compilers.push(MatchSpec::from(compiler_package)); 75 | } 76 | 77 | compilers 78 | } 79 | 80 | /// Constructs a [`Recipe`] that will build the Rust package into a conda 81 | /// package. 82 | pub(crate) fn recipe( 83 | &self, 84 | host_platform: Platform, 85 | variant: &BTreeMap, 86 | ) -> miette::Result<(Recipe, SourceRequirements

)> { 87 | // Parse the package name and version from the manifest 88 | let name = PackageName::from_str(self.project_model.name()).into_diagnostic()?; 89 | let version = self.project_model.version().clone().ok_or_else(|| { 90 | miette::miette!("a version is missing from the package but it is required") 91 | })?; 92 | 93 | let noarch_type = NoArchType::none(); 94 | 95 | let (has_sccache, requirements) = self.requirements(host_platform, variant)?; 96 | 97 | let has_openssl = self 98 | .project_model 99 | .dependencies(Some(host_platform)) 100 | .contains(&"openssl".into()); 101 | 102 | let build_number = 0; 103 | 104 | let build_script = BuildScriptContext { 105 | source_dir: self.manifest_root.display().to_string(), 106 | extra_args: self.config.extra_args.clone(), 107 | has_openssl, 108 | has_sccache, 109 | is_bash: !Platform::current().is_windows(), 110 | } 111 | .render(); 112 | 113 | let hash_info = HashInfo::from_variant(variant, &noarch_type); 114 | 115 | Ok(( 116 | Recipe { 117 | schema_version: 1, 118 | package: Package { 119 | version: version.into(), 120 | name, 121 | }, 122 | context: Default::default(), 123 | cache: None, 124 | // Sometimes Rust projects are part of a workspace, so we need to 125 | // include the entire source project and set the source directory 126 | // to the root of the package. 
127 | source: vec![], 128 | build: Build { 129 | number: build_number, 130 | string: BuildString::Resolved(BuildString::compute(&hash_info, build_number)), 131 | script: Script { 132 | content: ScriptContent::Commands(build_script), 133 | env: self.config.env.clone(), 134 | ..Default::default() 135 | }, 136 | noarch: noarch_type, 137 | ..Build::default() 138 | }, 139 | requirements: requirements.requirements, 140 | tests: vec![], 141 | about: Default::default(), 142 | extra: Default::default(), 143 | }, 144 | requirements.source, 145 | )) 146 | } 147 | 148 | pub(crate) fn requirements( 149 | &self, 150 | host_platform: Platform, 151 | variant: &BTreeMap, 152 | ) -> miette::Result<(bool, PackageRequirements

)> { 153 | let project_model = &self.project_model; 154 | let mut sccache_enabled = false; 155 | 156 | let mut dependencies = project_model.dependencies(Some(host_platform)); 157 | 158 | let empty_spec = new_spec::

(); 159 | 160 | let cache_tools = sccache_tools(); 161 | 162 | if enable_sccache(std::env::vars().collect()) { 163 | sccache_enabled = true; 164 | add_sccache::

(&mut dependencies, &cache_tools, &empty_spec); 165 | } 166 | 167 | let mut package_requirements = requirements::

(dependencies, variant)?; 168 | 169 | package_requirements.requirements.build.extend( 170 | self.compiler_packages(host_platform) 171 | .into_iter() 172 | .map(Dependency::Spec), 173 | ); 174 | 175 | Ok((sccache_enabled, package_requirements)) 176 | } 177 | } 178 | 179 | /// Construct a build configuration for the given recipe and parameters. 180 | pub(crate) fn construct_configuration( 181 | recipe: &Recipe, 182 | params: BuildConfigurationParams, 183 | ) -> BuildConfiguration { 184 | BuildConfiguration { 185 | target_platform: params.host_platform.platform, 186 | host_platform: params.host_platform, 187 | build_platform: params.build_platform, 188 | hash: HashInfo::from_variant(¶ms.variant, &recipe.build.noarch), 189 | variant: params.variant, 190 | directories: params.directories, 191 | channels: params.channels, 192 | channel_priority: Default::default(), 193 | solve_strategy: Default::default(), 194 | timestamp: chrono::Utc::now(), 195 | subpackages: Default::default(), // TODO: ??? 
196 | packaging_settings: PackagingSettings::from_args( 197 | ArchiveType::Conda, 198 | CompressionLevel::default(), 199 | ), 200 | store_recipe: false, 201 | force_colors: true, 202 | sandbox_config: None, 203 | debug: Debug::new(false), 204 | } 205 | } 206 | 207 | #[cfg(test)] 208 | mod tests { 209 | 210 | use std::collections::BTreeMap; 211 | 212 | use indexmap::IndexMap; 213 | use pixi_build_type_conversions::to_project_model_v1; 214 | 215 | use pixi_manifest::Manifests; 216 | use rattler_build::{console_utils::LoggingOutputHandler, recipe::Recipe}; 217 | use rattler_conda_types::{ChannelConfig, Platform}; 218 | use tempfile::tempdir; 219 | 220 | use crate::{config::RustBackendConfig, rust::RustBuildBackend}; 221 | 222 | fn recipe(manifest_source: &str, config: RustBackendConfig) -> Recipe { 223 | let tmp_dir = tempdir().unwrap(); 224 | let tmp_manifest = tmp_dir.path().join("pixi.toml"); 225 | std::fs::write(&tmp_manifest, manifest_source).unwrap(); 226 | let manifest = Manifests::from_workspace_manifest_path(tmp_manifest.clone()).unwrap(); 227 | let package = manifest.value.package.unwrap(); 228 | let channel_config = ChannelConfig::default_with_root_dir(tmp_dir.path().to_path_buf()); 229 | let project_model = to_project_model_v1(&package.value, &channel_config).unwrap(); 230 | 231 | let python_backend = RustBuildBackend::new( 232 | tmp_manifest, 233 | project_model, 234 | config, 235 | LoggingOutputHandler::default(), 236 | None, 237 | ) 238 | .unwrap(); 239 | 240 | let (recipe, _) = python_backend 241 | .recipe(Platform::current(), &BTreeMap::new()) 242 | .unwrap(); 243 | 244 | recipe 245 | } 246 | 247 | #[test] 248 | fn test_rust_is_in_build_requirements() { 249 | insta::assert_yaml_snapshot!(recipe(r#" 250 | [workspace] 251 | platforms = [] 252 | channels = [] 253 | preview = ["pixi-build"] 254 | 255 | [package] 256 | name = "foobar" 257 | version = "0.1.0" 258 | 259 | [package.build] 260 | backend = { name = "pixi-build-rust", version = "*" } 261 | "#, 
RustBackendConfig::default()), { 262 | ".source[0].path" => "[ ... path ... ]", 263 | ".build.script" => "[ ... script ... ]", 264 | ".requirements.build[0]" => insta::dynamic_redaction(|value, _path| { 265 | // assert that the value looks like a uuid here 266 | assert!(value.as_str().unwrap().contains("rust")); 267 | }), 268 | }); 269 | } 270 | 271 | #[test] 272 | fn test_env_vars_are_set() { 273 | let manifest_source = r#" 274 | [workspace] 275 | platforms = [] 276 | channels = [] 277 | preview = ["pixi-build"] 278 | 279 | [package] 280 | name = "foobar" 281 | version = "0.1.0" 282 | 283 | [package.build] 284 | backend = { name = "pixi-build-rust", version = "*" } 285 | "#; 286 | 287 | let env = IndexMap::from([("foo".to_string(), "bar".to_string())]); 288 | 289 | let recipe = recipe( 290 | manifest_source, 291 | RustBackendConfig { 292 | env: env.clone(), 293 | ..Default::default() 294 | }, 295 | ); 296 | 297 | assert_eq!(recipe.build.script.env, env); 298 | } 299 | } 300 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__build_script@bash.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | cargo install --locked --root "$PREFIX" --path my-prefix-dir --no-track --force 6 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__build_script@cmdexe.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | cargo install --locked --root "%PREFIX%" --path my-prefix-dir --no-track --force 6 | if errorlevel 1 exit 1 7 | 
-------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__openssl@bash.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | export OPENSSL_DIR="$PREFIX" 6 | 7 | cargo install --locked --root "$PREFIX" --path my-prefix-dir --no-track --force 8 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__openssl@cmdexe.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | SET OPENSSL_DIR="%PREFIX%" 6 | 7 | cargo install --locked --root "%PREFIX%" --path my-prefix-dir --no-track --force 8 | if errorlevel 1 exit 1 9 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__sccache@bash.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | export RUSTC_WRAPPER=sccache 6 | 7 | cargo install --locked --root "$PREFIX" --path my-prefix-dir --no-track --force 8 | 9 | sccache --show-stats 10 | -------------------------------------------------------------------------------- /crates/pixi-build-rust/src/snapshots/pixi_build_rust__build_script__test__sccache@cmdexe.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: crates/pixi-build-rust/src/build_script.rs 3 | expression: "script.join(\"\\n\")" 4 | --- 5 | SET RUSTC_WRAPPER=sccache 6 | 7 | cargo install --locked --root "%PREFIX%" --path my-prefix-dir 
#!/bin/bash
# Check intra-doc links for every target in the workspace by running rustdoc
# with warnings promoted to errors (-D warnings) and unreachable-pub enabled.
#
# `cargo metadata` lists every package; jq emits one tab-separated
# "package<TAB>kind<TAB>target-name" row per target, which the while-loop
# dispatches to the matching `cargo rustdoc` invocation.
cargo metadata --no-deps --format-version=1 \
  | jq -r '.packages[] | .name as $pname | .targets[] | [$pname, .kind[], .name] | @tsv' \
  | while IFS=$'\t' read -r package kind name; do
    case "$kind" in
      lib)
        # Library targets don't need the target name; --lib is unambiguous.
        cargo rustdoc -p "$package" --lib --all-features -- -D warnings -W unreachable-pub
        ;;
      bin)
        cargo rustdoc -p "$package" --bin "$name" --all-features -- -D warnings -W unreachable-pub
        ;;
      example)
        cargo rustdoc -p "$package" --example "$name" --all-features -- -D warnings -W unreachable-pub
        ;;
      test)
        cargo rustdoc -p "$package" --test "$name" --all-features -- -D warnings -W unreachable-pub
        ;;
      bench)
        cargo rustdoc -p "$package" --bench "$name" --all-features -- -D warnings -W unreachable-pub
        ;;
    esac
  done
-------------------------------------------------------------------------------- /pixi.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pixi-build" 3 | version = "0.1.0" 4 | description = "Binary for building pixi projects into packages" 5 | authors = ["Tim de Jager "] 6 | channels = ["https://prefix.dev/conda-forge"] 7 | platforms = ["osx-arm64", "win-64", "linux-64", "osx-64"] 8 | 9 | [tasks] 10 | run = "cargo run" 11 | run-release = "cargo run --release" 12 | build = "cargo build" 13 | build-release = "cargo build --release" 14 | nextest = "cargo nextest run --workspace --all-targets" 15 | doctest = "cargo test --doc" 16 | test = [{ task = "nextest" }, { task = "doctest" }] 17 | generate-matrix = "python scripts/generate-matrix.py" 18 | 19 | 20 | install-pixi-build-python = { cmd = "cargo install --path crates/pixi-build-python --locked --force" } 21 | install-pixi-build-cmake = { cmd = "cargo install --path crates/pixi-build-cmake --locked --force" } 22 | install-pixi-build-rattler-build = { cmd = "cargo install --path crates/pixi-build-rattler-build --locked --force" } 23 | install-pixi-build-rust = { cmd = "cargo install --path crates/pixi-build-rust --locked --force" } 24 | install-pixi-backends = { depends-on = [ 25 | "install-pixi-build-python", 26 | "install-pixi-build-cmake", 27 | "install-pixi-build-rattler-build", 28 | "install-pixi-build-rust", 29 | ] } 30 | 31 | 32 | [dependencies] 33 | rust = ">=1.86.0,<1.87" 34 | python = ">=3.12.4,<4" 35 | openssl = ">=3.5.0,<4" 36 | compilers = "1.9.0.*" 37 | pkg-config = ">=0.29.2,<0.30" 38 | libzlib = ">=1.3.1,<2" 39 | liblzma = ">=5.8.1,<6" 40 | cargo-nextest = ">=0.9.96,<0.10" 41 | 42 | [activation] 43 | env.CARGO_TARGET_DIR = "target/pixi" 44 | 45 | 46 | [target.linux-64.dependencies] 47 | clang = ">=20.1.5,<20.2" 48 | mold = ">=2.33.0,<3.0" 49 | patchelf = "==0.17.2" 50 | 51 | [target.linux-64.activation] 52 | scripts = ["scripts/activate.sh"] 
53 | 54 | [feature.schema.dependencies] 55 | pydantic = ">=2.8.2,<3" 56 | ruff = ">=0.11.11,<0.12" 57 | 58 | [feature.build.dependencies] 59 | rattler-build = ">=0.30.0,<1" 60 | 61 | [feature.build.tasks.build-recipe-ci] 62 | cmd = "rattler-build build --test native --output-dir={{ output_dir }} --recipe {{ recipe }} --target-platform {{ target_platform }}" 63 | args = ["output_dir", "recipe", "target_platform"] 64 | 65 | [feature.build.tasks.build-recipe] 66 | depends-on = [ 67 | { task = "build-recipe-ci", args = [ 68 | "./output", 69 | "{{ recipe }}", 70 | "{{ target_platform }}", 71 | ] }, 72 | ] 73 | args = ["recipe", "target_platform"] 74 | 75 | 76 | [feature.schema.tasks] 77 | generate-schema = "python schema/model.py > schema/schema.json" 78 | fmt-schema = "ruff format schema" 79 | lint-schema = "ruff check schema --fix" 80 | 81 | [feature.lint.dependencies] 82 | actionlint = ">=1.7.4,<2" 83 | pre-commit = ">=4.2,<5" 84 | pre-commit-hooks = ">=5,<6" 85 | shellcheck = ">=0.10.0,<0.11" 86 | taplo = ">=0.10,<0.11" 87 | typos = ">=1.23.1,<2" 88 | 89 | [feature.lint.tasks] 90 | actionlint = { cmd = "actionlint", env = { SHELLCHECK_OPTS = "-e SC2086" } } 91 | lint = "pre-commit run --all-files --hook-stage=manual" 92 | pre-commit-install = "pre-commit install" 93 | pre-commit-run = "pre-commit run --all-files" 94 | toml-format = { cmd = "taplo fmt", env = { RUST_LOG = "warn" } } 95 | toml-lint = "taplo lint --verbose **/pixi.toml" 96 | cargo-clippy = "cargo clippy --all-targets --workspace -- -D warnings" 97 | cargo-fmt = "cargo fmt --all" 98 | 99 | 100 | [environments] 101 | default = { solve-group = "default" } 102 | lint = { features = ["lint"], solve-group = "default" } 103 | schema = { features = ["schema"], no-default-feature = true } 104 | build = { features = ["build"], no-default-feature = true } 105 | -------------------------------------------------------------------------------- /recipe/pixi-build-api-version.yaml: 
-------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json 2 | context: 3 | version: "0" 4 | 5 | 6 | package: 7 | name: "pixi-build-api-version" 8 | version: ${{ version }} 9 | 10 | build: 11 | noarch: generic 12 | 13 | about: 14 | description: "This package is used to communicate capabilities between Pixi and the backends" 15 | -------------------------------------------------------------------------------- /recipe/pixi-build-cmake.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json 2 | package: 3 | name: pixi-build-cmake 4 | version: "${{ env.get('PIXI_BUILD_CMAKE_VERSION', default='0.1.0dev') }}" 5 | 6 | source: 7 | path: .. 8 | 9 | requirements: 10 | build: 11 | - ${{ compiler("rust") }} 12 | host: 13 | - pkg-config 14 | - libzlib 15 | - liblzma 16 | - if: unix 17 | then: openssl 18 | run: 19 | - pixi-build-interface==0 20 | 21 | build: 22 | script: 23 | - if: osx and x86_64 24 | then: 25 | # use the default linker for osx-64 as we are hitting a bug with the conda-forge linker 26 | # https://github.com/rust-lang/rust/issues/140686 27 | - unset CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER 28 | 29 | - if: unix 30 | then: 31 | - export OPENSSL_DIR="$PREFIX" 32 | - cargo install --locked --root $PREFIX --path crates/pixi-build-cmake --no-track 33 | else: 34 | - cargo install --locked --root %PREFIX% --path crates/pixi-build-cmake --no-track 35 | files: 36 | - bin/pixi-build-cmake 37 | - bin/pixi-build-cmake.exe 38 | 39 | tests: 40 | - script: pixi-build-cmake --help 41 | -------------------------------------------------------------------------------- /recipe/pixi-build-python.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: 
$schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json 2 | package: 3 | name: pixi-build-python 4 | version: "${{ env.get('PIXI_BUILD_PYTHON_VERSION', default='0.1.0dev') }}" 5 | 6 | source: 7 | path: .. 8 | 9 | requirements: 10 | build: 11 | - ${{ compiler("rust") }} 12 | host: 13 | - pkg-config 14 | - libzlib 15 | - liblzma 16 | - if: unix 17 | then: openssl 18 | run: 19 | - pixi-build-interface==0 20 | 21 | build: 22 | script: 23 | - if: osx and x86_64 24 | then: 25 | # use the default linker for osx-64 as we are hitting a bug with the conda-forge linker 26 | # https://github.com/rust-lang/rust/issues/140686 27 | - unset CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER 28 | 29 | - if: unix 30 | then: 31 | - export OPENSSL_DIR="$PREFIX" 32 | - cargo install --locked --root $PREFIX --path crates/pixi-build-python --no-track 33 | else: 34 | - cargo install --locked --root %PREFIX% --path crates/pixi-build-python --no-track 35 | files: 36 | - bin/pixi-build-python 37 | - bin/pixi-build-python.exe 38 | 39 | tests: 40 | - script: pixi-build-python --help 41 | -------------------------------------------------------------------------------- /recipe/pixi-build-rattler-build.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json 2 | package: 3 | name: pixi-build-rattler-build 4 | version: "${{ env.get('PIXI_BUILD_RATTLER_BUILD_VERSION', default='0.1.0dev') }}" 5 | 6 | source: 7 | path: .. 
8 | 9 | requirements: 10 | build: 11 | - ${{ compiler("rust") }} 12 | host: 13 | - pkg-config 14 | - libzlib 15 | - liblzma 16 | - if: unix 17 | then: openssl 18 | run: 19 | - pixi-build-interface==0 20 | 21 | build: 22 | script: 23 | - if: osx and x86_64 24 | then: 25 | # use the default linker for osx-64 as we are hitting a bug with the conda-forge linker 26 | # https://github.com/rust-lang/rust/issues/140686 27 | - unset CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER 28 | 29 | - if: unix 30 | then: 31 | - export OPENSSL_DIR="$PREFIX" 32 | - cargo install --locked --root $PREFIX --path crates/pixi-build-rattler-build --no-track 33 | else: 34 | - cargo install --locked --root %PREFIX% --path crates/pixi-build-rattler-build --no-track 35 | files: 36 | - bin/pixi-build-rattler-build 37 | - bin/pixi-build-rattler-build.exe 38 | 39 | tests: 40 | - script: pixi-build-rattler-build --help 41 | -------------------------------------------------------------------------------- /recipe/pixi-build-rust.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json 2 | package: 3 | name: pixi-build-rust 4 | version: "${{ env.get('PIXI_BUILD_RUST_VERSION', default='0.1.0dev') }}" 5 | 6 | source: 7 | path: .. 
8 | 9 | requirements: 10 | build: 11 | - ${{ compiler("rust") }} 12 | host: 13 | - pkg-config 14 | - libzlib 15 | - liblzma 16 | - if: unix 17 | then: openssl 18 | run: 19 | - pixi-build-interface==0 20 | 21 | build: 22 | script: 23 | - if: osx and x86_64 24 | then: 25 | # use the default linker for osx-64 as we are hitting a bug with the conda-forge linker 26 | # https://github.com/rust-lang/rust/issues/140686 27 | - unset CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER 28 | 29 | - if: unix 30 | then: 31 | - export OPENSSL_DIR="$PREFIX" 32 | - cargo install --locked --root $PREFIX --path crates/pixi-build-rust --no-track 33 | else: 34 | - cargo install --locked --root %PREFIX% --path crates/pixi-build-rust --no-track 35 | files: 36 | - bin/pixi-build-rust 37 | - bin/pixi-build-rust.exe 38 | 39 | tests: 40 | - script: pixi-build-rust --help 41 | -------------------------------------------------------------------------------- /renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["config:recommended", ":maintainLockFilesMonthly"], 4 | "schedule": ["on tuesday"], 5 | "enabledManagers": ["github-actions", "pixi", "cargo"], 6 | "commitMessagePrefix": "chore(ci):", 7 | "ignorePaths": ["**/examples/**", "**/docs/**", "**/tests/**"], 8 | "packageRules": [ 9 | { 10 | "groupName": "GitHub Actions", 11 | "matchManagers": ["github-actions"] 12 | }, 13 | { 14 | "description": "We run multiple macOS runner versions on purpose since 13 runs on x86_64", 15 | "matchPackageNames": "macos", 16 | "matchManagers": ["github-actions"], 17 | "enabled": false 18 | }, 19 | { 20 | "groupName": "Pixi", 21 | "matchManagers": ["pixi"] 22 | }, 23 | { 24 | "groupName": "Pixi-Lock", 25 | "matchManagers": ["pixi"], 26 | "matchUpdateTypes": ["lockFileMaintenance"] 27 | }, 28 | { 29 | "description": "We want to update Rust manually and keep it in sync with rust-toolchain", 30 
| "matchPackageNames": "rust", 31 | "matchManagers": ["pixi"], 32 | "enabled": false 33 | }, 34 | { 35 | "groupName": "Cargo", 36 | "matchManagers": ["cargo"] 37 | }, 38 | { 39 | "groupName": "Cargo-Lock", 40 | "matchManagers": ["cargo"], 41 | "matchUpdateTypes": ["lockFileMaintenance"] 42 | }, 43 | { 44 | "description": "We want a separate PR for rattler crates", 45 | "matchPackageNames": ["rattler*", "file_url"], 46 | "matchManagers": ["cargo"], 47 | "enabled": false 48 | } 49 | ] 50 | } 51 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | 1.86.0 2 | -------------------------------------------------------------------------------- /schema/model.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from typing import Annotated, List, Union 5 | from pydantic import ( 6 | BaseModel, 7 | Field, 8 | TypeAdapter, 9 | DirectoryPath, 10 | UrlConstraints, 11 | constr, 12 | NonNegativeInt, 13 | AnyHttpUrl 14 | ) 15 | from pydantic_core import Url 16 | 17 | NonEmptyStr = constr(min_length=1) 18 | 19 | Platform = NonEmptyStr 20 | 21 | CondaUrl = Annotated[Url, UrlConstraints(allowed_schemes=['http', 'https', 'file'])] 22 | 23 | # TODO: Add regex maybe? 24 | PackageName = NonEmptyStr 25 | Version = NonEmptyStr 26 | 27 | 28 | # ============================================= 29 | # RPC: initialize 30 | # ============================================= 31 | class BackendCapabilities(BaseModel): 32 | providesCondaMetadata: bool | None = Field( 33 | None, 34 | description="Whether the backend is capable of providing metadata about source packages", 35 | ) 36 | 37 | 38 | class FrontendCapabilities(BaseModel): 39 | pass 40 | 41 | 42 | class InitializeParams(BaseModel): 43 | """ 44 | The params send as part of the `initialize` rpc method. 
The expected result is of type `InitializeResult`. 45 | """ 46 | 47 | sourceDir: DirectoryPath = Field( 48 | ..., 49 | description="An absolute path to the directory that contains the source files", 50 | ) 51 | 52 | capabilities: FrontendCapabilities = Field( 53 | ..., description="Capabilities provided by the frontend" 54 | ) 55 | 56 | 57 | class InitializeResult(BaseModel): 58 | """ 59 | The result of the `initialize` rpc method. 60 | """ 61 | 62 | capabilities: BackendCapabilities 63 | 64 | 65 | # ============================================= 66 | # RPC: condaMetadata 67 | # 68 | # This is used to determine metadata of the conda packages in the source directory. 69 | # ============================================= 70 | class CondaMetadataParams(BaseModel): 71 | targetPlatform: Platform | None = Field( 72 | None, 73 | description="The target platform, or the current platform if not specified", 74 | ) 75 | channelBaseUrls: List[CondaUrl] = Field(None, description="Urls of channels to use for any resolution.") 76 | 77 | 78 | class CondaPackageMetadata(BaseModel): 79 | name: PackageName = Field(..., description="The name of the package") 80 | version: Version = Field(..., description="The version of the package") 81 | build: NonEmptyStr = Field(..., description="The build string of the package") 82 | buildNumber: NonNegativeInt = Field( 83 | 0, description="The build number of the package" 84 | ) 85 | subdir: Platform = Field(..., description="The subdirectory of the package") 86 | depends: List[NonEmptyStr] | None = Field( 87 | None, description="The dependencies of the package" 88 | ) 89 | constrains: List[NonEmptyStr] | None = Field( 90 | None, description="Additional run constraints that apply to the package" 91 | ) 92 | license: NonEmptyStr | None = Field(None, description="The license of the package") 93 | licenseFamily: NonEmptyStr | None = Field( 94 | None, description="The license family of the package" 95 | ) 96 | 97 | 98 | class 
CondaMetadataResult(BaseModel): 99 | packages: List[CondaPackageMetadata] 100 | 101 | 102 | Schema = TypeAdapter( 103 | Union[InitializeParams, InitializeResult, CondaMetadataParams, CondaMetadataResult] 104 | ) 105 | 106 | 107 | if __name__ == "__main__": 108 | print(json.dumps(Schema.json_schema(), indent=2)) 109 | -------------------------------------------------------------------------------- /schema/schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$defs": { 3 | "BackendCapabilities": { 4 | "properties": { 5 | "providesCondaMetadata": { 6 | "anyOf": [ 7 | { 8 | "type": "boolean" 9 | }, 10 | { 11 | "type": "null" 12 | } 13 | ], 14 | "default": null, 15 | "description": "Whether the backend is capable of providing metadata about source packages", 16 | "title": "Providescondametadata" 17 | } 18 | }, 19 | "title": "BackendCapabilities", 20 | "type": "object" 21 | }, 22 | "CondaMetadataParams": { 23 | "properties": { 24 | "targetPlatform": { 25 | "anyOf": [ 26 | { 27 | "minLength": 1, 28 | "type": "string" 29 | }, 30 | { 31 | "type": "null" 32 | } 33 | ], 34 | "default": null, 35 | "description": "The target platform, or the current platform if not specified", 36 | "title": "Targetplatform" 37 | }, 38 | "channelBaseUrls": { 39 | "default": null, 40 | "description": "Urls of channels to use for any resolution.", 41 | "items": { 42 | "format": "uri", 43 | "minLength": 1, 44 | "type": "string" 45 | }, 46 | "title": "Channelbaseurls", 47 | "type": "array" 48 | } 49 | }, 50 | "title": "CondaMetadataParams", 51 | "type": "object" 52 | }, 53 | "CondaMetadataResult": { 54 | "properties": { 55 | "packages": { 56 | "items": { 57 | "$ref": "#/$defs/CondaPackageMetadata" 58 | }, 59 | "title": "Packages", 60 | "type": "array" 61 | } 62 | }, 63 | "required": [ 64 | "packages" 65 | ], 66 | "title": "CondaMetadataResult", 67 | "type": "object" 68 | }, 69 | "CondaPackageMetadata": { 70 | "properties": { 71 | "name": { 72 | 
"description": "The name of the package", 73 | "minLength": 1, 74 | "title": "Name", 75 | "type": "string" 76 | }, 77 | "version": { 78 | "description": "The version of the package", 79 | "minLength": 1, 80 | "title": "Version", 81 | "type": "string" 82 | }, 83 | "build": { 84 | "description": "The build string of the package", 85 | "minLength": 1, 86 | "title": "Build", 87 | "type": "string" 88 | }, 89 | "buildNumber": { 90 | "default": 0, 91 | "description": "The build number of the package", 92 | "minimum": 0, 93 | "title": "Buildnumber", 94 | "type": "integer" 95 | }, 96 | "subdir": { 97 | "description": "The subdirectory of the package", 98 | "minLength": 1, 99 | "title": "Subdir", 100 | "type": "string" 101 | }, 102 | "depends": { 103 | "anyOf": [ 104 | { 105 | "items": { 106 | "minLength": 1, 107 | "type": "string" 108 | }, 109 | "type": "array" 110 | }, 111 | { 112 | "type": "null" 113 | } 114 | ], 115 | "default": null, 116 | "description": "The dependencies of the package", 117 | "title": "Depends" 118 | }, 119 | "constrains": { 120 | "anyOf": [ 121 | { 122 | "items": { 123 | "minLength": 1, 124 | "type": "string" 125 | }, 126 | "type": "array" 127 | }, 128 | { 129 | "type": "null" 130 | } 131 | ], 132 | "default": null, 133 | "description": "Additional run constraints that apply to the package", 134 | "title": "Constrains" 135 | }, 136 | "license": { 137 | "anyOf": [ 138 | { 139 | "minLength": 1, 140 | "type": "string" 141 | }, 142 | { 143 | "type": "null" 144 | } 145 | ], 146 | "default": null, 147 | "description": "The license of the package", 148 | "title": "License" 149 | }, 150 | "licenseFamily": { 151 | "anyOf": [ 152 | { 153 | "minLength": 1, 154 | "type": "string" 155 | }, 156 | { 157 | "type": "null" 158 | } 159 | ], 160 | "default": null, 161 | "description": "The license family of the package", 162 | "title": "Licensefamily" 163 | } 164 | }, 165 | "required": [ 166 | "name", 167 | "version", 168 | "build", 169 | "subdir" 170 | ], 171 | 
"title": "CondaPackageMetadata", 172 | "type": "object" 173 | }, 174 | "FrontendCapabilities": { 175 | "properties": {}, 176 | "title": "FrontendCapabilities", 177 | "type": "object" 178 | }, 179 | "InitializeParams": { 180 | "description": "The params send as part of the `initialize` rpc method. The expected result is of type `InitializeResult`.", 181 | "properties": { 182 | "sourceDir": { 183 | "description": "An absolute path to the directory that contains the source files", 184 | "format": "directory-path", 185 | "title": "Sourcedir", 186 | "type": "string" 187 | }, 188 | "capabilities": { 189 | "allOf": [ 190 | { 191 | "$ref": "#/$defs/FrontendCapabilities" 192 | } 193 | ], 194 | "description": "Capabilities provided by the frontend" 195 | } 196 | }, 197 | "required": [ 198 | "sourceDir", 199 | "capabilities" 200 | ], 201 | "title": "InitializeParams", 202 | "type": "object" 203 | }, 204 | "InitializeResult": { 205 | "description": "The result of the `initialize` rpc method.", 206 | "properties": { 207 | "capabilities": { 208 | "$ref": "#/$defs/BackendCapabilities" 209 | } 210 | }, 211 | "required": [ 212 | "capabilities" 213 | ], 214 | "title": "InitializeResult", 215 | "type": "object" 216 | } 217 | }, 218 | "anyOf": [ 219 | { 220 | "$ref": "#/$defs/InitializeParams" 221 | }, 222 | { 223 | "$ref": "#/$defs/InitializeResult" 224 | }, 225 | { 226 | "$ref": "#/$defs/CondaMetadataParams" 227 | }, 228 | { 229 | "$ref": "#/$defs/CondaMetadataResult" 230 | } 231 | ] 232 | } 233 | -------------------------------------------------------------------------------- /scripts/activate.sh: -------------------------------------------------------------------------------- 1 | export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER="clang" 2 | export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=-fuse-ld=$CONDA_PREFIX/bin/mold" 3 | -------------------------------------------------------------------------------- /scripts/generate-matrix.py: 
"""Generate the GitHub Actions build matrix for the workspace's binary crates.

Prints a JSON list of matrix entries (crate name, version, target platform,
runner OS, env var name). When the current HEAD carries release tags of the
form ``pixi-build-[name]-v[version]``, the matrix is restricted to the tagged
crates and each tag's version is checked against the Cargo metadata.
"""

import subprocess
import json
import re


def get_git_tags():
    """Return the git tags pointing at HEAD, or an empty list when there are none."""
    try:
        result = subprocess.run(
            ["git", "tag", "--points-at", "HEAD"],
            capture_output=True,
            text=True,
            check=True,
        )
        return result.stdout.strip().splitlines()
    except subprocess.CalledProcessError:
        # if no tags are found, return an empty list
        return []


def extract_name_and_version_from_tag(tag):
    """Split a release tag into ``(name, version)``.

    Raises:
        ValueError: if the tag does not match ``pixi-build-[name]-v[version]``.
    """
    match = re.match(r"(pixi-build-[a-zA-Z-]+)-v(\d+\.\d+\.\d+)", tag)
    if match:
        return match.group(1), match.group(2)
    raise ValueError(
        f"Invalid Git tag format: {tag}. Expected format: pixi-build-[name]-v[version]"
    )


def generate_matrix():
    """Build and print the JSON matrix consumed by the release workflow."""
    # Ask cargo for workspace metadata; --no-deps skips dependency resolution.
    result = subprocess.run(
        ["cargo", "metadata", "--format-version=1", "--no-deps"],
        capture_output=True,
        text=True,
        check=True,
    )

    metadata = json.loads(result.stdout)
    if "packages" not in metadata:
        raise ValueError("No packages found using cargo metadata")

    # Explicit target/os pairs to overcome the issue of matrix generation from
    # the github actions side:
    # https://github.com/orgs/community/discussions/67591
    targets = [
        {"target": "linux-64", "os": "ubuntu-latest"},
        {"target": "linux-aarch64", "os": "ubuntu-latest"},
        {"target": "linux-ppc64le", "os": "ubuntu-latest"},
        {"target": "win-64", "os": "windows-latest"},
        {"target": "osx-64", "os": "macos-13"},
        {"target": "osx-arm64", "os": "macos-14"},
    ]

    git_tags = get_git_tags()
    # Track which release tags were matched by a workspace package.
    tagged_packages = {tag: False for tag in git_tags}

    matrix = []
    for package in metadata["packages"]:
        # Only crates that produce a binary are released. Previously this
        # check merely guarded the tag matching, so on an untagged HEAD
        # library-only crates leaked into the matrix; skip them outright.
        if not any(
            target["kind"][0] == "bin" for target in package.get("targets", [])
        ):
            continue

        package_tagged = False
        for git_tag in git_tags:
            # verify that the git tag matches the package version
            tag_name, tag_version = extract_name_and_version_from_tag(git_tag)
            if package["name"] != tag_name:
                continue  # Skip tags that belong to other packages

            if package["version"] != tag_version:
                raise ValueError(
                    f"Version mismatch: Git tag version {tag_version} does not match Cargo version {package['version']} for {package['name']}"
                )

            tagged_packages[git_tag] = package
            package_tagged = True

        # When HEAD is tagged, restrict the matrix to the tagged packages.
        if tagged_packages and not package_tagged:
            continue

        for target in targets:
            matrix.append(
                {
                    "bin": package["name"],
                    "target": target["target"],
                    "version": package["version"],
                    # Single quotes inside the f-string keep this valid on
                    # Python < 3.12 (nested double quotes require PEP 701).
                    "env_name": f"{package['name'].replace('-', '_').upper()}_VERSION",
                    "os": target["os"],
                }
            )

    # Every release tag must correspond to exactly one workspace package.
    for git_tag, matched in tagged_packages.items():
        if not matched:
            raise ValueError(
                f"Git tag {git_tag} does not match any package in Cargo.toml"
            )

    print(json.dumps(matrix))


if __name__ == "__main__":
    generate_matrix()
9b2998cd9525ed472079c7dd90fbd216a887202e8729d5969d4f33878f0ff668 14 | 15 | build: 16 | noarch: python 17 | script: 18 | - python -m pip install . --no-deps -vv 19 | 20 | requirements: 21 | host: 22 | # - if: linux 23 | # then: 24 | - python 25 | - pip 26 | - setuptools 27 | # - numpy 28 | # - ${{ stdlib('c') }} 29 | run: 30 | - pip 31 | # - ${{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }} 32 | 33 | about: 34 | license: BSD-3-Clause 35 | license_file: LICENSE 36 | --------------------------------------------------------------------------------