├── .github └── workflows │ ├── build-cmake-linux-armv7.yml │ ├── build-wheels-defined.yml │ ├── build-wheels-platforms.yml │ ├── build-wheels-python-dependent.yml │ ├── create-release.yml │ ├── danger.yml │ ├── issue_comment.yml │ ├── new_issues.yml │ ├── new_prs.yml │ ├── only-create-and-upload-index.yml │ └── upload-python-wheels.yml ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── _helper_functions.py ├── build_requirements.txt ├── build_wheels.py ├── build_wheels_from_file.py ├── create_index_pages.py ├── exclude_list.yaml ├── include_list.yaml ├── os_dependencies ├── linux_arm.sh ├── macos.sh └── ubuntu.sh ├── pyproject.toml ├── resources ├── espressif.ico ├── html │ ├── footer.html │ ├── header.html │ └── pretty_header.html ├── idf-python-wheels_diagram.svg └── idf-python-wheels_diagram_source.txt ├── test └── test_list.yaml ├── test_build_wheels.py ├── upload_wheels.py └── yaml_list_adapter.py /.github/workflows/build-cmake-linux-armv7.yml: -------------------------------------------------------------------------------- 1 | name: armv7-cmake-dispatch 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | cmake_version: 7 | description: > 8 | Cmake version to build and upload 9 | For example 3.23.1 10 | type: string 11 | required: true 12 | 13 | jobs: 14 | build-cmake: 15 | name: Build cmake for linux-armv7 16 | runs-on: 17 | - self-hosted 18 | - ARM 19 | container: 20 | image: ghcr.io/espressif/github-esp-dockerfiles/pyenv_rust_powershell:v2 21 | options: --privileged 22 | steps: 23 | - name: Checkout repository 24 | uses: actions/checkout@v2 25 | - name: Download and unpack cmake 26 | shell: bash 27 | run: | 28 | wget "https://github.com/Kitware/CMake/releases/download/v${{ inputs.cmake_version }}/cmake-${{ inputs.cmake_version }}.tar.gz" 29 | tar -xf "cmake-${{ inputs.cmake_version }}.tar.gz" 30 | - name: Build cmake 31 | shell: bash 32 | run: | 33 | cd "cmake-${{ inputs.cmake_version }}" 34 | mkdir cmake-build 35 | cd cmake-build 36 | ../bootstrap && make && make install 37 | - name: Create packages 38 | shell: bash 39 | working-directory: 'cmake-${{ inputs.cmake_version }}/cmake-build' 40 | run: cpack -G TGZ && cpack -G STGZ 41 | - name: Upload cmake to s3 42 | env: 43 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 44 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 45 | AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} 46 | AWS_BUCKET: ${{ secrets.DL_BUCKET }} 47 | PREFIX: 'dl/cmake' 48 | shell: bash 49 | working-directory: 'cmake-${{ inputs.cmake_version }}/cmake-build' 50 | run: | 51 | aws s3 cp --acl=public-read --no-progress "cmake-${{ inputs.cmake_version }}-Linux-armv7l.tar.gz" "s3://$AWS_BUCKET/dl/cmake/cmake-${{ inputs.cmake_version }}-Linux-armv7l.tar.gz" 52 | - name: Drop AWS cache 53 | id: invalidate-index-cache 54 | env: 55 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 56 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 57 | AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} 58 | run: aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CACHE_INVALIDATION }} --paths "/dl/cmake/*" 59 | -------------------------------------------------------------------------------- /.github/workflows/build-wheels-defined.yml: -------------------------------------------------------------------------------- 1 | name: defined-wheels 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | packages: 6 | description: > 7 | Generate wheels for given packages separated by space. 8 | Requirement specifiers and markers can be used. 
9 | For example esptool~=4.5 esp-coredump~=1.2;sys_platform!='win32' 10 | type: string 11 | required: true 12 | 13 | os_ubuntu_latest: 14 | description: Build on ubuntu-latest(x86_64) 15 | type: boolean 16 | required: false 17 | default: true 18 | os_windows_latest: 19 | description: Build on windows-latest(x86_64) 20 | type: boolean 21 | required: false 22 | default: true 23 | os_macos_latest: 24 | description: Build on macos-latest(x86_64) 25 | type: boolean 26 | required: false 27 | default: true 28 | os_macos_arm64: 29 | description: Build on macos M1(aarch64) 30 | type: boolean 31 | required: false 32 | default: true 33 | os_linux_armv7: 34 | description: Build on linux armv7(aarch32) 35 | type: boolean 36 | required: false 37 | default: true 38 | os_linux_arm64: 39 | description: Build on linux arm64(aarch64) 40 | type: boolean 41 | required: false 42 | default: true 43 | 44 | jobs: 45 | ubuntu-latest: 46 | name: linux x86_64 47 | if: ${{ inputs.os_ubuntu_latest }} 48 | runs-on: ubuntu-latest 49 | strategy: 50 | fail-fast: false 51 | matrix: 52 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 53 | steps: 54 | - name: Checkout repository 55 | uses: actions/checkout@v4 56 | 57 | - name: Setup Python 58 | uses: actions/setup-python@v5 59 | with: 60 | python-version: ${{ matrix.python-version}} 61 | 62 | - name: Get Python version 63 | run: | 64 | python --version 65 | python -m pip install --upgrade pip 66 | 67 | - name: Install build dependencies 68 | run: python -m pip install -r build_requirements.txt 69 | 70 | - name: Install additional OS dependencies - Ubuntu 71 | run: os_dependencies/ubuntu.sh 72 | 73 | - name: Build wheels 74 | run: | 75 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 76 | 77 | - name: Upload artifacts of downloaded_wheels directory 78 | uses: actions/upload-artifact@v4 79 | with: 80 | name: wheels-download-directory-ubuntu-${{ matrix.python-version }} 81 | path: ./downloaded_wheels 82 | 83 | 84 | windows-latest: 85 | name: windows x86_64 86 | if: ${{ inputs.os_windows_latest }} 87 | runs-on: windows-latest 88 | strategy: 89 | fail-fast: false 90 | matrix: 91 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 92 | steps: 93 | - name: Checkout repository 94 | uses: actions/checkout@v4 95 | 96 | - name: Setup Python 97 | uses: actions/setup-python@v5 98 | with: 99 | python-version: ${{ matrix.python-version}} 100 | 101 | - name: Get Python version 102 | run: | 103 | python --version 104 | python -m pip install --upgrade pip 105 | 106 | - name: Install build dependencies 107 | run: python -m pip install -r build_requirements.txt 108 | 109 | - name: Build wheels 110 | run: | 111 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 112 | 113 | - name: Upload artifacts of downloaded_wheels directory 114 | uses: actions/upload-artifact@v4 115 | with: 116 | name: wheels-download-directory-windows-${{ matrix.python-version }} 117 | path: ./downloaded_wheels 118 | 119 | 120 | macos-latest: 121 | name: macos x86_64 122 | if: ${{ inputs.os_macos_latest }} 123 | runs-on: macos-latest 124 | strategy: 125 | fail-fast: false 126 | matrix: 127 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 128 | steps: 129 | - name: Checkout repository 130 | uses: actions/checkout@v4 131 | 132 | - name: Setup Python 133 | uses: actions/setup-python@v5 134 | with: 135 | python-version: ${{ matrix.python-version}} 136 | 137 | - name: Get Python version 138 | run: | 139 | python --version 140 | python -m 
pip install --upgrade pip 141 | 142 | - name: Install build dependencies 143 | run: python -m pip install -r build_requirements.txt 144 | 145 | - name: Install additional OS dependencies - MacOS 146 | run: os_dependencies/macos.sh 147 | 148 | - name: Build wheels 149 | run: | 150 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 151 | 152 | - name: Upload artifacts of downloaded_wheels directory 153 | uses: actions/upload-artifact@v4 154 | with: 155 | name: wheels-download-directory-macos-x86-${{ matrix.python-version }} 156 | path: ./downloaded_wheels 157 | 158 | 159 | macos-m1: 160 | name: macos arm64 161 | if: ${{ inputs.os_macos_arm64 }} 162 | runs-on: macos-latest-xlarge # MacOS M1 GitHub beta runner - paid $0.16 163 | strategy: 164 | fail-fast: false 165 | matrix: 166 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 167 | steps: 168 | - name: Checkout repository 169 | uses: actions/checkout@v4 170 | 171 | - name: Setup Python 172 | # Temporary solution until Python version for build will be >= 3.10 (GitHub action support) 173 | if: matrix.python-version == '3.12' 174 | uses: actions/setup-python@v5 175 | with: 176 | python-version: ${{ matrix.python-version }} 177 | 178 | - name: Setup Python - MacOS M1 179 | # Temporary solution until Python version for build will be >= 3.10 (GitHub action support) 180 | if: matrix.python-version != '3.12' 181 | run: | 182 | brew install python@${{ matrix.python-version }} 183 | # change python symlink called with default command 'python' 184 | ln -s -f /opt/homebrew/bin/python${{ matrix.python-version }} /usr/local/bin/python 185 | 186 | - name: Get Python version 187 | run: | 188 | python --version 189 | python -m pip install --upgrade pip 190 | 191 | - name: Install build dependencies 192 | run: python -m pip install -r build_requirements.txt 193 | 194 | - name: Install additional OS dependencies - MacOS 195 | run: os_dependencies/macos.sh 196 | 197 | - name: Build wheels 198 | run: | 199 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 200 | 201 | - name: Upload artifacts of downloaded_wheels directory 202 | uses: actions/upload-artifact@v4 203 | with: 204 | name: wheels-download-directory-macos-arm64-${{ matrix.python-version }} 205 | path: ./downloaded_wheels 206 | 207 | 208 | linux-armv7: 209 | name: linux aarch32 (armv7) 210 | if: ${{ inputs.os_linux_armv7 }} 211 | runs-on: linux-armv7-self-hosted 212 | strategy: 213 | fail-fast: false 214 | matrix: 215 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 216 | include: 217 | - python-version: '3.8' 218 | CONTAINER: 'python:3.8-bullseye' 219 | - python-version: '3.9' 220 | CONTAINER: 'python:3.9-bullseye' 221 | - python-version: '3.10' 222 | CONTAINER: 'python:3.10-bullseye' 223 | - python-version: '3.11' 224 | CONTAINER: 'python:3.11-bullseye' 225 | - python-version: '3.12' 226 | CONTAINER: 'python:3.12-bullseye' 227 | - python-version: '3.13' 228 | CONTAINER: 'python:3.13-bullseye' 229 | container: ${{ matrix.CONTAINER }} 230 | steps: 231 | - name: Checkout repository 232 | uses: actions/checkout@v4 233 | 234 | - name: Get Python version 235 | run: | 236 | python --version 237 | python -m pip install --upgrade pip 238 | 239 | - name: Install build dependencies 240 | run: python -m pip install -r build_requirements.txt 241 | 242 | - name: Install additional OS dependencies - Linux ARM 243 | run: os_dependencies/linux_arm.sh 244 | 245 | - name: Build wheels 246 | run: | 247 | # Rust directory needs to be included for 
Linux ARM7 248 | . $HOME/.cargo/env 249 | 250 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 251 | 252 | - name: Upload artifacts of downloaded_wheels directory 253 | uses: actions/upload-artifact@v4 254 | with: 255 | name: wheels-download-directory-linux-arm7-${{ matrix.python-version }} 256 | path: ./downloaded_wheels 257 | 258 | 259 | linux-arm64: 260 | name: linux aarch64 (arm64) 261 | if: ${{ inputs.os_linux_arm64 }} 262 | runs-on: linux-arm64-self-hosted 263 | strategy: 264 | fail-fast: false 265 | matrix: 266 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] 267 | include: 268 | - python-version: '3.8' 269 | CONTAINER: 'python:3.8-bullseye' 270 | - python-version: '3.9' 271 | CONTAINER: 'python:3.9-bullseye' 272 | - python-version: '3.10' 273 | CONTAINER: 'python:3.10-bullseye' 274 | - python-version: '3.11' 275 | CONTAINER: 'python:3.11-bullseye' 276 | - python-version: '3.12' 277 | CONTAINER: 'python:3.12-bullseye' 278 | - python-version: '3.13' 279 | CONTAINER: 'python:3.13-bullseye' 280 | container: ${{ matrix.CONTAINER }} 281 | steps: 282 | - name: Checkout repository 283 | uses: actions/checkout@v4 284 | 285 | - name: Get Python version 286 | run: | 287 | python --version 288 | python -m pip install --upgrade pip 289 | 290 | - name: Install build dependencies 291 | run: python -m pip install -r build_requirements.txt 292 | 293 | - name: Install additional OS dependencies - Linux ARM 294 | run: os_dependencies/linux_arm.sh 295 | 296 | - name: Build wheels 297 | run: | 298 | python build_wheels_from_file.py --requirements ${{ inputs.packages }} 299 | 300 | - name: Upload artifacts of downloaded_wheels directory 301 | uses: actions/upload-artifact@v4 302 | with: 303 | name: wheels-download-directory-linux-arm64-${{ matrix.python-version }} 304 | path: ./downloaded_wheels 305 | 306 | 307 | upload-python-wheels: 308 | if: ${{ always() }} 309 | needs: [ubuntu-latest, windows-latest, macos-latest, macos-m1, linux-armv7, linux-arm64] 310 | name: Upload Python wheels 311 | uses: espressif/idf-python-wheels/.github/workflows/upload-python-wheels.yml@main 312 | secrets: inherit 313 | -------------------------------------------------------------------------------- /.github/workflows/build-wheels-platforms.yml: -------------------------------------------------------------------------------- 1 | name: platforms-dispatches 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 0,3' 6 | workflow_dispatch: 7 | 8 | env: 9 | MIN_IDF_MAJOR_VERSION: ${{ vars.MIN_IDF_MAJOR_VERSION }} 10 | MIN_IDF_MINOR_VERSION: ${{ vars.MIN_IDF_MINOR_VERSION }} 11 | GH_TOKEN : ${{ secrets.GITHUB_TOKEN }} 12 | 13 | jobs: 14 | build-wheels: 15 | name: Build for ${{ matrix.os }} (Python ${{matrix.python-version}}) 16 | runs-on: ${{ matrix.os }} 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | os: # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories 21 | - windows-latest 22 | - ubuntu-latest 23 | - macos-13 # MacOS x86_64 24 | - macos-latest # MacOS arm64 (M1) 25 | - linux-armv7-self-hosted 26 | - linux-arm64-self-hosted 27 | include: 28 | - os: linux-armv7-self-hosted 29 | CONTAINER: python:3.8-bullseye 30 | - os: linux-arm64-self-hosted 31 | CONTAINER: python:3.8-bullseye 32 | python-version: ['3.8'] 33 | 34 | # Use python container on ARM 35 | container: ${{ matrix.CONTAINER }} 36 | 37 | steps: 38 | - name: OS info 39 | if: matrix.os != 'windows-latest' 40 
| run: | 41 | echo "Operating System: ${{ runner.os }}" 42 | echo "Architecture: $(uname -m)" 43 | - name: OS info 44 | if: matrix.os == 'windows-latest' 45 | run: | 46 | echo "Operating System: ${{ runner.os }}" 47 | echo "Architecture: $env:PROCESSOR_ARCHITECTURE" 48 | 49 | 50 | - name: Checkout repository 51 | uses: actions/checkout@v4 52 | 53 | 54 | - name: Setup Python 55 | # Skip setting python on ARM because of missing compatibility: https://github.com/actions/setup-python/issues/108 56 | if: matrix.os != 'linux-armv7-self-hosted' && matrix.os != 'linux-arm64-self-hosted' 57 | uses: actions/setup-python@v5 58 | with: 59 | python-version: ${{ matrix.python-version }} 60 | 61 | 62 | - name: Get Python version 63 | run: | 64 | python --version 65 | python -m pip install --upgrade pip setuptools setuptools-scm 66 | pip show pip setuptools setuptools-scm 67 | 68 | 69 | - name: Install build dependencies 70 | run: python -m pip install -r build_requirements.txt 71 | 72 | 73 | - name: Install additional OS dependencies - Ubuntu 74 | if: matrix.os == 'ubuntu-latest' 75 | run: os_dependencies/ubuntu.sh 76 | 77 | - name: Install additional OS dependencies - MacOS 78 | if: matrix.os == 'macos-latest' || matrix.os == 'macos-13' 79 | run: os_dependencies/macos.sh 80 | 81 | - name: Install additional OS dependencies - Linux ARM 82 | if: matrix.os == 'linux-armv7-self-hosted' || matrix.os == 'linux-arm64-self-hosted' 83 | run: os_dependencies/linux_arm.sh 84 | 85 | 86 | - name: Build wheels for IDF 87 | if: matrix.os != 'windows-latest' 88 | run: | 89 | # Rust directory needs to be included for Linux ARM7 90 | if [ "${{ matrix.os }}" = "linux-armv7-self-hosted" ]; then 91 | . $HOME/.cargo/env 92 | fi 93 | 94 | python build_wheels.py 95 | 96 | - name: Build wheels for IDF - Windows 97 | if: matrix.os == 'windows-latest' 98 | run: | 99 | python build_wheels.py 100 | 101 | - name: Upload artifacts of downloaded_wheels directory 102 | uses: actions/upload-artifact@v4 103 | with: 104 | name: wheels-download-directory-${{ matrix.os}}-${{ matrix.python-version }} 105 | path: ./downloaded_wheels 106 | 107 | - name: Upload artifacts of Python version dependent wheels 108 | uses: actions/upload-artifact@v4 109 | with: 110 | name: dependent_requirements_${{ matrix.os}} 111 | path: ./dependent_requirements.txt 112 | 113 | 114 | build-python-version-dependent-wheels: 115 | needs: build-wheels 116 | name: Build Python version dependendent wheels for IDF 117 | uses: espressif/idf-python-wheels/.github/workflows/build-wheels-python-dependent.yml@main 118 | 119 | upload-python-wheels: 120 | needs: [build-wheels, build-python-version-dependent-wheels] 121 | name: Upload Python wheels 122 | uses: espressif/idf-python-wheels/.github/workflows/upload-python-wheels.yml@main 123 | secrets: inherit 124 | -------------------------------------------------------------------------------- /.github/workflows/build-wheels-python-dependent.yml: -------------------------------------------------------------------------------- 1 | name: Build Python version dependent wheels 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | triage: 8 | name: ${{ matrix.os }} - ${{ matrix.python-version }} 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | os: 14 | - windows-latest 15 | - ubuntu-latest 16 | - macos-13 # MacOS x86_64 17 | - macos-latest # MacOS arm64 (M1) 18 | - linux-armv7-self-hosted 19 | - linux-arm64-self-hosted 20 | python-version: 21 | - '3.9' 22 | - '3.10' 23 | - '3.11' 24 | - '3.12' 25 | - 
'3.13' 26 | include: 27 | - os: linux-armv7-self-hosted 28 | python-version: '3.9' 29 | CONTAINER: 'python:3.9-bullseye' 30 | - os: linux-armv7-self-hosted 31 | python-version: '3.10' 32 | CONTAINER: 'python:3.10-bullseye' 33 | - os: linux-armv7-self-hosted 34 | python-version: '3.11' 35 | CONTAINER: 'python:3.11-bullseye' 36 | - os: linux-armv7-self-hosted 37 | python-version: '3.12' 38 | CONTAINER: 'python:3.12-bullseye' 39 | - os: linux-armv7-self-hosted 40 | python-version: '3.13' 41 | CONTAINER: 'python:3.13-bullseye' 42 | 43 | - os: linux-arm64-self-hosted 44 | python-version: '3.9' 45 | CONTAINER: 'python:3.9-bullseye' 46 | - os: linux-arm64-self-hosted 47 | python-version: '3.10' 48 | CONTAINER: 'python:3.10-bullseye' 49 | - os: linux-arm64-self-hosted 50 | python-version: '3.11' 51 | CONTAINER: 'python:3.11-bullseye' 52 | - os: linux-arm64-self-hosted 53 | python-version: '3.12' 54 | CONTAINER: 'python:3.12-bullseye' 55 | - os: linux-arm64-self-hosted 56 | python-version: '3.13' 57 | CONTAINER: 'python:3.13-bullseye' 58 | 59 | 60 | # Use python container on ARM 61 | container: ${{ matrix.CONTAINER }} 62 | 63 | steps: 64 | - name: Checkout repository 65 | uses: actions/checkout@v4 66 | 67 | - name: Setup Python 68 | # Skip setting python on ARM because of missing compatibility: https://github.com/actions/setup-python/issues/108 69 | if: matrix.os != 'linux-armv7-self-hosted' && matrix.os != 'linux-arm64-self-hosted' 70 | uses: actions/setup-python@v5 71 | with: 72 | python-version: ${{ matrix.python-version }} 73 | 74 | 75 | - name: Get Python version 76 | run: | 77 | python --version 78 | python -m pip install --upgrade pip 79 | 80 | 81 | - name: Install dependencies 82 | run: python -m pip install -r build_requirements.txt 83 | 84 | - name: Install additional OS dependencies - Ubuntu 85 | if: matrix.os == 'ubuntu-latest' 86 | run: os_dependencies/ubuntu.sh 87 | 88 | - name: Install additional OS dependencies - MacOS 89 | if: matrix.os == 'macos-latest' || matrix.os == 'macos-13' 90 | run: os_dependencies/macos.sh 91 | 92 | 93 | - name: Install additional OS dependencies - Linux ARM7 94 | if: matrix.os == 'linux-armv7-self-hosted' || matrix.os == 'linux-arm64-self-hosted' 95 | run: os_dependencies/linux_arm.sh 96 | 97 | - name: Download artifacts 98 | uses: actions/download-artifact@v4 99 | with: 100 | name: dependent_requirements_${{ matrix.os}} 101 | path: dependent_requirements_${{ matrix.os}} 102 | 103 | - name: Print requirements 104 | if: matrix.os != 'windows-latest' 105 | run: cat dependent_requirements_${{ matrix.os}}/dependent_requirements.txt 106 | 107 | - name: Print requirements - Windows 108 | if: matrix.os == 'windows-latest' 109 | run: type dependent_requirements_${{ matrix.os}}\\dependent_requirements.txt 110 | 111 | - name: Build Python dependent wheels for ${{ matrix.python-version }} 112 | if: matrix.os != 'windows-latest' 113 | run: | 114 | # Rust directory needs to be included for Linux ARM7 115 | if [ "${{ matrix.os }}" = "linux-armv7-self-hosted" ]; then 116 | export PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1 117 | . 
$HOME/.cargo/env 118 | fi 119 | 120 | python build_wheels_from_file.py dependent_requirements_${{ matrix.os}} 121 | 122 | - name: Build Python dependent wheels for ${{ matrix.python-version }} - Windows 123 | if: matrix.os == 'windows-latest' 124 | run: python build_wheels_from_file.py dependent_requirements_${{ matrix.os}} 125 | 126 | 127 | - name: Upload artifacts 128 | uses: actions/upload-artifact@v4 129 | with: 130 | name: wheels-download-directory-${{ matrix.os }}-${{ matrix.python-version }} 131 | if-no-files-found: ignore 132 | path: ./downloaded_wheels 133 | retention-days: 1 134 | -------------------------------------------------------------------------------- /.github/workflows/create-release.yml: -------------------------------------------------------------------------------- 1 | name: create-release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | create_release: 10 | name: Create release 11 | runs-on: ubuntu-latest 12 | outputs: 13 | upload_url: ${{ steps.create_release.outputs.upload_url }} 14 | steps: 15 | - name: Create Release 16 | id: create_release 17 | uses: actions/create-release@v1 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | with: 21 | tag_name: ${{ github.ref }} 22 | release_name: Release ${{ github.ref }} 23 | draft: false 24 | prerelease: false 25 | -------------------------------------------------------------------------------- /.github/workflows/danger.yml: -------------------------------------------------------------------------------- 1 | name: DangerJS Pull Request linter 2 | on: 3 | pull_request_target: 4 | types: [opened, edited, reopened, synchronize] 5 | 6 | permissions: 7 | pull-requests: write 8 | contents: write 9 | 10 | jobs: 11 | pull-request-style-linter: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Check out PR head 15 | uses: actions/checkout@v4 16 | with: 17 | ref: ${{ github.event.pull_request.head.sha }} 18 | 19 | - name: DangerJS pull request linter 20 | uses: espressif/shared-github-dangerjs@v1 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /.github/workflows/issue_comment.yml: -------------------------------------------------------------------------------- 1 | name: Sync issue comments to JIRA 2 | 3 | # This workflow will be triggered when new issue comment is created (including PR comments) 4 | on: issue_comment 5 | 6 | jobs: 7 | sync_issue_comments_to_jira: 8 | name: Sync Issue Comments to Jira 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@master 12 | - name: Sync issue comments to JIRA 13 | uses: espressif/github-actions/sync_issues_to_jira@master 14 | env: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | JIRA_PASS: ${{ secrets.JIRA_PASS }} 17 | JIRA_PROJECT: IDFGH 18 | JIRA_COMPONENT: tools 19 | JIRA_URL: ${{ secrets.JIRA_URL }} 20 | JIRA_USER: ${{ secrets.JIRA_USER }} 21 | -------------------------------------------------------------------------------- /.github/workflows/new_issues.yml: -------------------------------------------------------------------------------- 1 | name: Sync issues to Jira 2 | 3 | # This workflow will be triggered when a new issue is opened 4 | on: issues 5 | 6 | jobs: 7 | sync_issues_to_jira: 8 | name: Sync issues to Jira 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@master 12 | - name: Sync GitHub issues to Jira project 13 | uses: espressif/github-actions/sync_issues_to_jira@master 14 | env: 15 | GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} 16 | JIRA_PASS: ${{ secrets.JIRA_PASS }} 17 | JIRA_PROJECT: IDFGH 18 | JIRA_COMPONENT: tools 19 | JIRA_URL: ${{ secrets.JIRA_URL }} 20 | JIRA_USER: ${{ secrets.JIRA_USER }} 21 | -------------------------------------------------------------------------------- /.github/workflows/new_prs.yml: -------------------------------------------------------------------------------- 1 | name: Sync remain PRs to Jira 2 | 3 | # This workflow will be triggered every hour, to sync remaining PRs (i.e. PRs with zero comment) to Jira project 4 | # Note that, PRs can also get synced when new PR comment is created 5 | on: 6 | schedule: 7 | - cron: "0 * * * *" 8 | 9 | jobs: 10 | sync_prs_to_jira: 11 | name: Sync PRs to Jira 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@master 15 | - name: Sync PRs to Jira project 16 | uses: espressif/github-actions/sync_issues_to_jira@master 17 | with: 18 | cron_job: true 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | JIRA_PASS: ${{ secrets.JIRA_PASS }} 22 | JIRA_PROJECT: IDFGH 23 | JIRA_COMPONENT: tools 24 | JIRA_URL: ${{ secrets.JIRA_URL }} 25 | JIRA_USER: ${{ secrets.JIRA_USER }} 26 | -------------------------------------------------------------------------------- /.github/workflows/only-create-and-upload-index.yml: -------------------------------------------------------------------------------- 1 | name: only-create-and-upload-index 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | triage: 8 | name: Create and Upload Index 9 | runs-on: ubuntu-latest 10 | strategy: 11 | fail-fast: false 12 | env: 13 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 14 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 15 | AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} 16 | AWS_BUCKET: ${{ secrets.DL_BUCKET }} 17 | PREFIX: 'pypi' 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v4 21 | 22 | - name: Install dependencies 23 | run: python -m pip install -r build_requirements.txt 24 | 25 | - name: Create and upload Index to S3 bucket 26 | run: | 27 | python create_index_pages.py $AWS_BUCKET 28 | 29 | - name: Drop AWS cache 30 | id: invalidate-index-cache 31 | run: aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CACHE_INVALIDATION }} --paths "/pypi/*" 32 | -------------------------------------------------------------------------------- /.github/workflows/upload-python-wheels.yml: -------------------------------------------------------------------------------- 1 | name: upload-python-wheels 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | triage: 8 | name: Upload Python wheels 9 | runs-on: ubuntu-latest 10 | strategy: 11 | fail-fast: false 12 | env: 13 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 14 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 15 | AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} 16 | AWS_BUCKET: ${{ secrets.DL_BUCKET }} 17 | PREFIX: 'pypi' 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v4 21 | 22 | - name: Install dependencies 23 | run: python -m pip install -r build_requirements.txt 24 | 25 | - name: Download artifacts 26 | uses: actions/download-artifact@v4 27 | with: 28 | path: ./downloaded_wheels 29 | 30 | - name: Upload release asset to S3 bucket 31 | run: | 32 | python upload_wheels.py $AWS_BUCKET 33 | python create_index_pages.py $AWS_BUCKET 34 | 35 | - name: Drop AWS cache 36 | id: invalidate-index-cache 37 | run: aws cloudfront create-invalidation --distribution-id ${{ 
secrets.AWS_CACHE_INVALIDATION }} --paths "/pypi/*" 38 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | --- 4 | minimum_pre_commit_version: 3.3.0 5 | default_install_hook_types: [pre-commit,commit-msg] 6 | 7 | repos: 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: trailing-whitespace 12 | - id: end-of-file-fixer 13 | - id: check-executables-have-shebangs 14 | - id: mixed-line-ending 15 | args: ['-f=lf'] 16 | - id: double-quote-string-fixer 17 | - id: check-yaml 18 | 19 | # Replaces flake8 20 | - repo: https://github.com/astral-sh/ruff-pre-commit 21 | rev: v0.6.9 22 | hooks: 23 | - id: ruff 24 | args: [--fix, --exit-non-zero-on-fix] 25 | 26 | - repo: https://github.com/asottile/reorder_python_imports 27 | rev: v3.13.0 28 | hooks: 29 | - id: reorder-python-imports 30 | 31 | - repo: https://github.com/pre-commit/mirrors-mypy 32 | rev: v1.11.2 33 | hooks: 34 | - id: mypy 35 | additional_dependencies: ['types-all-latest'] 36 | 37 | - repo: https://github.com/espressif/conventional-precommit-linter 38 | rev: v1.10.0 39 | hooks: 40 | - id: conventional-precommit-linter 41 | stages: [commit-msg] 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ESP idf-python-wheels 2 | 3 | This project automates the build and upload process of required Python Wheels by [ESP-IDF]. The wheels for multiple OSes and architectures are being built. 
4 | 5 | Supported architectures: 6 | * Linux 7 | - Ubuntu - x86_64 8 | - ARMv7 - arm32 9 | - ARM64 10 | * Windows - AMD64 11 | * MacOS 12 | - x86_64 13 | - ARM64 14 | 15 | Supported Python versions: 16 | * 3.8 17 | * 3.9 18 | * 3.10 19 | * 3.11 20 | * 3.12 21 | * 3.13 22 | 23 | For each `release` branch of [ESP-IDF] starting from the version defined in GitHub variables, and for the [ESP-IDF] `master` branch, all the requirements and constraints files are automatically downloaded and wheels are built and uploaded. 24 | 25 | 26 | ## Configuration 27 | `MIN_IDF_MAJOR_VERSION` and `MIN_IDF_MINOR_VERSION` GitHub variables can be set in project settings 28 | to change the [ESP-IDF] `release` branches to build wheels for. If these variables are not set (not in the environment for [build-wheels-platforms.yml](./.github/workflows/build-wheels-platforms.yml)), the default values specified in [build_wheels.py](./build_wheels.py) are used. 29 | 30 | 31 | ## Usage of manual wheels build - defined wheels workflow 32 | If there is a need to manually build and upload wheels, the `defined-wheels` workflow can be used. Specify the pip packages with marker support (e.g. coredump~=1.2;sys_platform!='win32') and check the architectures for which the wheels should be built and uploaded. Multiple packages can be separated by spaces. 33 | 34 | Then the wheels are built and uploaded for all supported Python versions. 35 | 36 | 37 | ## Requirements lists 38 | These lists are files for requirements that should be added or excluded from the main requirements list which is automatically assembled. 39 | 40 | ### exclude_list.yaml 41 | File for excluded Python packages in the **main requirements** list. 42 | 43 | This YAML file is converted to `Requirement` from `packaging.requirements` because `pip` can handle this format, so the converting function is designed to be compatible with the [PEP508](https://peps.python.org/pep-0508/) scheme. 44 | The opposite logic of the exclude_list is handled by the function itself, which means the list is meant to be easy for developers to use; this is also the reason the YAML format is used. 45 | 46 | For every `package_name` there are options: 47 | * `version` 48 | - supports all logic operators defined by [PEP508](https://peps.python.org/pep-0508/) for versions (<, >, !=, etc.) 49 | * `platform` 50 | * `python` 51 | 52 | each of which can be a string or a list of strings. 53 | 54 | exclude_list template: 55 | 56 | - package_name: '' 57 | version: '' / ['', ''] # optional 58 | platform: '' / ['', '', ''] # optional 59 | python: '' / ['', '', ''] # optional 60 | 61 | The syntax can be converted into a sentence: "From assembled **main requirements** exclude `package_name` with `version` on `platform` for `python` version". 62 | 63 | example: 64 | 65 | - package_name: 'pyserial' 66 | version: ['>=3.3', '<3.6'] 67 | platform: ['win32', 'linux', 'darwin'] 68 | python: '>=3.9' 69 | 70 | This would mean: "From assembled **main requirements** exclude `pyserial` with version `>=3.3` and `<3.6` on platform `win32`, `linux`, `darwin` for `python` version `>=3.9`". 71 | 72 | From the example above it is clear that `platform` could be left out (because all main platforms are specified), so the options `version`, `platform` and `python` are optional; any of them can be left unspecified and the key erased. When only `package_name` is given, the package will be excluded from **main requirements**.
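For illustration only, below is a minimal sketch of how one exclude_list entry could be expanded into a PEP508 `Requirement`. This is not the repository's actual `yaml_list_adapter.py` implementation; the helper name `entry_to_requirement`, the regex handling of the `python` field and the exact marker layout are assumptions made for this example:

    import re
    from packaging.requirements import Requirement

    def entry_to_requirement(entry: dict) -> Requirement:
        """Illustrative only: expand one exclude_list entry into a PEP508 Requirement."""
        def as_list(value):
            # Options may be a single string or a list of strings
            return value if isinstance(value, list) else ([value] if value else [])

        specifier = ','.join(as_list(entry.get('version')))    # e.g. '>=3.3,<3.6'
        markers = []
        platforms = as_list(entry.get('platform'))
        if platforms:
            markers.append(' or '.join(f'sys_platform == "{p}"' for p in platforms))
        for constraint in as_list(entry.get('python')):
            # Assumes a well-formed 'operator + version' string such as '>=3.9'
            operator, version = re.match(r'([<>=!~]+)\s*(.+)', constraint).groups()
            markers.append(f'python_version {operator} "{version}"')
        marker = ' and '.join(f'({m})' for m in markers)
        return Requirement(f'{entry["package_name"]}{specifier}' + (f'; {marker}' if marker else ''))

    # The pyserial example above would expand to roughly:
    # pyserial<3.6,>=3.3; (sys_platform == "win32" or sys_platform == "linux"
    #                      or sys_platform == "darwin") and (python_version >= "3.9")

The real conversion in `yaml_list_adapter.py` may differ in detail, but the resulting PEP508 requirement strings are the format that the build scripts ultimately pass to `pip wheel`.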
73 | 74 | 75 | ### include_list.yaml 76 | File for additional Python packages to the **main requirements** list. Built separately to not restrict the **main requirements** list. 77 | 78 | The syntax can be also converted into a sentence: "For assembled **main requirements** additionally include `package_name` with `version` on `platform` for `python` version". 79 | 80 | 81 | ### build_requirements.txt 82 | File for the requirements needed for the build process and the build script. 83 | 84 | ### os_dependencies 85 | When there is a need for additional OS dependencies to successfully build the wheels on a specific platform and architecture, the `.sh` script in the `os_dependencies` directory can be adjusted. 86 | 87 | ## Activity diagram 88 | The main file is `build-wheels-platforms.yml` which is scheduled to run periodically to build Python wheels for any requirement of all [ESP-IDF]-supported versions. 89 | ![IDF Python wheels - Activity diagram](./resources/idf-python-wheels_diagram.svg "IDF Python wheels - Activity diagram") 90 | 91 | *The diagram was generated with the open-source tool [PlantUML](https://plantuml.com) (and edited)* 92 | 93 | > [!NOTE] 94 | > **Python version dependent wheels explanation** 95 | > 96 | > Python dependent wheels are wheels which depend on the [CPython’s Application Binary Interface (ABI)](https://docs.python.org/3/c-api/stable.html). These are checked based on the [wheel filename format](https://peps.python.org/pep-0491/#file-format) where the `abi tag` is checked for `cp`. Such wheels need to be build also for all supported Python versions, not only for the minimum Python version supported by [ESP-IDF]. 97 | 98 | 99 | [ESP-IDF]: https://github.com/espressif/esp-idf 100 | -------------------------------------------------------------------------------- /_helper_functions.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | from colorama import Fore 7 | from colorama import Style 8 | from packaging.requirements import Requirement 9 | 10 | 11 | def print_color(text:str, color:str = Fore.BLUE): 12 | """Print colored text specified by color argument based on colorama 13 | - default color BLUE 14 | """ 15 | print(f'{color}', f'{text}', Style.RESET_ALL) 16 | 17 | 18 | def merge_requirements(requirement:Requirement, another_req:Requirement) -> Requirement: 19 | """Merges two requirements into one requirement.""" 20 | new_ver_specifier = '' 21 | new_markers = '' 22 | if requirement.specifier and another_req.specifier: 23 | if not another_req.marker and ('==' not in str(requirement.specifier) 24 | and '!=' not in str(requirement.specifier)): 25 | new_ver_specifier = f'{requirement.specifier},{another_req.specifier}' 26 | else: 27 | new_ver_specifier = another_req.specifier 28 | elif requirement.specifier and not another_req.specifier: 29 | new_ver_specifier = requirement.specifier 30 | elif not requirement.specifier and another_req.specifier: 31 | new_ver_specifier = another_req.specifier 32 | 33 | if requirement.marker and another_req.marker: 34 | new_markers = f'({requirement.marker}) and ({another_req.marker})' 35 | elif requirement.marker and not another_req.marker: 36 | new_markers = requirement.marker 37 | elif not requirement.marker and another_req.marker: 38 | new_markers = another_req.marker 39 | 40 | # construct new requirement 41 | new_requirement = Requirement( 42 | 
f'{requirement.name}{new_ver_specifier}' + (f'; {new_markers}' if new_markers else '') 43 | ) 44 | 45 | return new_requirement 46 | -------------------------------------------------------------------------------- /build_requirements.txt: -------------------------------------------------------------------------------- 1 | # This is a list of Python packages needed for build process and script. This file is used with pip. 2 | # ----- build script ----- 3 | requests~=2.31.0 4 | packaging~=23.2 5 | PyYAML~=6.0.1 6 | colorama~=0.4.6 7 | # ----- build process ----- 8 | boto3~=1.34.4 9 | -------------------------------------------------------------------------------- /build_wheels.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | import json 7 | import os 8 | import re 9 | import subprocess 10 | import sys 11 | from typing import Dict 12 | from typing import List 13 | from typing import Optional 14 | from typing import Union 15 | 16 | import requests 17 | from colorama import Fore 18 | from packaging.requirements import InvalidRequirement 19 | from packaging.requirements import Requirement 20 | 21 | from _helper_functions import merge_requirements 22 | from _helper_functions import print_color 23 | from yaml_list_adapter import YAMLListAdapter 24 | 25 | # GLOBAL VARIABLES 26 | # URL to fetch IDF branches from 27 | IDF_BRANCHES_URL = 'https://api.github.com/repos/espressif/esp-idf/branches?protected=true&per_page=100' 28 | # URL to download constraints file from (vX.Y.txt part is auto-completed) 29 | IDF_CONSTRAINTS_URL = 'https://dl.espressif.com/dl/esp-idf/espidf.constraints.' 30 | # URL for IDF 'resources' root directory for requirements paths 31 | IDF_RESOURCES_URL = 'https://raw.githubusercontent.com/espressif/esp-idf/' 32 | # URL for IDF master CMAKE version file 33 | IDF_MASTER_VERSION_URL = f'{IDF_RESOURCES_URL}master/tools/cmake/version.cmake' 34 | 35 | # Minimal IDF release version to take requirements from (v{MAJOR}.{MINOR}) 36 | # Requirements from all release branches and master equal or above this will be considered 37 | # Specified in Github variables 38 | MIN_IDF_MAJOR_VERSION: int = int(os.environ.get('MIN_IDF_MAJOR_VERSION', '5')) 39 | MIN_IDF_MINOR_VERSION: int = int(os.environ.get('MIN_IDF_MINOR_VERSION', '0')) 40 | 41 | # GH token for the authenticated requests to extend the limit 42 | GH_TOKEN: str = os.environ.get('GH_TOKEN', '') 43 | 44 | # Authentication header 45 | AUTH_HEADER: Dict[str, str] = { 46 | 'authorization': f'Bearer {GH_TOKEN}', 47 | 'content-type': 'application/json' 48 | } 49 | 50 | print(f'ENV variables: IDF v{MIN_IDF_MAJOR_VERSION}.{MIN_IDF_MINOR_VERSION}' 51 | f' -- grater or equal release and master branches will be considered' 52 | ) 53 | 54 | 55 | def check_response(response: requests.Response, warning: str, exit_on_wrong: bool = False) -> bool: 56 | """Print warning or exit the script when response code is not correct""" 57 | if response.status_code == 200: 58 | return True 59 | if exit_on_wrong: 60 | raise SystemExit(f'{warning}\n{response.text}') 61 | print_color(f'{warning}\n', Fore.LIGHTRED_EX) 62 | return False 63 | 64 | 65 | # ESP-IDF branches list 66 | def fetch_idf_branches() -> List[str]: 67 | """Fetch IDF branches from URL specified in global variables""" 68 | res = requests.get(IDF_BRANCHES_URL, headers=AUTH_HEADER, timeout=10) 69 | if check_response(res, 'Failed to 
fetch ESP-IDF branches.', True): 70 | return [branch['name'] for branch in res.json()] 71 | return [] 72 | 73 | 74 | def get_used_idf_branches(idf_repo_branches: List[str]) -> List[str]: 75 | """Take only IDF master and release branches, only equal or grater version specified in Github variables""" 76 | idf_branches: List[str] = [] 77 | for branch in idf_repo_branches: 78 | idf_release = re.match(r'release/v(\d+)\.(\d+)', branch) 79 | 80 | if not idf_release: 81 | continue 82 | 83 | idf_major, idf_minor = map(int, idf_release.groups()) 84 | 85 | if (idf_major, idf_minor) < (MIN_IDF_MAJOR_VERSION, MIN_IDF_MINOR_VERSION): 86 | continue 87 | 88 | idf_branches.append(branch) 89 | 90 | idf_branches.append('master') 91 | return idf_branches 92 | 93 | 94 | # Constraints files versions list 95 | def _idf_version_from_cmake() -> Optional[dict]: 96 | """Get IDF master branch version from version.cmake""" 97 | res = requests.get(IDF_MASTER_VERSION_URL, headers=AUTH_HEADER, timeout=10) 98 | if check_response(res, 'Failed to get master version of IDF from CMAKE.'): 99 | regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)') 100 | lines = res.text.splitlines() 101 | 102 | idf_master_ver: dict = {} 103 | for line in lines: 104 | ver = regex.match(line) 105 | if ver: 106 | idf_master_ver[ver.group(1)] = ver.group(2) 107 | return idf_master_ver 108 | return None 109 | 110 | 111 | def get_constraints_versions(idf_branches: List[str]) -> List[str]: 112 | """From desired branches passed in get constraints files versions list 113 | - when branch is not 'release' (without version) it is supposed to be 'master' 114 | and auto version mechanism is applied if not specified in Github variables or manual workflow not to 115 | """ 116 | idf_constraints: List[str] = [] 117 | 118 | for branch in idf_branches: 119 | # Handle release branches 120 | if 'release/' in branch: 121 | idf_constraints.append(branch.split('release/')[1]) 122 | continue 123 | 124 | # Handle master branch 125 | idf_master_ver = _idf_version_from_cmake() 126 | 127 | # when IDF version not set correctly and CMAKE version for master is not downloaded 128 | if idf_branches[0] == 'master' and idf_master_ver is None: 129 | idf_constraints.append('None') 130 | continue 131 | 132 | if idf_master_ver is not None: 133 | next_master_version = f'v{idf_master_ver["MAJOR"]}.{idf_master_ver["MINOR"]}' 134 | idf_constraints.append(next_master_version) 135 | 136 | return idf_constraints 137 | 138 | 139 | # --- Download all requirements from all the branches requirements and constraints files --- # 140 | def _download_branch_requirements(branch: str, idf_requirements_json: dict) -> List[str]: 141 | """Download requirements files for all groups specified in IDF requirements.JSON""" 142 | print_color(f'---------- ESP-IDF BRANCH {branch} ----------') 143 | requirements_txt: List[str] = [] 144 | 145 | for feature in idf_requirements_json['features']: 146 | res = requests.get( 147 | f"{IDF_RESOURCES_URL}{branch}/{feature['requirement_path']}", headers=AUTH_HEADER, timeout=10 148 | ) 149 | if check_response(res, f"Failed to download feature (requirement group) '{feature['name']}'"): 150 | requirements_txt += res.text.splitlines() 151 | print(f"Added ESP-IDF {feature['name']} requirements") 152 | return requirements_txt 153 | 154 | 155 | def _download_branch_constraints(constraint_file_url: str, branch, idf_constraint: str) -> List[str]: 156 | """Download constraints file for specific branch""" 157 | res = requests.get(constraint_file_url, 
headers=AUTH_HEADER, timeout=10) 158 | if check_response(res, f'Failed to download ESP-IDF constraints file {idf_constraint} for branch {branch}'): 159 | requirements_txt = res.text.splitlines() 160 | print(f'Added ESP-IDF constraints file {idf_constraint} for branch {branch}') 161 | return requirements_txt 162 | return [] 163 | 164 | 165 | non_classic_requirement:List[str] = [] 166 | def _add_into_requirements(requirements_txt: List[str]) -> set: 167 | """Create set of requirements from downloaded lines of requirements 168 | - set is used to prevent duplicates 169 | """ 170 | requirements_set: set[Union[Requirement, str]] = set() 171 | for line in map(str.strip, requirements_txt): 172 | # check if in the line or the line itself is not a comment 173 | line = line.split('#', 1)[0] 174 | if line: 175 | try: 176 | requirements_set.add(Requirement(line)) 177 | except InvalidRequirement: 178 | # Non classic requirement (e.g. '--only-binary cryptography') 179 | non_classic_requirement.append(line) 180 | return requirements_set 181 | 182 | 183 | def assemble_requirements(idf_branches: List[str], idf_constraints: List[str], make_txt_file:bool=False) -> set: 184 | """Assemble IDF requirements into set to prevent duplicates""" 185 | requirements_txt: List[str] = [] 186 | 187 | for i, branch in enumerate(idf_branches): 188 | idf_requirements_json_url = f'{IDF_RESOURCES_URL}{branch}/tools/requirements.json' 189 | constraint_file_url = f'https://dl.espressif.com/dl/esp-idf/espidf.constraints.{idf_constraints[i]}.txt' 190 | 191 | res = requests.get(idf_requirements_json_url, headers=AUTH_HEADER, timeout=10) 192 | if not check_response(res, f'\nFailed to download requirements JSON for branch {branch}'): 193 | continue 194 | 195 | idf_requirements_json = json.loads(res.content) 196 | 197 | requirements_txt += _download_branch_requirements(branch, idf_requirements_json) 198 | requirements_txt += _download_branch_constraints(constraint_file_url, branch, idf_constraints[i]) 199 | 200 | if make_txt_file: 201 | # TXT file from all downloaded requirements and constraints files 202 | # useful for debugging or to see the comments for requirements 203 | with open('requirements.txt', 'w') as f: 204 | f.write('\n'.join(requirements_txt)) 205 | 206 | return _add_into_requirements(requirements_txt) 207 | 208 | 209 | def exclude_from_requirements(assembled_requirements:set, exclude_list: set, print_requirements: bool = True) -> set: 210 | """Exclude packages defined in exclude_list from assembled requirements 211 | - print_requirements = true will print the changes 212 | """ 213 | new_assembled_requirements = set() 214 | not_in_exclude = [] 215 | if print_requirements: 216 | print_color('---------- REQUIREMENTS ----------') 217 | 218 | for requirement in assembled_requirements: 219 | printed = False 220 | for req_to_exclude in exclude_list: 221 | if req_to_exclude.name not in requirement.name: 222 | not_in_exclude.append(True) 223 | else: 224 | if not req_to_exclude.specifier and not req_to_exclude.marker: 225 | # Delete requirement 226 | if print_requirements: 227 | print_color(f'-- {requirement}', Fore.RED) 228 | continue 229 | 230 | # Merge requirement and requirement_from_exclude list 231 | new_requirement = merge_requirements(requirement, req_to_exclude) 232 | new_assembled_requirements.add(new_requirement) 233 | 234 | if print_requirements: 235 | if not printed: 236 | print_color(f'-- {requirement}', Fore.RED) 237 | printed = True 238 | print_color(f'++ {new_requirement}', Fore.GREEN) 239 | 240 | # Add back 
unchanged requirement 241 | if len(not_in_exclude) == len(exclude_list): 242 | if print_requirements: 243 | print(str(requirement)) 244 | new_assembled_requirements.add(requirement) 245 | 246 | not_in_exclude.clear() 247 | 248 | if print_requirements: 249 | print_color('---------- END OF REQUIREMENTS ----------') 250 | 251 | return new_assembled_requirements 252 | 253 | 254 | # --- Build wheels --- 255 | def build_wheels(requirements: set, local_links:bool = True) -> dict: 256 | """Build Python wheels 257 | - 'failed' - failed wheels counter 258 | - 'succeeded' - succeeded wheels counter 259 | """ 260 | failed_wheels = 0 261 | succeeded_wheels = 0 262 | 263 | dir = f'{os.path.curdir}{(os.sep)}downloaded_wheels' 264 | for requirement in requirements: 265 | # non classic requirement wheel build 266 | if non_classic_requirement: 267 | pattern = re.compile(r'(--[^ ]*)(.*)') 268 | match = pattern.search(non_classic_requirement[0]) 269 | if match: 270 | argument = match.group(1).strip() 271 | arg_param = match.group(2).strip() 272 | if arg_param in requirement.name: 273 | out = subprocess.run( 274 | [f'{sys.executable}', '-m', 'pip', 'wheel', f'{requirement}', 275 | '--find-links', f'{dir}', '--wheel-dir', f'{dir}', 276 | f'{argument}', f'{arg_param}'], 277 | stdout=subprocess.PIPE, stderr=subprocess.PIPE 278 | ) 279 | print(out.stdout.decode('utf-8')) 280 | if out.stderr: 281 | print_color(out.stderr.decode('utf-8'), Fore.RED) 282 | non_classic_requirement.remove(non_classic_requirement[0]) 283 | continue 284 | 285 | # requirement wheel build 286 | out = subprocess.run( 287 | [f'{sys.executable}', '-m', 'pip', 'wheel', f'{requirement}', 288 | '--find-links', f'{dir}', '--wheel-dir', f'{dir}'], 289 | stdout=subprocess.PIPE, stderr=subprocess.PIPE 290 | ) 291 | 292 | print(out.stdout.decode('utf-8')) 293 | if out.stderr: 294 | print_color(out.stderr.decode('utf-8'), Fore.RED) 295 | 296 | if out.returncode != 0: 297 | failed_wheels += 1 298 | else: 299 | succeeded_wheels += 1 300 | 301 | return {'failed': failed_wheels, 'succeeded': succeeded_wheels} 302 | 303 | 304 | def get_python_dependent_wheels(wheel_dir:str, requirements:set) -> set: 305 | """Get Python dependent requirements from downloaded wheel directory""" 306 | dependent_wheels_set = set() 307 | dependent_requirements_set = set() 308 | 309 | file_names = os.listdir(wheel_dir) 310 | 311 | # find dependent wheels in wheel directory 312 | for wheel in file_names: 313 | pattern = re.compile(r'([^ -]*)-(\d+(\.\d+)*).*?(cp\d+)') 314 | match = pattern.search(wheel) 315 | if match is not None: 316 | wheel_name = match.group(1) 317 | wheel_version = match.group(2) 318 | build = match.group(3) 319 | 320 | dependent_wheels_set.add((wheel_name, wheel_version, build)) 321 | 322 | # find dependent wheel in requirements 323 | for name, version, _ in dependent_wheels_set: 324 | for requirement in requirements: 325 | if requirement.marker: 326 | if 'python_version' in str(requirement.marker): 327 | # add python version specific requirements from all branches 328 | dependent_requirements_set.add(requirement) 329 | 330 | if name.lower() == requirement.name.lower(): 331 | # add requirements with markers 332 | dependent_requirements_set.add(requirement) 333 | else: 334 | # add downloaded and already built requirements (all dependencies) 335 | dependent_requirements_set.add(Requirement(f'{name}=={version}')) 336 | 337 | return dependent_requirements_set 338 | 339 | 340 | def main() -> int: 341 | """Builds Python wheels for ESP-IDF dependencies for master and 
release branches 342 | greater than or equal to the specified minimum version""" 343 | 344 | idf_repo_branches = fetch_idf_branches() 345 | idf_branches = get_used_idf_branches(idf_repo_branches) 346 | print(f'ESP-IDF branches to download requirements for:\n{idf_branches}\n') 347 | 348 | idf_constraints = get_constraints_versions(idf_branches) 349 | print(f'ESP-IDF constraints file versions to download requirements for:\n{idf_constraints}\n') 350 | 351 | requirements = assemble_requirements(idf_branches, idf_constraints, True) 352 | 353 | exclude_list = YAMLListAdapter('exclude_list.yaml', exclude=True).requirements 354 | 355 | after_exclude_requirements = exclude_from_requirements(requirements, exclude_list) 356 | 357 | include_list = YAMLListAdapter('include_list.yaml').requirements 358 | print_color('---------- ADDITIONAL REQUIREMENTS ----------') 359 | for req in include_list: 360 | print(req) 361 | print_color('---------- END OF ADDITIONAL REQUIREMENTS ----------') 362 | 363 | print_color('---------- BUILD ADDITIONAL WHEELS ----------') 364 | additional_whl = build_wheels(include_list) 365 | failed_wheels = additional_whl['failed'] 366 | succeeded_wheels = additional_whl['succeeded'] 367 | 368 | print_color('---------- BUILD WHEELS ----------') 369 | standard_whl = build_wheels(after_exclude_requirements) 370 | failed_wheels += standard_whl['failed'] 371 | succeeded_wheels += standard_whl['succeeded'] 372 | 373 | print_color('---------- STATISTICS ----------') 374 | print_color(f'Succeeded {succeeded_wheels} wheels', Fore.GREEN) 375 | print_color(f'Failed {failed_wheels} wheels', Fore.RED) 376 | 377 | if failed_wheels != 0: 378 | raise SystemExit('One or more wheels failed to build') 379 | 380 | print_color('---------- PYTHON VERSION DEPENDENT ----------') 381 | dependent_wheels = get_python_dependent_wheels(f'{os.path.curdir}{(os.sep)}downloaded_wheels', 382 | after_exclude_requirements) 383 | after_exclude_dependent_wheels = exclude_from_requirements(dependent_wheels, exclude_list) 384 | 385 | with open('dependent_requirements.txt', 'w') as f: 386 | for wheel in after_exclude_dependent_wheels: 387 | f.write(f'{str(wheel)}\n') 388 | 389 | return 0 390 | 391 | 392 | if __name__ == '__main__': 393 | main() 394 | -------------------------------------------------------------------------------- /build_wheels_from_file.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | import argparse 7 | import os 8 | import subprocess 9 | import sys 10 | 11 | from colorama import Fore 12 | 13 | from _helper_functions import print_color 14 | 15 | parser = argparse.ArgumentParser(description='Process build arguments.') 16 | parser.add_argument('requirements_path', metavar='Path', type=str, nargs='?', default='', 17 | help='path to the directory with the Python version dependent requirements file') 18 | 19 | parser.add_argument('-r', '--requirements', metavar='Requirement(s)', type=str, nargs='*', 20 | help='requirement(s) to build wheel(s) for') 21 | 22 | args = parser.parse_args() 23 | 24 | 25 | requirements_dir = args.requirements_path 26 | in_requirements = args.requirements 27 | 28 | failed_wheels = 0 29 | succeeded_wheels = 0 30 | 31 | # Build wheels for requirements in file 32 | if requirements_dir: 33 | try: 34 | with open(f'{requirements_dir}{os.sep}dependent_requirements.txt', 'r') as f: 35 | requirements = f.readlines() 36 | except FileNotFoundError as e: 37 |
raise SystemExit(f'Python version dependent requirements directory or file not found ({e})') 38 | 39 | for requirement in requirements: 40 | out = subprocess.run( 41 | [f'{sys.executable}', '-m', 'pip', 'wheel', f'{requirement}', 42 | '--find-links', 'downloaded_wheels', '--wheel-dir', 'downloaded_wheels'], 43 | stdout=subprocess.PIPE, stderr=subprocess.PIPE 44 | ) 45 | 46 | print(out.stdout.decode('utf-8')) 47 | if out.stderr: 48 | print_color(out.stderr.decode('utf-8'), Fore.RED) 49 | 50 | if out.returncode != 0: 51 | failed_wheels += 1 52 | else: 53 | succeeded_wheels += 1 54 | 55 | 56 | print_color('---------- STATISTICS ----------') 57 | print_color(f'Succeeded {succeeded_wheels} wheels', Fore.GREEN) 58 | print_color(f'Failed {failed_wheels} wheels', Fore.RED) 59 | 60 | if failed_wheels != 0: 61 | raise SystemExit('One or more wheels failed to build') 62 | 63 | # Build wheels from passed requirements 64 | else: 65 | for requirement in in_requirements: 66 | out = subprocess.run( 67 | [f'{sys.executable}', '-m', 'pip', 'wheel', f'{requirement}', 68 | '--find-links', 'downloaded_wheels', '--wheel-dir', 'downloaded_wheels'], 69 | stdout=subprocess.PIPE, stderr=subprocess.PIPE 70 | ) 71 | 72 | print(out.stdout.decode('utf-8')) 73 | if out.stderr: 74 | print_color(out.stderr.decode('utf-8'), Fore.RED) 75 | 76 | if out.returncode != 0: 77 | failed_wheels += 1 78 | else: 79 | succeeded_wheels += 1 80 | 81 | 82 | print_color('---------- STATISTICS ----------') 83 | print_color(f'Succeeded {succeeded_wheels} wheels', Fore.GREEN) 84 | print_color(f'Failed {failed_wheels} wheels', Fore.RED) 85 | 86 | if failed_wheels != 0: 87 | raise SystemExit('One or more wheels failed to build') 88 | -------------------------------------------------------------------------------- /create_index_pages.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | import re 7 | import sys 8 | from io import BytesIO 9 | from pathlib import Path 10 | from typing import Dict 11 | 12 | import boto3 13 | 14 | def _html_loader(path: str) -> str: 15 | """Loads the HTML file""" 16 | with open(path, 'r') as file: 17 | return file.read() 18 | 19 | HTML_HEADER = _html_loader('resources/html/header.html') 20 | HTML_PRETTY_HEADER = _html_loader('resources/html/pretty_header.html') 21 | HTML_FOOTER = _html_loader('resources/html/footer.html') 22 | 23 | DL_BUCKET = sys.argv[1] 24 | 25 | s3 = boto3.client('s3') 26 | 27 | paginator = s3.get_paginator('list_objects_v2') 28 | 29 | response_iterator = paginator.paginate( 30 | Bucket=DL_BUCKET, 31 | Prefix='pypi/' 32 | ) 33 | 34 | 35 | packages : Dict = {} 36 | for response in response_iterator: 37 | for package in response['Contents']: 38 | res = re.search(r'\/(.*)\/', format(package['Key'])) 39 | if not res: 40 | continue 41 | 42 | name = res.group(1).lower() 43 | 44 | # Skip the route for the human readable form of the PyPI 45 | if name == 'pretty': 46 | continue 47 | 48 | if name not in packages: 49 | packages[name] = [] 50 | 51 | packages[name].append(Path(package['Key']).name) 52 | 53 | # Remove not valid packages to not display them 54 | packages_new = packages.copy() 55 | for package_name, package_values in packages.items(): 56 | if len(package_values) < 1: 57 | packages_new.pop(package_name) 58 | if len(package_values) == 1: 59 | if package_values[0] == 'index.html': 60 | packages_new.pop(package_name) 61 | 62 | 
index = [] 63 | index_pretty = [] 64 | index.append(HTML_HEADER) 65 | index_pretty.append(HTML_PRETTY_HEADER) 66 | for name in packages_new.keys(): 67 | index.append(f' {name}/') 68 | index_pretty.append( 69 | f'
{name}Entries: {len(packages_new[name])}

' 70 | ) 71 | index.append(HTML_FOOTER) 72 | index_pretty.append(HTML_FOOTER) 73 | 74 | s3.upload_fileobj(BytesIO('\n'.join(index).encode('utf-8')), 75 | DL_BUCKET, 76 | 'pypi/index.html', 77 | ExtraArgs={'ACL': 'public-read', 'ContentType':'text/html'}) 78 | 79 | s3.upload_fileobj(BytesIO('\n'.join(index_pretty).encode('utf-8')), 80 | DL_BUCKET, 81 | 'pypi/pretty/index.html', 82 | ExtraArgs={'ACL': 'public-read', 'ContentType':'text/html'}) 83 | 84 | for name, filenames in packages_new.items(): 85 | index_wheel = [] 86 | index_wheel.append(HTML_HEADER) 87 | for fn in filenames: 88 | # Skip HTML source file of the wheel page 89 | if fn == 'index.html': 90 | continue 91 | index_wheel.append(f'{fn}
') 92 | index_wheel.append(HTML_FOOTER) 93 | 94 | s3.upload_fileobj(BytesIO('\n'.join(index_wheel).encode('utf-8')), 95 | DL_BUCKET, 96 | f'pypi/{name}/index.html', 97 | ExtraArgs={'ACL': 'public-read', 'ContentType':'text/html'}) 98 | -------------------------------------------------------------------------------- /exclude_list.yaml: -------------------------------------------------------------------------------- 1 | # List of Python packages to exclude from automatically assembled requirements 2 | #"From assembled
requirements, exclude the given package, optionally restricted by version, platform and Python version". 3 | 4 | # exclude_list template 5 | #- package_name: '' 6 | # version: '' / ['', ''] # optional 7 | # platform: '' / ['', '', ''] # optional 8 | # python: '' / ['', '', ''] # optional 9 | 10 | # dbus-python cannot be built on Windows 11 | - package_name: 'dbus-python' 12 | platform: ['win32'] 13 | 14 | # dbus-python cannot be built with Python > 3.11 on MacOS 15 | - package_name: 'dbus-python' 16 | platform: 'darwin' 17 | python: '>3.11' 18 | 19 | # gevent==1.5.0 cannot be built with Python > 3.8 20 | - package_name: 'gevent' 21 | version: '==1.5.0' 22 | python: '>3.8' 23 | 24 | # gdbgui==0.13.2.0 leads to installation of gevent 1.5.0 which cannot be built 25 | - package_name: 'gdbgui' 26 | version: '==0.13.2.0' 27 | 28 | # Python 3.13 does not support Pillow 9.5.0 29 | - package_name: 'Pillow' 30 | version: '==9.5.0' 31 | python: '==3.13' 32 | 33 | # gdbgui leads to installation of greenlet which does not support Python 3.13 yet 34 | # issue: https://github.com/cs01/gdbgui/issues/494 35 | - package_name: 'gdbgui' 36 | python: '==3.13' 37 | # some versions of greenlet are not supported by Python 3.13 38 | - package_name: 'greenlet' 39 | version: '<3.0' 40 | python: '==3.13' 41 | 42 | # Python 3.13 does not support windows_curses on Windows 43 | # issue: https://github.com/zephyrproject-rtos/windows-curses/issues/69 44 | - package_name: 'windows_curses' 45 | platform: 'win32' 46 | python: '==3.13' 47 | 48 | # Python 3.13 does not support ruamel.yaml.clib 0.2.8 49 | - package_name: 'ruamel.yaml.clib' 50 | version: '==0.2.8' 51 | python: '==3.13' 52 | --------------------------------------------------------------------------------
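For orientation, a minimal sketch (not part of the repository) of how the exclude rules above are consumed, assuming the usage shown in build_wheels.py and the behaviour documented in yaml_list_adapter.py later in this listing, with the dependencies from build_requirements.txt installed and the script run from the repository root:

# Illustrative only -- not part of the repository.
from yaml_list_adapter import YAMLListAdapter

# exclude=True inverts each rule's version specifiers and platform/Python markers
# (e.g. platform 'win32' becomes "sys_platform != 'win32'"), so the resulting
# packaging.requirements.Requirement objects can be handed straight to pip.
exclude_requirements = YAMLListAdapter('exclude_list.yaml', exclude=True).requirements

for requirement in sorted(exclude_requirements, key=str):
    print(requirement)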
/include_list.yaml: -------------------------------------------------------------------------------- 1 | # List of Python packages to additionally include in the automatically assembled requirements 2 | #"For the assembled requirements, additionally include the given package, optionally restricted by version, platform and Python version". 3 | 4 | # include_list template 5 | #- package_name: '' 6 | # version: '' / ['', ''] # optional 7 | # platform: '' / ['', '', ''] # optional 8 | # python: '' / ['', '', ''] # optional 9 | 10 | - package_name: 'gdbgui' 11 | version: '>=0.15.2.0' 12 | platform: ['linux', 'darwin'] 13 | -------------------------------------------------------------------------------- /os_dependencies/linux_arm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | arch=$(uname -m) 4 | 5 | apt-get update 6 | 7 | # AWS 8 | apt-get install -y -q --no-install-recommends awscli 9 | 10 | # PyGObject needs build dependencies https://pygobject.readthedocs.io/en/latest/getting_started.html 11 | apt-get install libgirepository1.0-dev libgirepository-2.0-dev gcc libcairo2-dev pkg-config python3-dev -y 12 | 13 | # dbus-python build dependencies 14 | apt-get install libtiff5 libjpeg-dev libopenjp2-7 cmake libdbus-1-dev -y 15 | apt-get install -y --no-install-recommends python3-dev libdbus-glib-1-dev libgirepository1.0-dev libcairo2-dev -y 16 | apt-get install -y --no-install-recommends dbus-tests -y 17 | 18 | # Only ARMv7 19 | if [ "$arch" == "armv7l" ]; then 20 | # cryptography needs Rust 21 | # clean the container Rust installation to make sure the right Rust toolchain is used 22 | apt remove --auto-remove --purge rust-gdb rustc libstd-rust-dev libstd-rust-1.48 23 | # install Rust dependencies 24 | apt-get install -y build-essential libssl-dev libffi-dev python3-dev pkg-config gcc musl-dev 25 | # install Rust 26 | curl --proto '=https' --tlsv1.3 -sSf https://sh.rustup.rs | bash -s -- -y 27 | . $HOME/.cargo/env 28 | fi 29 | -------------------------------------------------------------------------------- /os_dependencies/macos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | arch=$(uname -m) 4 | 5 | # PyGObject needs build dependencies https://pygobject.readthedocs.io/en/latest/getting_started.html 6 | brew install pygobject3 gtk4 7 | 8 | 9 | # Only MacOS x86_64 additional dependencies 10 | if [ "$arch" == "x86_64" ]; then 11 | echo "x86_64 additional dependencies" 12 | fi 13 | 14 | # Only MacOS M1 additional dependencies 15 | if [ "$arch" == "arm64" ]; then 16 | echo "M1 additional dependencies" 17 | fi 18 | -------------------------------------------------------------------------------- /os_dependencies/ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo apt-get update -y 3 | 4 | # PyGObject needs build dependencies https://pygobject.readthedocs.io/en/latest/getting_started.html 5 | sudo apt install libgirepository1.0-dev libgirepository-2.0-dev gcc libcairo2-dev pkg-config python3-dev gir1.2-gtk-4.0 -y 6 | 7 | # dbus-python needs build dependencies 8 | sudo apt-get install cmake build-essential libdbus-1-dev libdbus-glib-1-dev -y 9 | 10 | # Pillow needs libjpeg-dev 11 | sudo apt-get install libjpeg-dev -y 12 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 120 3 | skip-string-normalization = true 4 | 5 | [tool.ruff] 6 | line-length = 120 7 | select = ['E', 'F', 'W'] 8 | target-version = "py38" 9 | 10 | [tool.mypy] 11 | disallow_incomplete_defs = false # Disallows defining functions with incomplete type annotations 12 | disallow_untyped_defs = false # 
Disallows defining functions without type annotations or with incomplete type annotations 13 | ignore_missing_imports = true # Suppress error messages about imports that cannot be resolved 14 | python_version = "3.9" # Specifies the Python version used to parse and check the target program 15 | warn_no_return = true # Shows errors for missing return statements on some execution paths 16 | warn_return_any = true # Shows a warning when returning a value with type Any from a function declared with a non- Any return type 17 | -------------------------------------------------------------------------------- /resources/espressif.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/espressif/idf-python-wheels/42ff8a44a53e5ea209d35c70647aebcb352e69be/resources/espressif.ico -------------------------------------------------------------------------------- /resources/html/footer.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /resources/html/header.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Simple index 6 | 7 | 8 | -------------------------------------------------------------------------------- /resources/html/pretty_header.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Simple index 6 | 7 | 8 | 12 |
13 | 14 | 15 | 16 | 40 | -------------------------------------------------------------------------------- /resources/idf-python-wheels_diagram.svg: -------------------------------------------------------------------------------- 1 | 2 | 17 | 36 | 38 | 47 | 56 | build-wheels-platforms.yml 66 | 73 | 83 | Install OS dependencies (os_dependencies directory) 93 | 103 | Build wheels for IDF (build_wheels.py) 116 |   126 | - minimum IDF-supported Python version 136 | 143 | 153 | Fetch all ESP-IDF branches 163 | 173 | Get supported ESP-IDF branches (specified from GitHub variables) 183 | 193 | Get constraints files for each ESP-IDF branches 203 | 213 | Assemble requirements 223 | - create requirements.txt 233 | - remove requirements defined by exclude_list.yaml 243 | - add additional requirements from include_list.yaml 253 | 263 | Build additional requirements 273 | - from include_list.yaml 283 | - print list of requirement to be built 295 | - print statistics 305 | 315 | Raise Error 325 | 332 | 339 | 345 | yes 355 | 1 or more wheel(s) build failed 365 | 375 | Build requirements 385 | - from requirements.txt after exclude 395 | -print list of requirement to be built 405 | - print statistics 415 | 421 | 1 or more wheel(s) build failed 431 | 441 | Get Python version dependent wheels 451 | - check built wheels in the directory 461 | - resolve Python dependent requirements from wheels 471 | -print list of requirement to be built 481 | - create dependent_requirements.txt 491 | 501 | Upload artifacts 511 | - wheels directory 521 | - dependent_requirements.txt 531 | 541 | build-wheels-python-dependent.yml 551 |   561 | - all IDF-supported Python versions 571 | 578 | 588 | Install OS dependencies (os_dependencies directory) 598 | 608 | Build requirements 618 | - build_wheels_from_file.py 628 | 638 | Upload artifacts 648 | - wheels directories 658 | 668 | upload-python-wheels.yml 678 | 685 | 695 | Download artifacts 705 | - wheels directories 715 | 725 | Upload to S3 735 | - upload_wheels.py 745 | - create_index_pages.py 755 | 762 | 769 | 776 | 782 | 789 | 795 | 800 | 806 | 813 | 819 | 826 | 832 | 839 | 845 | 852 | 858 | 865 | 871 | 878 | 884 | 891 | 896 | 902 | 907 | 917 | Raise Error 927 | 934 | 941 | yes 951 | 958 | 964 | 969 | 975 | 980 | 986 | 993 | 999 | 1006 | 1012 | 1019 | 1025 | 1032 | 1038 | 1045 | 1051 | 1058 | 1064 | 1071 | 1077 | 1084 | 1090 | 1097 | 1103 | 1110 | 1116 | 1123 | 1129 | 1136 | 1142 | 1147 | 1153 | 1158 | 1164 | 1165 | -------------------------------------------------------------------------------- /resources/idf-python-wheels_diagram_source.txt: -------------------------------------------------------------------------------- 1 | @startuml 2 | 3 | :build-wheels-platforms.yml; 4 | start 5 | 6 | :Install OS dependencies (os_dependencies directory); 7 | 8 | :Build wheels for IDF (build_wheels.py) 9 | 10 | - minimum IDF-supported Python version; 11 | start 12 | 13 | :Fetch all ESP-IDF branches; 14 | 15 | :Get supported ESP-IDF branches (specified from GitHub variables); 16 | 17 | :Get constraints files for each ESP-IDF branches; 18 | 19 | : Assemble requirements 20 | - create requirements.txt 21 | - remove requirements defined by exclude_list.yaml 22 | - add additional requirements from include_list.yaml; 23 | 24 | : Build additional requirements 25 | - from include_list.yaml 26 | - print list of requirement to be built 27 | - print statistics; 28 | if (1 or more wheel(s) build failed) then (yes) 29 | :Raise Error; 30 | stop 31 | endif 32 | 33 | : Build requirements 34 | - from 
requirements.txt after exclude 35 | - print list of requirement to be built 36 | - print statistics; 37 | if (1 or more wheel(s) build failed) then (yes) 38 | :Raise Error; 39 | stop 40 | endif 41 | 42 | : Get Python version dependent wheels 43 | - check built wheels in the directory 44 | - resolve Python dependent requirements from wheels 45 | - print list of requirement to be built 46 | - create dependent_requirements.txt; 47 | 48 | : Upload artifacts 49 | - wheels directory 50 | - dependent_requirements.txt; 51 | 52 | :build-wheels-python-dependent.yml 53 | 54 | - all IDF-supported Python versions; 55 | start 56 | 57 | :Install OS dependencies (os_dependencies directory); 58 | 59 | : Build requirements 60 | - build_wheels_from_file.py; 61 | 62 | : Upload artifacts 63 | - wheels directories; 64 | 65 | 66 | :upload-python-wheels.yml; 67 | start 68 | 69 | : Download artifacts 70 | - wheels directories; 71 | 72 | : Upload to S3 73 | - upload_wheels.py 74 | - create_index_pages.py; 75 | 76 | stop 77 | 78 | @enduml 79 | -------------------------------------------------------------------------------- /test/test_list.yaml: -------------------------------------------------------------------------------- 1 | # List of Python packages for testing 2 | # platform 3 | - package_name: 'platform' 4 | platform: 'win32' 5 | 6 | - package_name: 'platform' 7 | platform: ['win32', 'linux'] 8 | # version 9 | - package_name: 'version' 10 | version: '<42' 11 | 12 | - package_name: 'version' 13 | version: ['<42', '>50'] 14 | # Python 15 | - package_name: 'python' 16 | python: '>3.10' 17 | 18 | - package_name: 'python' 19 | python: ['>3.10', '!=3.8'] 20 | # version and platform 21 | - package_name: 'version-platform' 22 | version: '<=0.9.0.2' 23 | platform: 'win32' 24 | 25 | - package_name: 'version-platform' 26 | version: ['<=0.9.0.2', '>0.9.1'] 27 | platform: 'win32' 28 | 29 | - package_name: 'version-platform' 30 | version: '<=0.9.0.2' 31 | platform: ['win32', 'linux'] 32 | 33 | - package_name: 'version-platform' 34 | version: ['<=0.9.0.2', '>0.9.1'] 35 | platform: ['win32', 'linux'] 36 | # version and Python 37 | - package_name: 'version-python' 38 | version: '<=0.9.0.2' 39 | python: '<3.8' 40 | 41 | - package_name: 'version-python' 42 | version: ['<=0.9.0.2', '>0.9.1'] 43 | python: '<3.8' 44 | 45 | - package_name: 'version-python' 46 | version: '<=0.9.0.2' 47 | python: ['<3.8', '>3.11'] 48 | 49 | - package_name: 'version-python' 50 | version: ['<=0.9.0.2', '>0.9.1'] 51 | python: ['<3.8', '>3.11'] 52 | # platform and Python 53 | - package_name: 'platform-python' 54 | platform: 'win32' 55 | python: '<3.8' 56 | 57 | - package_name: 'platform-python' 58 | platform: ['win32', 'linux'] 59 | python: '<3.8' 60 | 61 | - package_name: 'platform-python' 62 | platform: 'win32' 63 | python: ['<3.8', '>3.11'] 64 | 65 | - package_name: 'platform-python' 66 | platform: ['win32', 'linux'] 67 | python: ['<3.8', '>3.11'] 68 | # version and platform and Python 69 | - package_name: 'version-platform-python' 70 | version: '<=0.9.0.2' 71 | platform: 'win32' 72 | python: '<3.8' 73 | 74 | - package_name: 'version-platform-python' 75 | version: ['<=0.9.0.2', '>0.9.1'] 76 | platform: 'win32' 77 | python: '<3.8' 78 | 79 | - package_name: 'version-platform-python' 80 | version: ['<=0.9.0.2', '>0.9.1'] 81 | platform: ['win32', 'linux'] 82 | python: '<3.8' 83 | 84 | - package_name: 'version-platform-python' 85 | version: '<=0.9.0.2' 86 | platform: ['win32', 'linux'] 87 | python: '<3.8' 88 | 89 | - package_name: 'version-platform-python' 
90 | version: '<=0.9.0.2' 91 | platform: 'win32' 92 | python: ['<3.8', '>3.11'] 93 | 94 | - package_name: 'version-platform-python' 95 | version: ['<=0.9.0.2', '>0.9.1'] 96 | platform: 'win32' 97 | python: ['<3.8', '>3.11'] 98 | 99 | - package_name: 'version-platform-python' 100 | version: ['<=0.9.0.2', '>0.9.1'] 101 | platform: ['win32', 'linux'] 102 | python: ['<3.8', '>3.11'] 103 | 104 | - package_name: 'version-platform-python' 105 | version: '<=0.9.0.2' 106 | platform: ['win32', 'linux'] 107 | python: ['<3.8', '>3.11'] 108 | -------------------------------------------------------------------------------- /test_build_wheels.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: E501 2 | # line too long skip in ruff for whole file (formatting would be worst than long lines) 3 | # 4 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 5 | # 6 | # SPDX-License-Identifier: Apache-2.0 7 | # 8 | import unittest 9 | 10 | from packaging.requirements import Requirement 11 | 12 | from build_wheels import _change_specifier_logic 13 | from build_wheels import yaml_to_requirement 14 | 15 | 16 | class TestYAMLtoRequirement(unittest.TestCase): 17 | 18 | def test_change_specifier_logic(self): 19 | version_with_specifier = (('>0.9.0.2', '<0.9.0.2'), 20 | ('<0.9.0.2', '>0.9.0.2'), 21 | ('==0.9.0.2', '!=0.9.0.2'), 22 | ('>=0.9.0.2', '<=0.9.0.2'), 23 | ('<=0.9.0.2', '>=0.9.0.2'), 24 | ('!=0.9.0.2', '==0.9.0.2'), 25 | ('===0.9.0.2', '===0.9.0.2'), 26 | ) 27 | 28 | for case in version_with_specifier: 29 | self.assertEqual(f'{_change_specifier_logic(case[0])[0]}{_change_specifier_logic(case[0])[1]}', case[1]) 30 | 31 | def test_yaml_to_requirement(self): 32 | test_requirements = {Requirement("platform;sys_platform == 'win32'"), 33 | Requirement("platform;sys_platform == 'win32' or sys_platform == 'linux'"), 34 | Requirement('version<42'), 35 | Requirement('version<42,>50'), 36 | Requirement("python;python_version > '3.10'"), 37 | Requirement("python;python_version > '3.10' and python_version != '3.8'"), 38 | Requirement("version-platform<=0.9.0.2;sys_platform == 'win32'"), 39 | Requirement("version-platform<=0.9.0.2,>0.9.1;sys_platform == 'win32'"), 40 | Requirement("version-platform<=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux'"), 41 | Requirement("version-platform<=0.9.0.2,>0.9.1;sys_platform == 'win32' or sys_platform == 'linux'"), 42 | Requirement("version-python<=0.9.0.2;python_version < '3.8'"), 43 | Requirement("version-python<=0.9.0.2,>0.9.1;python_version < '3.8'"), 44 | Requirement("version-python<=0.9.0.2;python_version < '3.8' and python_version > '3.11'"), 45 | Requirement("version-python<=0.9.0.2,>0.9.1;python_version < '3.8' and python_version > '3.11'"), 46 | Requirement("platform-python;sys_platform == 'win32' and python_version < '3.8'"), 47 | Requirement("platform-python;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8'"), 48 | Requirement("platform-python;sys_platform == 'win32' and python_version < '3.8' and python_version > '3.11'"), 49 | Requirement("platform-python;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8' and python_version > '3.11'"), 50 | Requirement("version-platform-python<=0.9.0.2;sys_platform == 'win32' and python_version < '3.8'"), 51 | Requirement("version-platform-python<=0.9.0.2,>0.9.1;sys_platform == 'win32' and python_version < '3.8'"), 52 | Requirement("version-platform-python<=0.9.0.2,>0.9.1;sys_platform == 'win32' or 
sys_platform == 'linux' and python_version < '3.8'"), 53 | Requirement("version-platform-python<=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8'"), 54 | Requirement("version-platform-python<=0.9.0.2;sys_platform == 'win32' and python_version < '3.8' and python_version > '3.11'"), 55 | Requirement("version-platform-python<=0.9.0.2,>0.9.1;sys_platform == 'win32' and python_version < '3.8' and python_version > '3.11'"), 56 | Requirement("version-platform-python<=0.9.0.2,>0.9.1;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8' and python_version > '3.11'"), 57 | Requirement("version-platform-python<=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8' and python_version > '3.11'"), 58 | } 59 | 60 | self.assertEqual(yaml_to_requirement('test/test_list.yaml'), test_requirements) 61 | 62 | 63 | def test_yaml_to_requirement_exclude(self): 64 | test_requirements_exclude = {Requirement("platform;sys_platform != 'win32'"), 65 | Requirement("platform;sys_platform != 'win32' or sys_platform != 'linux'"), 66 | Requirement('version>42'), 67 | Requirement('version>42,<50'), 68 | Requirement("python;python_version < '3.10'"), 69 | Requirement("python;python_version < '3.10' and python_version == '3.8'"), 70 | Requirement("version-platform>=0.9.0.2;sys_platform == 'win32'"), 71 | Requirement("version-platform;sys_platform != 'win32'"), 72 | Requirement("version-platform>=0.9.0.2,<0.9.1;sys_platform == 'win32'"), 73 | Requirement("version-platform;sys_platform != 'win32'"), 74 | Requirement("version-platform>=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux'"), 75 | Requirement("version-platform;sys_platform != 'win32' or sys_platform != 'linux'"), 76 | Requirement("version-platform>=0.9.0.2,<0.9.1;sys_platform == 'win32' or sys_platform == 'linux'"), 77 | Requirement("version-platform;sys_platform != 'win32' or sys_platform != 'linux'"), 78 | Requirement("version-python>=0.9.0.2;python_version < '3.8'"), 79 | Requirement("version-python;python_version > '3.8'"), 80 | Requirement("version-python>=0.9.0.2,<0.9.1;python_version < '3.8'"), 81 | Requirement("version-python;python_version > '3.8'"), 82 | Requirement("version-python>=0.9.0.2;python_version < '3.8' and python_version > '3.11'"), 83 | Requirement("version-python;python_version > '3.8' and python_version < '3.11'"), 84 | Requirement("version-python>=0.9.0.2,<0.9.1;python_version < '3.8' and python_version > '3.11'"), 85 | Requirement("version-python;python_version > '3.8' and python_version < '3.11'"), 86 | Requirement("platform-python;sys_platform != 'win32' and python_version > '3.8'"), 87 | Requirement("platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8'"), 88 | Requirement("platform-python;sys_platform != 'win32' and python_version > '3.8' and python_version < '3.11'"), 89 | Requirement("platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8' and python_version < '3.11'"), 90 | Requirement("version-platform-python>=0.9.0.2;sys_platform == 'win32' and python_version < '3.8'"), 91 | Requirement("version-platform-python;sys_platform != 'win32' and python_version > '3.8'"), 92 | Requirement("version-platform-python>=0.9.0.2,<0.9.1;sys_platform == 'win32' and python_version < '3.8'"), 93 | Requirement("version-platform-python;sys_platform != 'win32' and python_version > '3.8'"), 94 | Requirement("version-platform-python>=0.9.0.2,<0.9.1;sys_platform == 'win32' or 
sys_platform == 'linux' and python_version < '3.8'"), 95 | Requirement("version-platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8'"), 96 | Requirement("version-platform-python>=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8'"), 97 | Requirement("version-platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8'"), 98 | Requirement("version-platform-python>=0.9.0.2;sys_platform == 'win32' and python_version < '3.8' and python_version > '3.11'"), 99 | Requirement("version-platform-python;sys_platform != 'win32' and python_version > '3.8' and python_version < '3.11'"), 100 | Requirement("version-platform-python>=0.9.0.2,<0.9.1;sys_platform == 'win32' and python_version < '3.8' and python_version > '3.11'"), 101 | Requirement("version-platform-python;sys_platform != 'win32' and python_version > '3.8' and python_version < '3.11'"), 102 | Requirement("version-platform-python>=0.9.0.2,<0.9.1;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8' and python_version > '3.11'"), 103 | Requirement("version-platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8' and python_version < '3.11'"), 104 | Requirement("version-platform-python>=0.9.0.2;sys_platform == 'win32' or sys_platform == 'linux' and python_version < '3.8' and python_version > '3.11'"), 105 | Requirement("version-platform-python;sys_platform != 'win32' or sys_platform != 'linux' and python_version > '3.8' and python_version < '3.11'"), 106 | } 107 | 108 | self.assertEqual(yaml_to_requirement('test/test_list.yaml', exclude=True), test_requirements_exclude) 109 | 110 | 111 | if __name__ == '__main__': 112 | unittest.main() 113 | -------------------------------------------------------------------------------- /upload_wheels.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | """This script uploads wheel files from the downloaded wheels directory to S3 bucket. 
7 | - argument S3 bucket 8 | """ 9 | import os 10 | import re 11 | import sys 12 | 13 | import boto3 14 | 15 | 16 | s3 = boto3.resource('s3') 17 | try: 18 | BUCKET = s3.Bucket(sys.argv[1]) 19 | except IndexError: 20 | raise SystemExit('Error: S3 bucket name not provided.') 21 | 22 | WHEELS_DIR = f'{os.path.curdir}{(os.sep)}downloaded_wheels' 23 | if not os.path.exists(WHEELS_DIR): 24 | raise SystemExit(f'Error: The wheels directory {WHEELS_DIR} not found.') 25 | 26 | wheels_subdirs = os.listdir(WHEELS_DIR) 27 | 28 | for subdir in wheels_subdirs: 29 | wheel_files = os.listdir(f'{WHEELS_DIR}{os.sep}{subdir}') 30 | 31 | for wheel in wheel_files: 32 | pattern = re.compile(r'(\w*)-(\d+)') 33 | match = pattern.search(wheel) 34 | if match: 35 | wheel_name = match.group(1) 36 | 37 | wheel_name = wheel_name.lower() 38 | wheel_name = wheel_name.replace('_', '-') 39 | 40 | BUCKET.upload_file(f'{WHEELS_DIR}{os.sep}{subdir}{os.sep}{wheel}', f'pypi/{wheel_name}/{wheel}') 41 | print(f'Uploaded {wheel}') 42 | -------------------------------------------------------------------------------- /yaml_list_adapter.py: -------------------------------------------------------------------------------- 1 | # 2 | # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD 3 | # 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | import re 7 | 8 | import yaml 9 | from colorama import Fore 10 | from packaging.requirements import Requirement 11 | 12 | from _helper_functions import merge_requirements 13 | from _helper_functions import print_color 14 | 15 | 16 | class YAMLListAdapter(): 17 | """Class for loading list of requirements defined in exclude or include lists (YAML files) 18 | with conversion method to packaging.requirements Requirement. 19 | 20 | Requirement is used because pip can directly work with this format. 21 | 22 | When YAML file is loaded, the packages with the same names (package duplicates) are combined into one requirement. 23 | Except when the packages has the different version specified, then it is considered as another requirement. 24 | 25 | The output from this class is a set of requirements (set of Requirement types) 26 | which can be directly used with pip or further processed. 27 | Sets are used to avoid exact duplicates and to keep the requirements unique. 28 | 29 | 30 | - yaml_list ... list of requirements defined in YAML file 31 | - exclude ... boolean to set the logic of the requirements (if True, yaml_list logic is inverted) 32 | - requirements ... set of requirements (types Requirement) which can be directly used with pip 33 | ------------------------------------- 34 | 35 | ### TERMINOLOGY: 36 | 37 | (YAML file ... Requirement (https://packaging.pypa.io/en/stable/requirements.html#packaging.requirements.Requirement.extras)) 38 | - package_name ... NAME of the package/requirement 39 | - version ... (version) SPECIFIER of the package/requirement 40 | - platform ... (sys_platform) MARKER for the package/requirement 41 | - python ... 
(python_version) MARKER for the package/requirement 42 | 43 | ------------------------------------- 44 | 45 | ### EXAMPLE: 46 | - package_name: numpy\n 47 | \tversion: "<1.20"\n 48 | \tplatform: "win32"\n 49 | \tpython: ">=3.6" 50 | 51 | --> 52 | 53 | # with exclude=False\n 54 | Requirement('numpy<1.20; sys_platform == "win32" and python_version >= "3.6"') 55 | 56 | # with exclude=True (to preserve logic for pip another requirement needs to be added)\n 57 | Requirement('numpy>=1.20; sys_platform == "win32" or python_version >= "3.6"')\n 58 | Requirement('numpy<1.20; sys_platform != "win32" or python_version < "3.6"') 59 | """ 60 | 61 | yaml_list:list = list() 62 | exclude:bool = False 63 | requirements:set = set() 64 | 65 | 66 | def __init__(self, yaml_file:str, exclude:bool = False) -> None: 67 | try: 68 | with open(yaml_file, 'r') as f: 69 | self._yaml_list = yaml.load(f, yaml.Loader) 70 | except FileNotFoundError: 71 | print_color(f'File not found, please check the file: {yaml_file}', Fore.RED) 72 | self.exclude = exclude 73 | 74 | # Assemble duplicates of requirements/packages with the same name and remove them from the YAML list 75 | _requirement_duplicates = self._assemble_requirements_duplicates() 76 | # Convert YAML list to set of requirements without duplicates 77 | self.requirements = self._yaml_to_requirement(self._yaml_list, exclude=self.exclude) 78 | # Combine requirements/packages with the same name (duplicates) 79 | # into one requirement and replaces original requirements 80 | _combine_package_duplicates(self, _requirement_duplicates) 81 | 82 | 83 | def _change_specifier_logic(self, spec_with_text: str) -> tuple: 84 | """Change specifier logic to opposite 85 | e.g. "<1.20" will be ">=1.20" 86 | - this function is used for exclude_list.yaml to change the logic of (version) specifier 87 | to create opposite logic for Requirement installed by pip 88 | 89 | - return (new_version_specifier, text_after(version number), original_version_specifier) 90 | """ 91 | pattern = re.compile(r'(===|==|!=|~=|<=?|>=?|===?)\s*(.*)') 92 | try: 93 | match = pattern.match(spec_with_text) 94 | if match: 95 | str_match:tuple = match.groups() 96 | except AttributeError: 97 | print_color(f'Unexpected version specifier: {spec_with_text}', Fore.YELLOW) 98 | raise SystemExit() 99 | 100 | ver_specifier, text = str_match # e.g. ('<', '1.20') 101 | 102 | for old, new in (('<', '>='), 103 | ('>', '<='), 104 | ('<=', '>'), 105 | ('>=', '<'), 106 | ('!', '='), 107 | ('~', '!'), 108 | ('===', '==='), # not changed specifier for arbitrary equality defined by PEP440 109 | # (https://packaging.python.org/en/latest/specifications/version-specifiers/#arbitrary-equality) 110 | ('==', '!=')): 111 | if old in ver_specifier: 112 | new_ver_spec = ver_specifier.replace(old, new) 113 | break 114 | return (new_ver_spec, text, ver_specifier) 115 | 116 | 117 | def _yaml_to_requirement(self, yaml:list, exclude: bool = False) -> set: 118 | """Converts YAML defined requirement into packaging.requirements Requirement 119 | which can be directly used with pip. 120 | 121 | Markers (platform and python) are ANDed between and multiple values of the marker are ORed between. 122 | 123 | When exclude is set to True, the logic of the Requirement is changed to be excluded by pip. 124 | To preserve the logic, another requirement needs to be added 125 | when exclusion is only for platform or python version. 
126 | 127 | ------------------------------------- 128 | 129 | ### EXAMPLE for exclude=True: 130 | 131 | - requirement from ESP-IDF is click>=7.0 132 | 133 | #### -- in exclude_list.yaml is defined --> 134 | 135 | - package_name: click\n 136 | \tversion: ['>8', '==7.2']\n 137 | \tplatform: "win32"\n 138 | 139 | #### -- the output will be --> 140 | 141 | -- click>=7.0 # remove requirement\n 142 | ++ click>=7.0; sys_platform != "win32" # add requirement constraining platform\n 143 | ++ click!=7.2,<=8; sys_platform == "win32" # add requirement constraining version on supported platform 144 | 145 | """ 146 | yaml_list:list = yaml 147 | 148 | requirements_set: set[Requirement] = set() 149 | 150 | if not yaml_list: 151 | return requirements_set 152 | 153 | for package in yaml_list: 154 | # get attributes of the package if defined to reduce unnecessary complexity 155 | package_version = package['version'] if 'version' in package else '' 156 | package_platform = package['platform'] if 'platform' in package else '' 157 | package_python = package['python'] if 'python' in package else '' 158 | 159 | requirement_str_list = [f"{package['package_name']}"] 160 | 161 | # if package has version specifier, process it and add to the requirement 162 | if package_version: 163 | if not isinstance(package_version, list): 164 | new_spec, ver, old_spec = self._change_specifier_logic(package_version) 165 | requirement_str_list.append( 166 | f'{new_spec}{ver}' if exclude else f'{old_spec}{ver}' 167 | ) 168 | 169 | else: # list of version specifiers defined 170 | version_list = [] 171 | for elem in package_version: 172 | new_spec, ver, old_spec = self._change_specifier_logic(elem) 173 | if exclude: 174 | version_list.append(f'{new_spec}{ver}') 175 | else: 176 | version_list.append(f'{old_spec}{ver}') 177 | 178 | requirement_str_list.append(','.join(version_list)) 179 | 180 | # if package has platform markers defined, add it to the requirement 181 | if package_platform or package_python: 182 | requirement_str_list.append('; ') 183 | 184 | if package_platform and not package_version: 185 | if not isinstance(package_platform, list): 186 | requirement_str_list.append(( 187 | f"sys_platform != '{package_platform}'" if exclude 188 | else f"sys_platform == '{package_platform}'" 189 | )) 190 | 191 | else: # list of platforms defined 192 | platform_list = ( 193 | [f"sys_platform != '{plf}'" if exclude 194 | else f"sys_platform == '{plf}'" for plf in package_platform] 195 | ) 196 | 197 | requirement_str_list.append(' or '.join(platform_list)) 198 | 199 | if exclude and (package_platform or package_python) and package_version: 200 | requirement_old_str_list = [f"{package['package_name']}; "] 201 | 202 | if package_platform and package_version: 203 | if not isinstance(package_platform, list): 204 | requirement_str_list.append(f"sys_platform == '{package_platform}'") 205 | 206 | if exclude: 207 | requirement_old_str_list.append(f"sys_platform != '{package_platform}'") 208 | 209 | else: 210 | platform_list = [f"sys_platform == '{plf}'" for plf in package_platform] 211 | requirement_str_list.append(' or '.join(platform_list)) 212 | 213 | if exclude: 214 | platform_list_old = [f"sys_platform != '{plf}'" for plf in package_platform] 215 | requirement_old_str_list.append(' or '.join(platform_list_old)) 216 | 217 | if package_platform and package_python: 218 | requirement_str_list.append(' and ') 219 | 220 | if exclude and (package_platform and package_python) and package_version: 221 | requirement_old_str_list.append(' and ') 222 | 223 
| # if package has python markers defined, add it to the requirement 224 | if package_python and not package_version: 225 | if not isinstance(package_python, list): 226 | new_spec, text_after, old_spec = self._change_specifier_logic(package_python) 227 | requirement_str_list.append(( 228 | f"python_version {new_spec} '{text_after}'" if exclude 229 | else f"python_version {old_spec} '{text_after}'" 230 | )) 231 | 232 | else: # list of python versions defined 233 | python_list = [] 234 | for elem in package_python: 235 | new_spec, text_after, old_spec = self._change_specifier_logic(elem) 236 | if exclude: 237 | python_list.append(f"python_version {new_spec} '{text_after}'") 238 | else: 239 | python_list.append(f"python_version {old_spec} '{text_after}'") 240 | 241 | requirement_str_list.append(' and '.join(python_list)) 242 | 243 | if package_python and package_version: 244 | 245 | if not isinstance(package_python, list): 246 | new_spec, text_after, old_spec = self._change_specifier_logic(package_python) 247 | requirement_str_list.append(f"python_version {old_spec} '{text_after}'") 248 | 249 | if exclude: 250 | requirement_old_str_list.append(f"python_version {new_spec} '{text_after}'") 251 | 252 | else: 253 | python_list = [] 254 | python_list_old = [] 255 | for elem in package_python: 256 | new_spec, text_after, old_spec = self._change_specifier_logic(elem) 257 | 258 | python_list.append(f"python_version {old_spec} '{text_after}'") 259 | if exclude: 260 | python_list_old.append(f"python_version {new_spec} '{text_after}'") 261 | requirement_str_list.append('' + ' and '.join(python_list)) 262 | 263 | if exclude: 264 | requirement_old_str_list.append(' and '.join(python_list_old)) 265 | 266 | if exclude and (package_platform or package_python) and package_version: 267 | requirements_set.add(Requirement(''.join(requirement_old_str_list))) 268 | 269 | requirements_set.add(Requirement(''.join(requirement_str_list))) 270 | return requirements_set 271 | 272 | 273 | def _assemble_requirements_duplicates(self): 274 | """Creates dictionary of requirements with the same requirement/package name for further processing. 
275 | - key is the name of the requirement/package and value is a set of requirements (types Requirement) 276 | - different version of package is considered as another requirement, not duplicate which is combined 277 | 278 | ------------------------------------- 279 | ### EXAMPLE: 280 | #### - YAML list defined requirements (exclude_list.yaml) 281 | #dbus-python can not be build on Windows\n 282 | - package_name: 'dbus-python'\n 283 | \tplatform: ['win32'] 284 | 285 | #dbus-python can not be build with Python > 3.11 on MacOS\n 286 | - package_name: 'dbus-python'\n 287 | \tplatform: 'darwin'\n 288 | \tpython: '>3.11' 289 | 290 | - package_name: 'dbus-python'\n 291 | \tplatform: 'linux' 292 | 293 | #### -- will assemble following dictionary (exclude=True) --> 294 | {'dbus-python': {, 295 | }} 296 | 297 | #### - Also removes requirement/package from the YAML list except first occurrence 298 | """ 299 | duplicates_dict = {} 300 | for i, requirement in enumerate(self._yaml_list): 301 | package_name = requirement['package_name'] 302 | 303 | for next_requirement in self._yaml_list[i+1:]: 304 | if next_requirement['package_name'] == package_name and next_requirement != requirement: 305 | if package_name not in duplicates_dict: 306 | duplicates_dict[package_name] = set() 307 | 308 | if 'version' in next_requirement: 309 | # Different version of package is not considered as duplicate, but new requirement 310 | continue 311 | 312 | duplicates_dict[package_name].add( 313 | list(self._yaml_to_requirement([next_requirement.copy()], self.exclude))[0] 314 | ) 315 | self._yaml_list.remove(next_requirement) 316 | 317 | return duplicates_dict 318 | 319 | 320 | def _combine_package_duplicates(self, requirement_duplicates:dict): 321 | """Combines requirements/packages of the YAMLListAdapter with the requirement/package 322 | (duplicates from assembled dict) and replaces the original requirements set. 
323 | 324 | ------------------------------------- 325 | 326 | ### EXAMPLE: 327 | #### - YAML list defined requirements (exclude_list.yaml) 328 | #dbus-python can not be build on Windows\n 329 | - package_name: 'dbus-python'\n 330 | \tplatform: ['win32'] 331 | 332 | #dbus-python can not be build with Python > 3.11 on MacOS\n 333 | - package_name: 'dbus-python'\n 334 | \tplatform: 'darwin'\n 335 | \tpython: '>3.11' 336 | 337 | - package_name: 'dbus-python'\n 338 | \tplatform: 'linux' 339 | 340 | #### - Assembled duplicates dictionary (exclude=True) 341 | {'dbus-python': {, 342 | }} 343 | 344 | #### -- will replace original requirement with following (exclude=True) --> 345 | dbus-python;sys_platform == "linux" and ( 346 | (sys_platform != "win32" and ( 347 | sys_platform != "darwin" and python_version <= "3.11")) and sys_platform != "linux") 348 | 349 | #### - directly used with pip constraining the installation of dbus-python defined in exclude_list.yaml 350 | """ 351 | new_requirements:set = set() 352 | for requirement in self.requirements: 353 | if requirement.name in requirement_duplicates: 354 | # empty strings for new version specifier and marker because it is added every time to new requirement 355 | for duplicate in requirement_duplicates[requirement.name]: 356 | # rewrite requirement to continuously merge any following duplicate 357 | requirement = merge_requirements(requirement, duplicate) #new_requirement 358 | # add new requirement or unchanged requirement to the set of requirements 359 | new_requirements.add(requirement) 360 | # replace original requirements with new requirements 361 | self.requirements = new_requirements 362 | --------------------------------------------------------------------------------
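To tie the listing together, a small usage sketch (not part of the repository) of how the upload stage might be driven outside the GitHub workflows. Assumptions: 'my-dl-bucket' is a placeholder bucket name, AWS credentials come from the standard boto3 environment, and downloaded_wheels/ contains one subdirectory per build artifact, which is the layout upload_wheels.py expects:

# Illustrative only -- not part of the repository.
import subprocess
import sys

BUCKET = 'my-dl-bucket'  # placeholder bucket name

# upload_wheels.py walks downloaded_wheels/<artifact>/<wheel> and uploads every wheel
# to pypi/<normalized-package-name>/<wheel> in the given bucket
subprocess.run([sys.executable, 'upload_wheels.py', BUCKET], check=True)

# create_index_pages.py then regenerates pypi/index.html, pypi/pretty/index.html and the
# per-package pypi/<name>/index.html pages from the current bucket contents
subprocess.run([sys.executable, 'create_index_pages.py', BUCKET], check=True)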