├── .ci └── requirements-cibw.txt ├── .clang-format ├── .github └── workflows │ ├── install.sh │ ├── lint.yml │ ├── linux-install.sh │ ├── macos-install.sh │ ├── test.yml │ ├── wheels-dependencies.sh │ ├── wheels-test.ps1 │ ├── wheels-test.sh │ └── wheels.yml ├── .gitignore ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── README.md ├── codecov.yml ├── conftest.py ├── depends └── install_libavif.sh ├── pyproject.toml ├── setup.cfg ├── setup.py ├── src └── pillow_avif │ ├── AvifImagePlugin.py │ ├── __init__.py │ └── _avif.c ├── tests ├── __init__.py ├── helper.py ├── images │ ├── chi.gif │ ├── chimera-missing-pixi.avif │ ├── exif.avif │ ├── flower.jpg │ ├── hopper.avif │ ├── hopper.ppm │ ├── hopper_avif_write.png │ ├── icc_profile.avif │ ├── icc_profile_none.avif │ ├── rgba10.heif │ ├── star.avifs │ ├── star.gif │ ├── star.png │ ├── star180.png │ ├── star270.png │ ├── star90.png │ ├── transparency.avif │ ├── xmp_tags_orientation.avif │ └── xmp_tags_orientation.png └── test_file_avif.py ├── tox.ini ├── wheelbuild ├── aom-2.0.2-manylinux1.patch ├── aom-fix-stack-size.patch ├── build.sh ├── config.sh ├── dependency_licenses │ ├── AOM.txt │ ├── DAV1D.txt │ ├── LIBGAV1.txt │ ├── LIBYUV.txt │ ├── PATENTS │ ├── RAV1E.txt │ └── SVT-AV1.txt ├── libavif-1.0.1-local-static.patch └── toolchain-arm64-macos.cmake └── winbuild ├── Findrav1e.cmake └── build_prepare.py /.ci/requirements-cibw.txt: -------------------------------------------------------------------------------- 1 | cibuildwheel==2.23.0 2 | -------------------------------------------------------------------------------- /.clang-format: -------------------------------------------------------------------------------- 1 | # A clang-format style that approximates Python's PEP 7 2 | # Useful for IDE integration 3 | BasedOnStyle: Google 4 | AlwaysBreakAfterReturnType: All 5 | AllowShortIfStatementsOnASingleLine: false 6 | AlignAfterOpenBracket: AlwaysBreak 7 | BinPackArguments: false 8 | BinPackParameters: false 9 | BreakBeforeBraces: Attach 10 | ColumnLimit: 88 11 | DerivePointerAlignment: false 12 | IndentWidth: 4 13 | Language: Cpp 14 | PointerAlignment: Right 15 | ReflowComments: true 16 | SortIncludes: false 17 | SpaceBeforeParens: ControlStatements 18 | SpacesInParentheses: false 19 | TabWidth: 4 20 | UseTab: Never 21 | StatementMacros: 22 | - PyObject_HEAD 23 | - Py_BEGIN_ALLOW_THREADS 24 | - Py_END_ALLOW_THREADS 25 | -------------------------------------------------------------------------------- /.github/workflows/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eo pipefail 3 | 4 | if uname -s | grep -q Darwin; then 5 | $(dirname $0)/macos-install.sh 6 | else 7 | $(dirname $0)/linux-install.sh 8 | fi 9 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v4 11 | 12 | - uses: reviewdog/action-setup@v1 13 | 14 | - name: Set environment variables 15 | run: | 16 | echo "REPORTER=${{ github.event_name == 'pull_request' && 'github-pr-review' || 'github-check' }}" >> $GITHUB_ENV 17 | echo "REVIEWDOG_GITHUB_API_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> $GITHUB_ENV 18 | 19 | - name: clang-format 20 | uses: DoozyX/clang-format-lint-action@v0.14 21 | with: 22 | source: 'src' 23 | clangFormatVersion: 14 24 | 
inplace: True 25 | 26 | - name: Report clang-format 27 | run: | 28 | wget http://misc.nalajcie.org/reviewdog -O /tmp/reviewdog 2>/dev/null 29 | chmod +x /tmp/reviewdog 30 | /tmp/reviewdog -filter-mode=nofilter -f diff -name="clang-format" -f.diff.strip=1 -reporter=${{ env.REPORTER }} 31 | git stash -u && git stash drop ||: 32 | 33 | - name: flake8 34 | uses: reviewdog/action-flake8@v3 35 | with: 36 | github_token: ${{ secrets.GITHUB_TOKEN }} 37 | reporter: ${{ env.REPORTER }} 38 | filter_mode: nofilter 39 | 40 | - name: black 41 | uses: reviewdog/action-black@v3 42 | with: 43 | github_token: ${{ secrets.GITHUB_TOKEN }} 44 | reporter: ${{ env.REPORTER }} 45 | filter_mode: nofilter 46 | 47 | -------------------------------------------------------------------------------- /.github/workflows/linux-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | aptget() 6 | { 7 | if [ $(id -u) == 0 ]; then 8 | apt-get "$@" 9 | else 10 | sudo apt-get "$@" 11 | fi 12 | } 13 | 14 | aptget_update() 15 | { 16 | if [ ! -z $1 ]; then 17 | echo "" 18 | echo "Retrying apt-get update..." 19 | echo "" 20 | fi 21 | output=`aptget update 2>&1` 22 | echo "$output" 23 | if [[ $output == *[WE]:\ * ]]; then 24 | return 1 25 | fi 26 | } 27 | aptget_update || aptget_update retry || aptget_update retry 28 | 29 | set -e 30 | 31 | aptget -qq install zlib1g-dev libpng-dev libjpeg-dev sudo \ 32 | libxml2-dev libffi-dev libxslt-dev cmake ninja-build nasm 33 | 34 | if [ "$GHA_PYTHON_VERSION" == "2.7" ]; then 35 | python2 -m pip install tox tox-gh-actions 36 | aptget install -y python3-pip 37 | else 38 | python3 -m pip install 'tox<4' tox-gh-actions 39 | fi 40 | 41 | python3 -m pip install -U pip 42 | python3 -m pip install -U wheel 43 | python3 -m pip install setuptools wheel 44 | 45 | export PATH="$HOME/.local/bin:$PATH" 46 | 47 | # libavif 48 | if [ ! -d depends/libavif-$LIBAVIF_VERSION ]; then 49 | pushd depends && ./install_libavif.sh && popd 50 | fi 51 | pushd depends/libavif-$LIBAVIF_VERSION/build 52 | sudo make install 53 | popd 54 | -------------------------------------------------------------------------------- /.github/workflows/macos-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # See https://github.com/actions/runner-images/issues/9471 for why we have 6 | # brew unlink and brew link commands here 7 | brew unlink cmake || true 8 | brew reinstall cmake || true 9 | brew link --overwrite cmake 10 | 11 | brew install dav1d aom rav1e 12 | 13 | if [ "$GHA_PYTHON_VERSION" == "2.7" ]; then 14 | python2 -m pip install -U tox tox-gh-actions 15 | else 16 | python3 -m pip install -U 'tox<4' tox-gh-actions 17 | fi 18 | 19 | # libavif 20 | if [ ! 
-d depends/libavif-$LIBAVIF_VERSION ]; then 21 | pushd depends && ./install_libavif.sh && popd 22 | fi 23 | pushd depends/libavif-$LIBAVIF_VERSION/build 24 | make install 25 | popd 26 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | push: 6 | jobs: 7 | build: 8 | 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | os: [ 13 | "ubuntu-22.04", 14 | "macos-13", 15 | ] 16 | python-version: [ 17 | "pypy-3.7", 18 | "3.12", 19 | "3.11", 20 | "3.10", 21 | "3.9", 22 | "3.8", 23 | "3.7", 24 | "2.7", 25 | ] 26 | libavif-version: [ "0.11.0" ] 27 | include: 28 | - python-version: "3.9" 29 | os: "ubuntu-22.04" 30 | libavif-version: "1d469864478de5686a13c06b5539416ac68d98d7" 31 | - python-version: "3.7" 32 | PYTHONOPTIMIZE: 1 33 | - python-version: "3.8" 34 | PYTHONOPTIMIZE: 2 35 | # Include new variables for Codecov 36 | - os: ubuntu-22.04 37 | codecov-flag: GHA_Ubuntu 38 | - os: macos-13 39 | codecov-flag: GHA_macOS 40 | exclude: 41 | - python-version: "2.7" 42 | os: "macos-13" 43 | 44 | runs-on: ${{ matrix.os }} 45 | name: ${{ matrix.os }} Python ${{ matrix.python-version }} (libavif ${{ matrix.libavif-version }}) 46 | container: 47 | image: ${{ matrix.python-version == '2.7' && 'python:2.7-buster' || null }} 48 | 49 | env: 50 | LIBAVIF_VERSION: ${{ matrix.libavif-version }} 51 | 52 | steps: 53 | - uses: actions/checkout@v4 54 | 55 | - name: Set up Python ${{ matrix.python-version }} 56 | if: matrix.python-version != '2.7' 57 | uses: actions/setup-python@v4 58 | with: 59 | python-version: ${{ matrix.python-version }} 60 | 61 | - name: Cache build 62 | id: build-cache 63 | uses: actions/cache@v3 64 | with: 65 | path: depends/libavif-${{ env.LIBAVIF_VERSION }} 66 | key: 67 | ${{ env.LIBAVIF_VERSION }}-${{ hashFiles('.github/workflows/*.sh', '.github/workflows/test.yml', 'depends/*') }}-${{ matrix.os }} 68 | 69 | - name: Install nasm 70 | uses: ilammy/setup-nasm@v1 71 | with: 72 | version: 2.15.05 73 | 74 | - name: Install dependencies 75 | run: | 76 | .github/workflows/install.sh 77 | env: 78 | GHA_PYTHON_VERSION: ${{ matrix.python-version }} 79 | 80 | - name: Test 81 | run: | 82 | tox 83 | env: 84 | PYTHONOPTIMIZE: ${{ matrix.PYTHONOPTIMIZE }} 85 | 86 | - name: Prepare to upload errors 87 | if: failure() 88 | run: | 89 | mkdir -p tests/errors 90 | 91 | - name: Upload errors 92 | uses: actions/upload-artifact@v4 93 | if: failure() 94 | with: 95 | name: errors 96 | path: tests/errors 97 | 98 | - name: Combine coverage 99 | run: tox -e coverage-report 100 | env: 101 | CODECOV_NAME: ${{ matrix.os }} Python ${{ matrix.python-version }} 102 | 103 | - name: Upload coverage 104 | uses: codecov/codecov-action@v5 105 | with: 106 | files: ./coverage.xml 107 | 108 | msys: 109 | runs-on: windows-latest 110 | 111 | name: MinGW 112 | 113 | defaults: 114 | run: 115 | shell: bash.exe --login -eo pipefail "{0}" 116 | 117 | env: 118 | MSYSTEM: MINGW64 119 | CHERE_INVOKING: 1 120 | 121 | timeout-minutes: 30 122 | 123 | steps: 124 | - uses: actions/checkout@v4 125 | 126 | - name: Set up shell 127 | run: echo "C:\msys64\usr\bin\" >> $env:GITHUB_PATH 128 | shell: pwsh 129 | 130 | - name: Install dependencies 131 | run: | 132 | pacman -S --noconfirm \ 133 | base-devel \ 134 | git \ 135 | mingw-w64-x86_64-gcc \ 136 | mingw-w64-x86_64-toolchain \ 137 | mingw-w64-x86_64-python-pip \ 138 | mingw-w64-x86_64-python-setuptools \ 139 | 
mingw-w64-x86_64-python-pillow \ 140 | mingw-w64-x86_64-python-pytest \ 141 | mingw-w64-x86_64-python-pytest-cov \ 142 | mingw-w64-x86_64-libjpeg-turbo \ 143 | mingw-w64-x86_64-libtiff \ 144 | mingw-w64-x86_64-libpng \ 145 | mingw-w64-x86_64-openjpeg2 \ 146 | mingw-w64-x86_64-zlib \ 147 | mingw-w64-x86_64-libavif 148 | 149 | - name: Build pillow-avif-plugin 150 | run: CFLAGS="-coverage" python3 -m pip install . 151 | 152 | - name: Test pillow-avif-plugin 153 | run: | 154 | python3 -m pytest -vx --cov pillow_avif --cov tests --cov-report term --cov-report xml tests 155 | 156 | - name: Upload coverage 157 | uses: codecov/codecov-action@v5 158 | with: 159 | files: ./coverage.xml 160 | flags: GHA_Windows 161 | name: "MSYS2 MinGW" 162 | success: 163 | needs: [build, msys] 164 | runs-on: ubuntu-latest 165 | name: Test Successful 166 | steps: 167 | - name: Success 168 | run: echo Test Successful 169 | -------------------------------------------------------------------------------- /.github/workflows/wheels-dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Define custom utilities 3 | # Setup that needs to be done before multibuild utils are invoked 4 | PROJECTDIR=$(pwd) 5 | if [[ "$(uname -s)" == "Darwin" ]]; then 6 | # Safety check - macOS builds require that CIBW_ARCHS is set, and that it 7 | # only contains a single value (even though cibuildwheel allows multiple 8 | # values in CIBW_ARCHS). 9 | if [[ -z "$CIBW_ARCHS" ]]; then 10 | echo "ERROR: Pillow macOS builds require CIBW_ARCHS be defined." 11 | exit 1 12 | fi 13 | if [[ "$CIBW_ARCHS" == *" "* ]]; then 14 | echo "ERROR: Pillow macOS builds only support a single architecture in CIBW_ARCHS." 15 | exit 1 16 | fi 17 | 18 | # Build macOS dependencies in `build/darwin` 19 | # Install them into `build/deps/darwin` 20 | export WORKDIR=$(pwd)/build/darwin 21 | export BUILD_PREFIX=$(pwd)/build/deps/darwin 22 | else 23 | # Build prefix will default to /usr/local 24 | export WORKDIR=$(pwd)/build 25 | export MB_ML_LIBC=${AUDITWHEEL_POLICY::9} 26 | export MB_ML_VER=${AUDITWHEEL_POLICY:9} 27 | fi 28 | export PLAT="${CIBW_ARCHS:-$AUDITWHEEL_ARCH}" 29 | 30 | source multibuild/common_utils.sh 31 | source multibuild/library_builders.sh 32 | if [ -z "$IS_MACOS" ]; then 33 | source multibuild/manylinux_utils.sh 34 | fi 35 | 36 | source wheelbuild/config.sh 37 | 38 | function build_pkg_config { 39 | if [ -e pkg-config-stamp ]; then return; fi 40 | # This essentially duplicates the Homebrew recipe 41 | CFLAGS="$CFLAGS -Wno-int-conversion" build_simple pkg-config 0.29.2 https://pkg-config.freedesktop.org/releases tar.gz \ 42 | --disable-debug --disable-host-tool --with-internal-glib \ 43 | --with-pc-path=$BUILD_PREFIX/share/pkgconfig:$BUILD_PREFIX/lib/pkgconfig \ 44 | --with-system-include-path=$(xcrun --show-sdk-path --sdk macosx)/usr/include 45 | export PKG_CONFIG=$BUILD_PREFIX/bin/pkg-config 46 | touch pkg-config-stamp 47 | } 48 | 49 | function build { 50 | if [[ -n "$IS_MACOS" ]] && [[ "$CIBW_ARCHS" == "arm64" ]]; then 51 | sudo chown -R runner /usr/local 52 | fi 53 | pre_build 54 | } 55 | 56 | if [[ -n "$IS_MACOS" ]]; then 57 | # Homebrew (or similar packaging environments) install can contain some of 58 | # the libraries that we're going to build. However, they may be compiled 59 | # with a MACOSX_DEPLOYMENT_TARGET that doesn't match what we want to use, 60 | # and they may bring in other dependencies that we don't want. The same will 61 | # be true of any other locations on the path. 
To avoid conflicts, strip the 62 | # path down to the bare minimum (which, on macOS, won't include any 63 | # development dependencies). 64 | export PATH="$BUILD_PREFIX/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin" 65 | export CMAKE_PREFIX_PATH=$BUILD_PREFIX 66 | 67 | # Ensure the basic structure of the build prefix directory exists. 68 | mkdir -p "$BUILD_PREFIX/bin" 69 | mkdir -p "$BUILD_PREFIX/lib" 70 | 71 | # Ensure pkg-config is available 72 | build_pkg_config 73 | # Ensure cmake is available 74 | python3 -m pip install cmake 75 | fi 76 | 77 | # Perform all dependency builds in the build subfolder. 78 | mkdir -p $WORKDIR 79 | pushd $WORKDIR > /dev/null 80 | 81 | wrap_wheel_builder build 82 | 83 | # Return to the project root to finish the build 84 | popd > /dev/null 85 | -------------------------------------------------------------------------------- /.github/workflows/wheels-test.ps1: -------------------------------------------------------------------------------- 1 | param ([string]$venv, [string]$pillow_avif_plugin="C:\pillow-avif-plugin") 2 | $ErrorActionPreference = 'Stop' 3 | $ProgressPreference = 'SilentlyContinue' 4 | Set-PSDebug -Trace 1 5 | if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") { 6 | # unlike CPython, PyPy requires Visual C++ Redistributable to be installed 7 | [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 8 | Invoke-WebRequest -Uri 'https://aka.ms/vs/15/release/vc_redist.x64.exe' -OutFile 'vc_redist.x64.exe' 9 | C:\vc_redist.x64.exe /install /quiet /norestart | Out-Null 10 | } 11 | $env:path += ";$pillow_avif_plugin\winbuild\build\bin\" 12 | & "$venv\Scripts\activate.ps1" 13 | & reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f 14 | cd $pillow_avif_plugin 15 | & python -VV 16 | if (!$?) { exit $LASTEXITCODE } 17 | & python -m pytest -vx tests 18 | if (!$?) 
{ exit $LASTEXITCODE } 19 | -------------------------------------------------------------------------------- /.github/workflows/wheels-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | python3 -m pytest 5 | -------------------------------------------------------------------------------- /.github/workflows/wheels.yml: -------------------------------------------------------------------------------- 1 | name: Wheels 2 | 3 | on: 4 | pull_request: 5 | push: 6 | env: 7 | REPO_DIR: pillow-avif-plugin 8 | BUILD_DEPENDS: "" 9 | TEST_DEPENDS: "pytest pillow" 10 | MACOSX_DEPLOYMENT_TARGET: "10.10" 11 | WHEEL_SDIR: wheelhouse 12 | CONFIG_PATH: pillow-avif-plugin/wheelbuild/config.sh 13 | LIBAVIF_VERSION: 1.2.1 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.ref }} 17 | cancel-in-progress: true 18 | 19 | jobs: 20 | legacy: 21 | name: ${{ matrix.os-name }} ${{ matrix.arch == 'x86' && 'x86_64' || matrix.os-type == 'macos' && 'arm64' || 'aarch64' }} (${{ matrix.python }}) 22 | runs-on: ${{ (matrix.os-type == 'ubuntu' && matrix.arch == 'arm' && 'ubuntu-24.04-arm') || (matrix.os-type == 'ubuntu' && 'ubuntu-latest') || (matrix.os-type == 'macos' && (matrix.arch == 'x86' || matrix.python == '2.7' || matrix.python == '3.7')) && 'macos-13' || 'macos-latest' }} 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | os-type: [ "ubuntu", "macos" ] 27 | python: [ "2.7", "3.7", "3.8" ] 28 | arch: [ "x86", "arm" ] 29 | manylinux-version: [ "2014" ] 30 | mb-ml-libc: [ "manylinux", "musllinux" ] 31 | exclude: 32 | - python: "2.7" 33 | os-type: "ubuntu" 34 | manylinux-version: "2014" 35 | - python: "3.7" 36 | os-type: "ubuntu" 37 | manylinux-version: "2014" 38 | - python: "2.7" 39 | os-type: "ubuntu" 40 | arch: "arm" 41 | - python: "2.7" 42 | mb-ml-libc: "musllinux" 43 | - python: "3.7" 44 | mb-ml-libc: "musllinux" 45 | - arch: "arm" 46 | mb-ml-libc: "musllinux" 47 | - os-type: "macos" 48 | mb-ml-libc: "musllinux" 49 | include: 50 | - os-type: "macos" 51 | arch: "arm" 52 | os-name: "macOS 11.0" 53 | dist-name: "macos-11.0" 54 | - os-type: "macos" 55 | arch: "x86" 56 | os-name: "macOS 10.10" 57 | dist-name: "macos-10.10" 58 | - os-type: "ubuntu" 59 | os: "ubuntu-latest" 60 | python: "2.7" 61 | arch: "x86" 62 | manylinux-version: "2010" 63 | os-name: "manylinux2010" 64 | - os-type: "ubuntu" 65 | os: "ubuntu-latest" 66 | python: "3.7" 67 | arch: "x86" 68 | manylinux-version: "2010" 69 | os-name: "manylinux2010" 70 | - os-type: "ubuntu" 71 | os: "ubuntu-latest" 72 | manylinux-version: "2014" 73 | os-name: "manylinux2014" 74 | - mb-ml-libc: "musllinux" 75 | os-type: "ubuntu" 76 | os-name: "musllinux" 77 | env: 78 | BUILD_COMMIT: HEAD 79 | PLAT: ${{ matrix.arch == 'x86' && 'x86_64' || (matrix.os-type == 'macos' && 'arm64') || 'aarch64' }} 80 | MB_PYTHON_VERSION: ${{ matrix.python }} 81 | TRAVIS_OS_NAME: ${{ matrix.os-name }} 82 | MB_ML_VER: ${{ matrix.mb-ml-libc == 'musllinux' && '_1_1' || matrix.manylinux-version }} 83 | MACOSX_DEPLOYMENT_TARGET: ${{ matrix.arch == 'arm' && '11.0' || '10.10' }} 84 | MB_ML_LIBC: ${{ matrix.mb-ml-libc }} 85 | steps: 86 | - uses: actions/checkout@v4 87 | with: 88 | path: pillow-avif-plugin 89 | 90 | - name: Checkout multibuild 91 | uses: actions/checkout@v4 92 | with: 93 | repository: multi-build/multibuild 94 | path: multibuild 95 | ref: ${{ (matrix.os == 'macos-latest' || (env.PLAT == 'arm64' && matrix.python != '2.7')) && '88146e74ebc86baf97b6fec448ef766d64326582' || 
'34e970c4bc448b73af0127615fc4583b4f247369' }} 96 | 97 | - uses: actions/setup-python@v4 98 | with: 99 | python-version: 3.9 100 | 101 | - name: Set up QEMU 102 | uses: docker/setup-qemu-action@v2 103 | if: ${{ matrix.os == 'ubuntu-latest' && matrix.arch == 'arm' }} 104 | 105 | - name: Setup env_vars 106 | run: | 107 | cat <<'EOF' >> env_vars.sh 108 | export LIBAVIF_VERSION=${{ env.LIBAVIF_VERSION }}" 109 | export GITHUB_ACTIONS=1" 110 | EOF 111 | 112 | # - name: Cache build 113 | # id: build-cache 114 | # uses: actions/cache@v3 115 | # with: 116 | # path: pillow-avif-plugin/depends/libavif-${{ env.LIBAVIF_VERSION }} 117 | # key: 118 | # ${{ env.LIBAVIF_VERSION }}-${{ hashFiles('pillow-avif-plugin/wheelbuild/*.sh', 'pillow-avif-plugin/.github/workflows/wheels.yml', 'pillow-avif-plugin/depends/*') }}-${{ matrix.os }}-${{ env.PLAT }} 119 | 120 | - name: Cache ccache/sccache 121 | uses: actions/cache@v3 122 | with: 123 | path: | 124 | ccache 125 | sccache 126 | key: 127 | cache-${{ matrix.os }}-${{ matrix.os-name }}-${{ env.PLAT }}-${{ hashFiles('pillow-avif-plugin/wheelbuild/*.sh', 'pillow-avif-plugin/.github/workflows/wheels.yml', 'pillow-avif-plugin/depends/*', 'pillow-avif-plugin/**/*.py', 'pillow-avif-plugin/**/*.c') }}-${{ matrix.python }} 128 | restore-keys: | 129 | cache-${{ matrix.os }}-${{ matrix.os-name }}-${{ env.PLAT }}-${{ hashFiles('pillow-avif-plugin/wheelbuild/*.sh', 'pillow-avif-plugin/.github/workflows/wheels.yml', 'pillow-avif-plugin/depends/*', 'pillow-avif-plugin/**/*.py', 'pillow-avif-plugin/**/*.c') }}-${{ matrix.python }} 130 | cache-${{ matrix.os }}-${{ matrix.os-name }}-${{ env.PLAT }}-${{ hashFiles('pillow-avif-plugin/wheelbuild/*.sh', 'pillow-avif-plugin/.github/workflows/wheels.yml', 'pillow-avif-plugin/depends/*', 'pillow-avif-plugin/**/*.py', 'pillow-avif-plugin/**/*.c') }} 131 | cache-${{ matrix.os }}-${{ matrix.os-name }}-${{ env.PLAT }}- 132 | 133 | - name: Build Wheel 134 | run: pillow-avif-plugin/wheelbuild/build.sh 135 | 136 | - name: Fix Directory Permissions 137 | run: | 138 | sudo chown -R $(whoami):$(id -ng) ccache ||: 139 | sudo chown -R $(whoami):$(id -ng) sccache ||: 140 | 141 | - uses: actions/upload-artifact@v4 142 | with: 143 | name: dist-${{ matrix.dist-name || matrix.os-name }}-${{ matrix.arch == 'x86' && 'x86_64' || (matrix.os-type == 'macos' && 'arm64') || 'aarch64' }}-python${{ matrix.python }} 144 | path: wheelhouse/*.whl 145 | 146 | build-native-wheels: 147 | name: ${{ matrix.name }} 148 | runs-on: ${{ matrix.os }} 149 | strategy: 150 | fail-fast: false 151 | matrix: 152 | include: 153 | - name: "macOS 10.10 x86_64" 154 | os: macos-13 155 | cibw_arch: x86_64 156 | build: "cp3{9,10,11}*" 157 | macosx_deployment_target: "10.10" 158 | dist_name: "macos-10.10-x86_64" 159 | - name: "macOS 10.13 x86_64" 160 | os: macos-13 161 | cibw_arch: x86_64 162 | build: "cp3{12,13}*" 163 | macosx_deployment_target: "10.13" 164 | dist_name: "macos-10.13-x86_64" 165 | - name: "macOS 10.15 x86_64" 166 | os: macos-13 167 | cibw_arch: x86_64 168 | build: "pp310*" 169 | macosx_deployment_target: "10.15" 170 | dist_name: "macos-10.15-x86_64" 171 | - name: "macOS arm64" 172 | os: macos-latest 173 | cibw_arch: arm64 174 | macosx_deployment_target: "11.0" 175 | dist_name: "macos-11-arm64" 176 | - name: "manylinux2014 x86_64" 177 | os: ubuntu-latest 178 | cibw_arch: x86_64 179 | build: "*manylinux*" 180 | dist_name: "manylinux2014-x86_64" 181 | - name: "musllinux x86_64" 182 | os: ubuntu-latest 183 | cibw_arch: x86_64 184 | build: "*musllinux*" 185 | dist_name: 
"musllinux-x86_64" 186 | - name: "manylinux_2_28 x86_64" 187 | os: ubuntu-latest 188 | cibw_arch: x86_64 189 | build: "*manylinux*" 190 | manylinux: "manylinux_2_28" 191 | dist_name: "manylinux_2_28-x86_64" 192 | - name: "manylinux2014 aarch64" 193 | os: ubuntu-24.04-arm 194 | cibw_arch: aarch64 195 | build: "*manylinux*" 196 | dist_name: "manylinux2014-aarch64" 197 | - name: "musllinux aarch64" 198 | os: ubuntu-24.04-arm 199 | cibw_arch: aarch64 200 | build: "*musllinux*" 201 | dist_name: "musllinux-aarch64" 202 | - name: "manylinux_2_28 aarch64" 203 | os: ubuntu-24.04-arm 204 | cibw_arch: aarch64 205 | build: "*manylinux*" 206 | manylinux: "manylinux_2_28" 207 | dist_name: "manylinux_2_28-aarch64" 208 | steps: 209 | - uses: actions/checkout@v4 210 | 211 | - name: Checkout multibuild 212 | uses: actions/checkout@v4 213 | with: 214 | repository: multi-build/multibuild 215 | path: multibuild 216 | ref: 42d761728d141d8462cd9943f4329f12fe62b155 217 | 218 | - uses: actions/setup-python@v5 219 | with: 220 | python-version: "3.x" 221 | 222 | - name: Install cibuildwheel 223 | run: | 224 | python3 -m pip install -r .ci/requirements-cibw.txt 225 | 226 | - name: Build wheels 227 | run: | 228 | python3 -m cibuildwheel --output-dir wheelhouse 229 | env: 230 | CIBW_ARCHS: ${{ matrix.cibw_arch }} 231 | CIBW_BUILD: ${{ matrix.build }} 232 | CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy 233 | CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }} 234 | CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }} 235 | CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }} 236 | CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }} 237 | MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macosx_deployment_target }} 238 | CIBW_SKIP: "pp36-* pp37-* pp38-* cp36-* cp37-* cp38-* pp39-* pp311-*" 239 | 240 | - uses: actions/upload-artifact@v4 241 | with: 242 | name: dist-${{ matrix.dist_name }} 243 | path: ./wheelhouse/*.whl 244 | 245 | windows: 246 | name: Windows ${{ matrix.cibw_arch }} 247 | runs-on: windows-latest 248 | strategy: 249 | fail-fast: false 250 | matrix: 251 | include: 252 | - cibw_arch: AMD64 253 | steps: 254 | - uses: actions/checkout@v4 255 | 256 | - uses: actions/setup-python@v5 257 | with: 258 | python-version: "3.x" 259 | 260 | - name: Install cibuildwheel 261 | run: | 262 | python.exe -m pip install -r .ci/requirements-cibw.txt 263 | 264 | - name: Install rust toolchain 265 | uses: dtolnay/rust-toolchain@stable 266 | 267 | - name: Prepare for build 268 | run: | 269 | choco install nasm --no-progress 270 | echo "C:\Program Files\NASM" >> $env:GITHUB_PATH 271 | 272 | python.exe -m pip install meson 273 | 274 | curl -LO "https://github.com/lu-zero/cargo-c/releases/latest/download/cargo-c-windows-msvc.zip" 275 | 7z e -y "cargo-c-windows-msvc.zip" -o"${env:USERPROFILE}\.cargo\bin" 276 | 277 | & python.exe winbuild\build_prepare.py -v --architecture=${{ matrix.cibw_arch }} 278 | shell: pwsh 279 | 280 | - name: Build wheels 281 | run: | 282 | setlocal EnableDelayedExpansion 283 | for %%f in (winbuild\build\license\*) do ( 284 | set x=%%~nf 285 | echo. >> LICENSE 286 | echo ===== %%~nf ===== >> LICENSE 287 | echo. >> LICENSE 288 | type %%f >> LICENSE 289 | ) 290 | call winbuild\\build\\build_env.cmd 291 | %pythonLocation%\python.exe -m cibuildwheel . 
--output-dir wheelhouse 292 | env: 293 | CIBW_ARCHS: ${{ matrix.cibw_arch }} 294 | CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd" 295 | CIBW_CACHE_PATH: "C:\\cibw" 296 | CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy 297 | CIBW_TEST_SKIP: "*-win_arm64 pp*" 298 | CIBW_SKIP: "pp36-* pp37-* pp38-* pp39-* pp311-* cp36-* cp37-* cp38-*" 299 | CIBW_TEST_COMMAND: 'docker run --rm 300 | -v {project}:C:\pillow-avif-plugin 301 | -v C:\cibw:C:\cibw 302 | -v %CD%\..\venv-test:%CD%\..\venv-test 303 | -e CI -e GITHUB_ACTIONS 304 | mcr.microsoft.com/windows/servercore:ltsc2022 305 | powershell C:\pillow-avif-plugin\.github\workflows\wheels-test.ps1 %CD%\..\venv-test' 306 | shell: cmd 307 | 308 | - name: Upload wheels 309 | uses: actions/upload-artifact@v4 310 | with: 311 | name: dist-windows-${{ matrix.cibw_arch }} 312 | path: ./wheelhouse/*.whl 313 | 314 | sdist: 315 | runs-on: ubuntu-latest 316 | steps: 317 | - uses: actions/checkout@v4 318 | 319 | - name: Set up Python 320 | uses: actions/setup-python@v5 321 | with: 322 | python-version: "3.x" 323 | cache: pip 324 | cache-dependency-path: "Makefile" 325 | 326 | - run: | 327 | python3 -m build --help > /dev/null 2>&1 || python3 -m pip install build 328 | python3 -m build --sdist 329 | python3 -m twine --help > /dev/null 2>&1 || python3 -m pip install twine 330 | python3 -m twine check --strict dist/* 331 | 332 | - uses: actions/upload-artifact@v4 333 | with: 334 | name: dist-sdist 335 | path: dist/*.tar.gz 336 | 337 | success: 338 | needs: [legacy, windows, build-native-wheels, sdist] 339 | runs-on: ubuntu-latest 340 | name: Build Successful 341 | steps: 342 | - uses: actions/download-artifact@v4 343 | with: 344 | pattern: dist-* 345 | path: dist 346 | merge-multiple: true 347 | - name: Upload wheels 348 | uses: actions/upload-artifact@v4 349 | with: 350 | name: wheels 351 | path: ./**/*.* 352 | - name: Upload Release 353 | uses: fnkr/github-action-ghr@v1.3 354 | if: startsWith(github.ref, 'refs/tags/') 355 | env: 356 | GHR_PATH: . 
357 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 358 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | *.o 8 | *.dSYM 9 | *.gcno 10 | *.gcda 11 | *.gcov 12 | 13 | # Distribution / packaging 14 | .eggs/ 15 | .Python 16 | env/ 17 | bin/ 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | *.egg-info/ 28 | *.dist-info/ 29 | .installed.cfg 30 | *.egg 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .cache 41 | .pytest_cache 42 | coverage.xml 43 | 44 | # Translations 45 | *.mo 46 | 47 | # Mr Developer 48 | .mr.developer.cfg 49 | .project 50 | .pydevproject 51 | 52 | # Rope 53 | .ropeproject 54 | 55 | # Django stuff: 56 | *.log 57 | *.pot 58 | 59 | # viewdoc output 60 | .long-description.html 61 | 62 | # Vim cruft 63 | .*.swp 64 | 65 | #emacs 66 | *~ 67 | \#*# 68 | .#* 69 | 70 | #VS Code 71 | .vscode 72 | 73 | #Komodo 74 | *.komodoproject 75 | 76 | #OS 77 | .DS_Store 78 | 79 | # JetBrains 80 | .idea 81 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | 1.5.2 (Apr 24, 2025) 5 | -------------------- 6 | 7 | * **Fixed**: Make compatible with Pillow 11.2.1, by `@radarhere`_ (`#74`_). 8 | * **CI**: Modify wheel building compile flags to produce smaller binaries. 9 | * **CI**: Update libavif to `2d02044`_ (2025-04-22). This updates libaom to 10 | 3.12.1. See the table below for all AVIF codec versions in this release. 11 | 12 | .. table:: 13 | 14 | =========== ========== 15 | libavif 1.2.1 16 | **libaom** **3.12.1** 17 | dav1d 1.5.1 18 | SVT-AV1 3.0.1 19 | rav1e 0.7.1 20 | =========== ========== 21 | 22 | .. _2d02044: https://github.com/AOMediaCodec/libavif/commit/2d0204485a30446d82770c115e0a4d61e2819f23 23 | .. _#74: https://github.com/fdintino/pillow-avif-plugin/pull/74 24 | .. _@radarhere: https://github.com/radarhere 25 | 26 | 1.5.1 27 | ----- 28 | 29 | * **CI**: Update libavif to 1.2.1. The only library version change since 30 | 1.5.0 is SVT-AV1, which was upgraded from 3.0.0 to 3.0.1. See the table 31 | below for all AVIF codec versions in this release. 32 | 33 | .. table:: 34 | 35 | =========== ========== 36 | **libavif** **1.2.1** 37 | libaom 3.12.0 38 | dav1d 1.5.1 39 | **SVT-AV1** **3.0.1** 40 | rav1e 0.7.1 41 | =========== ========== 42 | 43 | 1.5.0 (Mar 7, 2025) 44 | ------------------- 45 | 46 | * **Fixed**: Convert AVIF irot and imir into EXIF orientation when decoding 47 | an image, in `#70`_. EXIF orientation has been preserved by the encoder 48 | since 1.4.2, which is when we started setting irot and imir. But if an AVIF 49 | image with non-default irot or imir values was converted to another format, 50 | its orientation would be lost. 51 | * **Fixed**: ``pillow_avif.AvifImagePlugin.CHROMA_UPSAMPLING`` is now actually 52 | used when decoding an image, in `#70`_. 53 | * **Fixed**: ``TypeError`` when saving images with float frame durations, by 54 | `@BlackSmith`_ in `#68`_ (merged from `#71`_) 55 | * **Added**: Python 3.13 free-thread mode support (experimental). 
56 | * **CI**: Update libavif to 1.2.0 (`4eb0a40`_, 2025-03-05); publish wheels 57 | for python 3.13. See the table below for the current AVIF codec versions. 58 | Libraries whose versions have changed since the last pillow-avif-plugin 59 | release are bolded. 60 | 61 | .. table:: 62 | 63 | =========== ========== 64 | **libavif** **1.2.0** (`4eb0a40`_) 65 | **libaom** **3.12.0** 66 | **dav1d** **1.5.1** 67 | **SVT-AV1** **3.0.0** 68 | rav1e 0.7.1 69 | =========== ========== 70 | 71 | .. _#68: https://github.com/fdintino/pillow-avif-plugin/pull/68 72 | .. _#70: https://github.com/fdintino/pillow-avif-plugin/pull/70 73 | .. _#71: https://github.com/fdintino/pillow-avif-plugin/pull/71 74 | .. _4eb0a40: https://github.com/AOMediaCodec/libavif/commit/4eb0a40fb06612adf53650a14c692eaf62c068e6 75 | .. _@BlackSmith: https://github.com/BlackSmith 76 | 77 | 1.4.6 (Jul 14, 2024) 78 | -------------------- 79 | 80 | * **Fixed**: macOS arm64 illegal instruction segmentation fault with aom 81 | encoding in `#60`_ and `#61`_; fixes `#59`_. 82 | 83 | .. _#59: https://github.com/fdintino/pillow-avif-plugin/issues/59 84 | .. _#60: https://github.com/fdintino/pillow-avif-plugin/pull/60 85 | .. _#61: https://github.com/fdintino/pillow-avif-plugin/pull/61 86 | 87 | 1.4.4 (Jul 8, 2024) 88 | ------------------- 89 | 90 | * **CI**: bump libavif to `e10e6d9`_ (2024-07-01); fix CI build issues 91 | in `#53`_. See table below for new versions (all versions are 92 | upgrades from the 1.4.3 release). 93 | 94 | +------------------------------------+-------------------------+ 95 | | **libavif** | **1.0.3** (`e10e6d9`_) | 96 | +------------------------------------+-------------------------+ 97 | | **libaom** | **3.9.1** | 98 | +------------------------------------+-------------------------+ 99 | | **dav1d** | **1.4.3** | 100 | +------------------------------------+-------------------------+ 101 | | **SVT-AV1** | **2.1.1** | 102 | +------------------------------------+-------------------------+ 103 | | **rav1e** | **0.7.1** | 104 | +------------------------------------+-------------------------+ 105 | 106 | * **Feature**: Allow users to pass ``max_threads`` to the avif encoder via 107 | ``Image.save`` by `@yit-b`_ in `#54`_, originally in `#49`_. 108 | 109 | * **Feature**: Let users pass ``max_threads`` as an argument to 110 | ``_avif.AvifDecoder`` by `@yit-b`_ in `#50`_. 111 | 112 | * **CI**: build SVT-AV1 for aarch64 or arm64 by `@RaphaelVRossi`_ in `#38`_. 113 | 114 | * **Fixed**: keep alpha channel for images with mode P and custom 115 | transparency in `#56`_; fixes `#48`_. 116 | 117 | * **Fixed**: disable decoder strictness for ``clap`` and ``pixi`` properties 118 | in `#57`_. fixes `#13`_ and `#28`_. 119 | 120 | * **CI**: lint secrets permission error and macOS GHA runner homebrew 121 | ``PATH`` bug in `#55`_. 122 | 123 | .. _e10e6d9: https://github.com/AOMediaCodec/libavif/commit/e10e6d98e6d1dbcdd409859a924d1b607a1e06dc 124 | .. _#53: https://github.com/fdintino/pillow-avif-plugin/pull/53 125 | .. _#54: https://github.com/fdintino/pillow-avif-plugin/pull/54 126 | .. _#49: https://github.com/fdintino/pillow-avif-plugin/pull/49 127 | .. _#50: https://github.com/fdintino/pillow-avif-plugin/pull/50 128 | .. _@RaphaelVRossi: https://github.com/RaphaelVRossi 129 | .. _#38: https://github.com/fdintino/pillow-avif-plugin/pull/38 130 | .. _#56: https://github.com/fdintino/pillow-avif-plugin/pull/56 131 | .. _#48: https://github.com/fdintino/pillow-avif-plugin/issues/48 132 | .. 
_#57: https://github.com/fdintino/pillow-avif-plugin/pull/57 133 | .. _#13: https://github.com/fdintino/pillow-avif-plugin/issues/13 134 | .. _#28: https://github.com/fdintino/pillow-avif-plugin/issues/28 135 | .. _#55: https://github.com/fdintino/pillow-avif-plugin/pull/55 136 | 137 | 1.4.3 (Feb 8, 2024) 138 | ------------------- 139 | 140 | - **Fixed**: Limit maxThreads to 64 for aom encodes by `@yit-b`_ (`#41`_). 141 | Fixes `#23`_. 142 | - **Tests**: fix pytest deprecation warning (`#42`_). 143 | - **CI**: update libavif to v1.0.3 and update transitive dependencies (`#43`_). 144 | See table below; changes from previous release in bold. 145 | 146 | =========== ========= 147 | **libavif** **1.0.3** 148 | **libaom** **3.8.1** 149 | **dav1d** **1.3.0** 150 | SVT-AV1 1.7.0 151 | **rav1e** **0.7.0** 152 | =========== ========= 153 | 154 | .. _@yit-b: https://github.com/yit-b 155 | .. _#41: https://github.com/fdintino/pillow-avif-plugin/pull/41 156 | .. _#42: https://github.com/fdintino/pillow-avif-plugin/pull/42 157 | .. _#23: https://github.com/fdintino/pillow-avif-plugin/issues/23 158 | .. _#43: https://github.com/fdintino/pillow-avif-plugin/pull/43 159 | 160 | 1.4.2 (Jan 9, 2024) 161 | ------------------- 162 | 163 | * **Fixed**: Convert EXIF orientation to AVIF irot and imir in `#40`_. 164 | 165 | .. _#40: https://github.com/fdintino/pillow-avif-plugin/pull/40 166 | 167 | 1.4.1 (Oct 12, 2023) 168 | -------------------- 169 | 170 | * **Fixed**: Issue `#32`_ cannot access local variable 'quality' in `#33`_. 171 | 172 | .. _#32: https://github.com/fdintino/pillow-avif-plugin/issues/32 173 | .. _#33: https://github.com/fdintino/pillow-avif-plugin/pull/33 174 | 175 | 1.4.0 (Sep 24, 2023) 176 | -------------------- 177 | 178 | * **Feature**: Support new libavif quality encoder option. This 179 | replaces the (now deprecated) qmin and qmax options in libavif 1.x 180 | * **CI**: Publish python 3.12 wheels 181 | * **CI**: Stop publishing manylinux1 and 32-bit wheels, following the 182 | lead of Pillow 183 | * **CI**: Fix zlib 1.2.11 download link invalid, update to 1.2.13 by 184 | `@gamefunc`_ in `#22`_ 185 | * **CI**: Update bundled libraries (`#27`_) (see table below, 186 | changes from previous release in bold) 187 | * **CI**: Bundle rav1e in windows wheels (fixes `#25`_). 188 | 189 | =========== ========= 190 | **libavif** **1.0.1** 191 | **libaom** **3.7.0** 192 | **dav1d** **1.2.1** 193 | **SVT-AV1** **1.7.0** 194 | **rav1e** **0.6.6** 195 | =========== ========= 196 | 197 | .. _@gamefunc: https://github.com/gamefunc 198 | .. _#22: https://github.com/fdintino/pillow-avif-plugin/pull/22 199 | .. _#27: https://github.com/fdintino/pillow-avif-plugin/pull/27 200 | .. _#25: https://github.com/fdintino/pillow-avif-plugin/issues/25 201 | 202 | 1.3.1 (Nov 2, 2022) 203 | ------------------- 204 | 205 | * **Fixed**: Distributed OS X wheels now include patch for libaom segmentation 206 | fault (see `AOMediaCodec/libavif#1190`_ and `aom@165281`_). The bundled 207 | static libaom was patched for all other wheels, but because of a build issue 208 | it was missing from the 1.3.0 mac wheels. 209 | * **CI**: Python 3.6 wheels are no longer being packaged and distributed, 210 | ahead of support being dropped in the next major release. 211 | 212 | .. _AOMediaCodec/libavif#1190: https://github.com/AOMediaCodec/libavif/issues/1190 213 | .. 
_aom@165281: https://aomedia-review.googlesource.com/c/aom/+/165281/1 214 | 215 | 1.3.0 (Oct 29, 2022) 216 | -------------------- 217 | 218 | * **Changed**: Default ``quality`` changed to 75 (was previously 90) 219 | * **Changed**: Default ``speed`` changed to 6 (was previously 8) 220 | * **Added**: autotiling feature (default ``True`` if ``tile_rows`` and 221 | ``tile_cols`` are unset, can be disabled with ``autotiling=False`` passed to 222 | ``save()``). 223 | * **Fixed**: ``tile_cols`` encoder setting (the ``save()`` method was using 224 | the value passed to ``tile_rows`` instead) 225 | * **Fixed**: Attempts to open non-AV1 images in HEIF containers (e.g. HEIC) 226 | now raise UnidentifiedImageError, not ValueError. Fixes `#19`_. 227 | * **CI**: manylinux2014 aarch64 wheels 228 | * **CI**: bundle libyuv 229 | * **CI**: Python 3.11 wheels 230 | * **CI**: Update bundled libraries (see table below, changes from previous 231 | release in bold) 232 | 233 | .. _#19: https://github.com/fdintino/pillow-avif-plugin/issues/19 234 | 235 | .. table:: 236 | 237 | =========== ========== 238 | **libavif** **0.11.0** 239 | **libaom** **3.5.0** 240 | **dav1d** **1.0.0** 241 | **SVT-AV1** **1.3.0** 242 | rav1e 0.5.1 243 | =========== ========== 244 | 245 | 1.2.2 (Apr 20, 2022) 246 | -------------------- 247 | 248 | * **CI**: Build musllinux wheels 249 | * **CI**: Update bundled libraries (see table below, changes from previous 250 | release in bold) 251 | 252 | .. table:: 253 | 254 | =========== ========== 255 | **libavif** **0.10.1** 256 | **libaom** **3.3.0** 257 | **dav1d** **1.0.0** 258 | **SVT-AV1** **0.9.1** 259 | **rav1e** **0.5.1** 260 | =========== ========== 261 | 262 | 1.2.1 (Oct 14, 2021) 263 | -------------------- 264 | 265 | * **Fixed**: Accept all AVIF compatible brands in the FileTypeBox. Fixes `#5`_. 266 | * **CI**: Add Python 3.10 wheels 267 | * **CI**: Add OS X ARM64 wheels 268 | * **CI**: Update bundled libraries (see table below, changes from previous 269 | release in bold) 270 | 271 | .. _#5: https://github.com/fdintino/pillow-avif-plugin/issues/5 272 | 273 | .. table:: 274 | 275 | =========== ========== 276 | libavif 0.9.2 277 | libaom 2.0.2 278 | **dav1d** **0.9.2** 279 | SVT-AV1 0.8.7 280 | rav1e 0.4.0 281 | =========== ========== 282 | 283 | 1.2.0 (Jul 19, 2021) 284 | -------------------- 285 | 286 | * **Added**: ``tile_rows`` encoder setting 287 | * **Added**: ``alpha_premultiplied`` encoder setting 288 | * **Added**: ``advanced`` encoder setting to pass codec-specific key-value 289 | options 290 | * **CI**: Update bundled libraries (see table below, changes from previous 291 | release in bold) 292 | 293 | .. table:: 294 | 295 | =========== ========== 296 | **libavif** **0.9.2** 297 | libaom 2.0.2 298 | **dav1d** **0.9.0** 299 | **SVT-AV1** **0.8.7** 300 | rav1e 0.4.0 301 | =========== ========== 302 | 303 | 1.1.0 (Apr 11, 2021) 304 | -------------------- 305 | 306 | * **Added**: ``quality`` kwarg for ``save`` that maps to min and max quantizer 307 | values. 308 | * **Changed**: ``yuv_format`` kwarg renamed ``subsampling``. 309 | * **CI**: Update bundled libraries (see table below, changes from previous 310 | release in bold) 311 | 312 | 313 | 314 | .. table:: 315 | 316 | ======== ======== 317 | libavif 0.9.0 318 | libaom 2.0.2 319 | dav1d 0.8.2 320 | SVT-AV1 0.8.6 321 | rav1e 0.4.0 322 | ======== ======== 323 | 324 | 1.0.1 (Feb 23, 2021) 325 | -------------------- 326 | 327 | * Fix: Allow saving of a single image from a sequence. Fixes `#1`_. 328 | 329 | .. 
_#1: https://github.com/fdintino/pillow-avif-plugin/issues/1 330 | 331 | 1.0.0 (Feb 1, 2021) 332 | ------------------- 333 | 334 | Initial release 335 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021, Frankie Dintino. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are met: 5 | 6 | * Redistributions of source code must retain the above copyright notice, this 7 | list of conditions and the following disclaimer. 8 | 9 | * Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 14 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 17 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 18 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 19 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 20 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 21 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 22 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include tox.ini 3 | graft src 4 | 5 | global-exclude *.pyc 6 | global-exclude *.so 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pillow-avif-plugin 2 | 3 | This is a plugin that adds support for AVIF files until official support has been added (see [this pull request](https://github.com/python-pillow/Pillow/pull/5201)). 4 | 5 | To register this plugin with pillow you will need to add `import pillow_avif` somewhere in your application. 
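A minimal usage sketch (the file names `example.avif` and `example.jpg` below are placeholders): importing `pillow_avif` registers the AVIF format with Pillow, after which `Image.open` and `Image.save` handle AVIF files like any other format. Encoder options such as `quality` (default 75) and `speed` (default 6) can be passed as keyword arguments to `save()`.

```python
from PIL import Image

import pillow_avif  # noqa: F401 -- importing it registers the AVIF plugin with Pillow

# Decode an AVIF file (placeholder path)
im = Image.open("example.avif")
print(im.size, im.mode)

# Encode another image as AVIF; quality defaults to 75 and speed to 6
Image.open("example.jpg").save("example_out.avif", quality=80)
```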
6 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | # Avoid "Missing base report" due to committing CHANGES.rst with "[CI skip]" 3 | # https://github.com/codecov/support/issues/363 4 | # https://docs.codecov.io/docs/comparing-commits 5 | allow_coverage_offsets: true 6 | 7 | comment: false 8 | 9 | coverage: 10 | status: 11 | project: 12 | default: 13 | threshold: 0.01% 14 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | pytest_plugins = ["tests.helper"] 2 | -------------------------------------------------------------------------------- /depends/install_libavif.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | if [ $(uname) != "Darwin" ]; then 5 | TRAVIS_OS_NAME="manylinux$MB_ML_VER" 6 | fi 7 | 8 | LIBAVIF_CMAKE_FLAGS=() 9 | 10 | if uname -s | grep -q Darwin; then 11 | if [ -w /usr/local ]; then 12 | PREFIX=/usr/local 13 | else 14 | PREFIX=$(brew --prefix) 15 | fi 16 | else 17 | PREFIX=/usr 18 | fi 19 | 20 | SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" 21 | 22 | PKGCONFIG=${PKGCONFIG:-pkg-config} 23 | 24 | export CFLAGS="-fPIC -O3 $CFLAGS" 25 | export CXXFLAGS="-fPIC -O3 $CXXFLAGS" 26 | 27 | ARCHIVE="${LIBAVIF_VERSION}.tar.gz" 28 | if [[ "$LIBAVIF_VERSION" == *"."* ]]; then 29 | ARCHIVE="v${ARCHIVE}" 30 | HAS_EXT_DIR=1 31 | fi 32 | 33 | echo "::group::Fetching libavif" 34 | mkdir -p libavif-$LIBAVIF_VERSION 35 | curl -sLo - \ 36 | https://github.com/AOMediaCodec/libavif/archive/$ARCHIVE \ 37 | | tar --strip-components=1 -C libavif-$LIBAVIF_VERSION -zxf - 38 | pushd libavif-$LIBAVIF_VERSION 39 | echo "::endgroup::" 40 | 41 | if [ "$LIBAVIF_VERSION" != "0.11.0" ]; then 42 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_LIBYUV=LOCAL) 43 | HAS_EXT_DIR= 44 | fi 45 | 46 | HAS_DECODER=0 47 | HAS_ENCODER=0 48 | 49 | if $PKGCONFIG --exists dav1d; then 50 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_DAV1D=ON) 51 | HAS_DECODER=1 52 | fi 53 | 54 | if $PKGCONFIG --exists rav1e; then 55 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_RAV1E=ON) 56 | HAS_ENCODER=1 57 | fi 58 | 59 | if $PKGCONFIG --exists SvtAv1Enc; then 60 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_SVT=ON) 61 | HAS_ENCODER=1 62 | fi 63 | 64 | if $PKGCONFIG --exists libgav1; then 65 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_LIBGAV1=ON) 66 | HAS_DECODER=1 67 | fi 68 | 69 | if $PKGCONFIG --exists aom; then 70 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_AOM=ON) 71 | HAS_ENCODER=1 72 | HAS_DECODER=1 73 | fi 74 | 75 | if [ "$HAS_ENCODER" != 1 ] || [ "$HAS_DECODER" != 1 ]; then 76 | if [ -n "${HAS_EXT_DIR}" ]; then 77 | echo "::group::Building aom" 78 | pushd ext > /dev/null 79 | bash aom.cmd 80 | popd > /dev/null 81 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_AOM=ON -DAVIF_LOCAL_AOM=ON) 82 | else 83 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_AOM=LOCAL) 84 | fi 85 | echo "::endgroup::" 86 | fi 87 | 88 | if uname -s | grep -q Darwin; then 89 | # Prevent cmake from using @rpath in install id, so that delocate can 90 | # find and bundle the libavif dylib 91 | LIBAVIF_CMAKE_FLAGS+=("-DCMAKE_INSTALL_NAME_DIR=$PREFIX/lib" -DCMAKE_MACOSX_RPATH=OFF) 92 | fi 93 | 94 | echo "::group::Building libavif" 95 | mkdir build 96 | pushd build 97 | cmake .. 
\ 98 | -DCMAKE_INSTALL_PREFIX=$PREFIX \ 99 | -DCMAKE_BUILD_TYPE=Release \ 100 | "${LIBAVIF_CMAKE_FLAGS[@]}" 101 | make 102 | popd 103 | 104 | popd 105 | echo "::endgroup::" 106 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.cibuildwheel] 6 | before-all = ".github/workflows/wheels-dependencies.sh" 7 | build-verbosity = 1 8 | test-command = "cd {project} && .github/workflows/wheels-test.sh" 9 | test-extras = "tests" 10 | 11 | [tool.cibuildwheel.macos.environment] 12 | PATH = "$(pwd)/build/deps/darwin/bin:$(dirname $(which python3)):/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin" 13 | CFLAGS = "-I$(pwd)/build/deps/darwin/include" 14 | LDFLAGS = "-L$(pwd)/build/deps/darwin/lib" 15 | 16 | [tool.cibuildwheel.linux] 17 | before-test = "yum install -y libtiff-devel libjpeg-devel zlib-devel libpng-devel" 18 | 19 | [[tool.cibuildwheel.overrides]] 20 | select = "*-musllinux*" 21 | before-test = "apk update && apk add jpeg-dev tiff-dev zlib-dev libpng-dev" 22 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = E203 3 | max-line-length = 88 4 | 5 | [tool:pytest] 6 | addopts = --tb=short 7 | testpaths = tests 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from io import open 3 | import os 4 | import sys 5 | 6 | from setuptools import Extension, setup 7 | 8 | 9 | def version(): 10 | filename = "src/pillow_avif/__init__.py" 11 | with open(filename) as f: 12 | tree = ast.parse(f.read(), filename) 13 | for node in tree.body: 14 | if isinstance(node, ast.Assign) and len(node.targets) == 1: 15 | (target,) = node.targets 16 | if isinstance(target, ast.Name) and target.id == "__version__": 17 | return node.value.s 18 | 19 | 20 | def readme(): 21 | try: 22 | with open("README.md") as f: 23 | return f.read() 24 | except IOError: 25 | pass 26 | 27 | 28 | IS_DEBUG = hasattr(sys, "gettotalrefcount") 29 | PLATFORM_MINGW = os.name == "nt" and "GCC" in sys.version 30 | 31 | libraries = ["avif"] 32 | if sys.platform == "win32": 33 | libraries.extend( 34 | [ 35 | "advapi32", 36 | "bcrypt", 37 | "ntdll", 38 | "userenv", 39 | "ws2_32", 40 | "kernel32", 41 | ] 42 | ) 43 | 44 | test_requires = [ 45 | "pytest", 46 | "packaging", 47 | "pytest-cov", 48 | "test-image-results", 49 | "pillow", 50 | ] 51 | 52 | setup( 53 | name="pillow-avif-plugin", 54 | description="A pillow plugin that adds avif support via libavif", 55 | long_description=readme(), 56 | long_description_content_type="text/markdown", 57 | version=version(), 58 | ext_modules=[ 59 | Extension( 60 | "pillow_avif._avif", 61 | ["src/pillow_avif/_avif.c"], 62 | depends=["avif/avif.h"], 63 | libraries=libraries, 64 | ), 65 | ], 66 | package_data={"": ["README.rst"]}, 67 | package_dir={"": "src"}, 68 | packages=["pillow_avif"], 69 | license="MIT License", 70 | author="Frankie Dintino", 71 | author_email="fdintino@theatlantic.com", 72 | url="https://github.com/fdintino/pillow-avif-plugin/", 73 | download_url="https://github.com/fdintino/pillow-avif-plugin/releases", 74 | install_requires=[], 75 | extras_require={"tests": 
test_requires}, 76 | classifiers=[ 77 | "Development Status :: 5 - Production/Stable", 78 | "Environment :: Web Environment", 79 | "Intended Audience :: Developers", 80 | "License :: OSI Approved :: MIT License", 81 | "Operating System :: OS Independent", 82 | "Programming Language :: C", 83 | "Programming Language :: C++", 84 | "Programming Language :: Python :: 2.7", 85 | "Programming Language :: Python :: 3", 86 | "Programming Language :: Python :: 3.7", 87 | "Programming Language :: Python :: 3.8", 88 | "Programming Language :: Python :: 3.9", 89 | "Programming Language :: Python :: 3.10", 90 | "Programming Language :: Python :: 3.11", 91 | "Programming Language :: Python :: 3.12", 92 | "Programming Language :: Python :: Implementation :: CPython", 93 | "Programming Language :: Python :: Implementation :: PyPy", 94 | "Topic :: Multimedia :: Graphics", 95 | "Topic :: Multimedia :: Graphics :: Graphics Conversion", 96 | ], 97 | zip_safe=not (IS_DEBUG or PLATFORM_MINGW), 98 | ) 99 | -------------------------------------------------------------------------------- /src/pillow_avif/AvifImagePlugin.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | from io import BytesIO 4 | import sys 5 | 6 | from PIL import ExifTags, Image, ImageFile 7 | 8 | try: 9 | from pillow_avif import _avif 10 | 11 | SUPPORTED = True 12 | except ImportError: 13 | SUPPORTED = False 14 | 15 | # Decoder options as module globals, until there is a way to pass parameters 16 | # to Image.open (see https://github.com/python-pillow/Pillow/issues/569) 17 | DECODE_CODEC_CHOICE = "auto" 18 | CHROMA_UPSAMPLING = "auto" 19 | # Decoding is only affected by this for libavif **0.8.4** or greater. 20 | DEFAULT_MAX_THREADS = 0 21 | 22 | if sys.version_info[0] == 2: 23 | text_type = unicode # noqa 24 | else: 25 | text_type = str 26 | 27 | 28 | def _accept(prefix): 29 | if prefix[4:8] != b"ftyp": 30 | return False 31 | major_brand = prefix[8:12] 32 | if major_brand in ( 33 | # coding brands 34 | b"avif", 35 | b"avis", 36 | # We accept files with AVIF container brands; we can't yet know if 37 | # the ftyp box has the correct compatible brands, but if it doesn't 38 | # then the plugin will raise a SyntaxError which Pillow will catch 39 | # before moving on to the next plugin that accepts the file. 40 | # 41 | # Also, because this file might not actually be an AVIF file, we 42 | # don't raise an error if AVIF support isn't properly compiled. 
43 | b"mif1", 44 | b"msf1", 45 | ): 46 | if not SUPPORTED: 47 | return ( 48 | "image file could not be identified because AVIF support not installed" 49 | ) 50 | return True 51 | return False 52 | 53 | 54 | class AvifImageFile(ImageFile.ImageFile): 55 | format = "AVIF" 56 | format_description = "AVIF image" 57 | __frame = -1 58 | 59 | def _open(self): 60 | if not SUPPORTED: 61 | msg = "image file could not be opened because AVIF support not installed" 62 | raise SyntaxError(msg) 63 | 64 | if DECODE_CODEC_CHOICE != "auto" and not _avif.decoder_codec_available( 65 | DECODE_CODEC_CHOICE 66 | ): 67 | msg = "Invalid opening codec" 68 | raise ValueError(msg) 69 | self._decoder = _avif.AvifDecoder( 70 | self.fp.read(), DECODE_CODEC_CHOICE, CHROMA_UPSAMPLING, DEFAULT_MAX_THREADS 71 | ) 72 | 73 | # Get info from decoder 74 | ( 75 | width, 76 | height, 77 | self.n_frames, 78 | mode, 79 | icc, 80 | exif, 81 | exif_orientation, 82 | xmp, 83 | ) = self._decoder.get_info() 84 | self._size = (width, height) 85 | self.is_animated = self.n_frames > 1 86 | try: 87 | self.mode = self.rawmode = mode 88 | except AttributeError: 89 | self._mode = self.rawmode = mode 90 | 91 | if icc: 92 | self.info["icc_profile"] = icc 93 | if xmp: 94 | self.info["xmp"] = xmp 95 | 96 | if exif_orientation != 1 or exif: 97 | exif_data = Image.Exif() 98 | orientation_tag = next( 99 | k for k, v in ExifTags.TAGS.items() if v == "Orientation" 100 | ) 101 | if exif: 102 | exif_data.load(exif) 103 | original_orientation = exif_data.get(orientation_tag, 1) 104 | else: 105 | original_orientation = 1 106 | if exif_orientation != original_orientation: 107 | exif_data[orientation_tag] = exif_orientation 108 | exif = exif_data.tobytes() 109 | if exif: 110 | self.info["exif"] = exif 111 | self.seek(0) 112 | 113 | def seek(self, frame): 114 | if not self._seek_check(frame): 115 | return 116 | 117 | # Set tile 118 | self.__frame = frame 119 | if hasattr(ImageFile, "_Tile"): 120 | self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 0, self.mode)] 121 | else: 122 | self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] 123 | 124 | def load(self): 125 | if self.tile: 126 | # We need to load the image data for this frame 127 | ( 128 | data, 129 | timescale, 130 | pts_in_timescales, 131 | duration_in_timescales, 132 | ) = self._decoder.get_frame(self.__frame) 133 | self.info["timestamp"] = round(1000 * (pts_in_timescales / timescale)) 134 | self.info["duration"] = round(1000 * (duration_in_timescales / timescale)) 135 | 136 | if self.fp and self._exclusive_fp: 137 | self.fp.close() 138 | self.fp = BytesIO(data) 139 | 140 | return super(AvifImageFile, self).load() 141 | 142 | def load_seek(self, pos): 143 | pass 144 | 145 | def tell(self): 146 | return self.__frame 147 | 148 | 149 | def _save_all(im, fp, filename): 150 | _save(im, fp, filename, save_all=True) 151 | 152 | 153 | def _save(im, fp, filename, save_all=False): 154 | info = im.encoderinfo.copy() 155 | if save_all: 156 | append_images = list(info.get("append_images", [])) 157 | else: 158 | append_images = [] 159 | 160 | total = 0 161 | for ims in [im] + append_images: 162 | total += getattr(ims, "n_frames", 1) 163 | 164 | qmin = info.get("qmin", -1) 165 | qmax = info.get("qmax", -1) 166 | quality = info.get("quality", 75) 167 | if not isinstance(quality, int) or quality < 0 or quality > 100: 168 | msg = "Invalid quality setting" 169 | raise ValueError(msg) 170 | 171 | duration = info.get("duration", 0) 172 | subsampling = info.get("subsampling", "4:2:0") 173 | speed = 
info.get("speed", 6) 174 | max_threads = info.get("max_threads", DEFAULT_MAX_THREADS) 175 | codec = info.get("codec", "auto") 176 | if codec != "auto" and not _avif.encoder_codec_available(codec): 177 | msg = "Invalid saving codec" 178 | raise ValueError(msg) 179 | range_ = info.get("range", "full") 180 | tile_rows_log2 = info.get("tile_rows", 0) 181 | tile_cols_log2 = info.get("tile_cols", 0) 182 | alpha_premultiplied = bool(info.get("alpha_premultiplied", False)) 183 | autotiling = bool(info.get("autotiling", tile_rows_log2 == tile_cols_log2 == 0)) 184 | 185 | icc_profile = info.get("icc_profile", im.info.get("icc_profile")) 186 | exif = info.get("exif", im.info.get("exif")) 187 | if isinstance(exif, Image.Exif): 188 | exif = exif.tobytes() 189 | 190 | exif_orientation = 0 191 | if exif: 192 | exif_data = Image.Exif() 193 | try: 194 | exif_data.load(exif) 195 | except SyntaxError: 196 | pass 197 | else: 198 | orientation_tag = next( 199 | k for k, v in ExifTags.TAGS.items() if v == "Orientation" 200 | ) 201 | exif_orientation = exif_data.get(orientation_tag) or 0 202 | 203 | xmp = info.get("xmp", im.info.get("xmp") or im.info.get("XML:com.adobe.xmp")) 204 | 205 | if isinstance(xmp, text_type): 206 | xmp = xmp.encode("utf-8") 207 | 208 | advanced = info.get("advanced") 209 | if advanced is not None: 210 | if isinstance(advanced, dict): 211 | advanced = tuple(advanced.items()) 212 | try: 213 | advanced = tuple(advanced) 214 | except TypeError: 215 | invalid = True 216 | else: 217 | invalid = any(not isinstance(v, tuple) or len(v) != 2 for v in advanced) 218 | if invalid: 219 | msg = ( 220 | "advanced codec options must be a dict of key-value string " 221 | "pairs or a series of key-value two-tuples" 222 | ) 223 | raise ValueError(msg) 224 | advanced = tuple( 225 | [(str(k).encode("utf-8"), str(v).encode("utf-8")) for k, v in advanced] 226 | ) 227 | 228 | # Setup the AVIF encoder 229 | enc = _avif.AvifEncoder( 230 | im.size[0], 231 | im.size[1], 232 | subsampling, 233 | qmin, 234 | qmax, 235 | quality, 236 | speed, 237 | max_threads, 238 | codec, 239 | range_, 240 | tile_rows_log2, 241 | tile_cols_log2, 242 | alpha_premultiplied, 243 | autotiling, 244 | icc_profile or b"", 245 | exif or b"", 246 | exif_orientation, 247 | xmp or b"", 248 | advanced, 249 | ) 250 | 251 | # Add each frame 252 | frame_idx = 0 253 | frame_duration = 0 254 | cur_idx = im.tell() 255 | is_single_frame = total == 1 256 | try: 257 | for ims in [im] + append_images: 258 | # Get # of frames in this image 259 | nfr = getattr(ims, "n_frames", 1) 260 | 261 | for idx in range(nfr): 262 | ims.seek(idx) 263 | ims.load() 264 | 265 | # Make sure image mode is supported 266 | frame = ims 267 | rawmode = ims.mode 268 | if ims.mode not in {"RGB", "RGBA"}: 269 | alpha = ( 270 | "A" in ims.mode 271 | or "a" in ims.mode 272 | or (ims.mode == "P" and "A" in ims.im.getpalettemode()) 273 | or ( 274 | ims.mode == "P" 275 | and ims.info.get("transparency", None) is not None 276 | ) 277 | ) 278 | rawmode = "RGBA" if alpha else "RGB" 279 | frame = ims.convert(rawmode) 280 | 281 | # Update frame duration 282 | if isinstance(duration, (list, tuple)): 283 | frame_duration = duration[frame_idx] 284 | else: 285 | frame_duration = duration 286 | 287 | # Append the frame to the animation encoder 288 | enc.add( 289 | frame.tobytes("raw", rawmode), 290 | int(frame_duration), 291 | frame.size[0], 292 | frame.size[1], 293 | rawmode, 294 | is_single_frame, 295 | ) 296 | 297 | # Update frame index 298 | frame_idx += 1 299 | 300 | if not save_all: 301 | 
break 302 | 303 | finally: 304 | im.seek(cur_idx) 305 | 306 | # Get the final output from the encoder 307 | data = enc.finish() 308 | if data is None: 309 | msg = "cannot write file as AVIF (encoder returned None)" 310 | raise OSError(msg) 311 | 312 | fp.write(data) 313 | 314 | 315 | # Prevent Pillow's AVIF plugin from replacing this plugin 316 | try: 317 | from PIL import AvifImagePlugin # noqa: F401 318 | except ImportError: 319 | pass 320 | 321 | Image.register_open(AvifImageFile.format, AvifImageFile, _accept) 322 | if SUPPORTED: 323 | Image.register_save(AvifImageFile.format, _save) 324 | Image.register_save_all(AvifImageFile.format, _save_all) 325 | Image.register_extensions(AvifImageFile.format, [".avif", ".avifs"]) 326 | Image.register_mime(AvifImageFile.format, "image/avif") 327 | -------------------------------------------------------------------------------- /src/pillow_avif/__init__.py: -------------------------------------------------------------------------------- 1 | from . import AvifImagePlugin 2 | 3 | 4 | __all__ = ["AvifImagePlugin"] 5 | __version__ = "1.5.2" 6 | -------------------------------------------------------------------------------- /src/pillow_avif/_avif.c: -------------------------------------------------------------------------------- 1 | #define PY_SSIZE_T_CLEAN 2 | 3 | #include 4 | #include "avif/avif.h" 5 | 6 | #if AVIF_VERSION < 80300 7 | #define AVIF_CHROMA_UPSAMPLING_AUTOMATIC AVIF_CHROMA_UPSAMPLING_BILINEAR 8 | #define AVIF_CHROMA_UPSAMPLING_BEST_QUALITY AVIF_CHROMA_UPSAMPLING_BILINEAR 9 | #define AVIF_CHROMA_UPSAMPLING_FASTEST AVIF_CHROMA_UPSAMPLING_NEAREST 10 | #endif 11 | 12 | // Encoder type 13 | typedef struct { 14 | PyObject_HEAD 15 | avifEncoder *encoder; 16 | avifImage *image; 17 | PyObject *icc_bytes; 18 | PyObject *exif_bytes; 19 | PyObject *xmp_bytes; 20 | int first_frame; 21 | } AvifEncoderObject; 22 | 23 | static PyTypeObject AvifEncoder_Type; 24 | 25 | // Decoder type 26 | typedef struct { 27 | PyObject_HEAD 28 | avifDecoder *decoder; 29 | PyObject *data; 30 | avifChromaUpsampling upsampling; 31 | } AvifDecoderObject; 32 | 33 | static PyTypeObject AvifDecoder_Type; 34 | 35 | static int default_max_threads = 0; 36 | 37 | static void 38 | init_max_threads(void) { 39 | PyObject *os = NULL; 40 | PyObject *n = NULL; 41 | long num_cpus; 42 | 43 | #if PY_VERSION_HEX >= 0x03000000 44 | os = PyImport_ImportModule("os"); 45 | #else 46 | os = PyImport_ImportModule("multiprocessing"); 47 | #endif 48 | if (os == NULL) { 49 | goto error; 50 | } 51 | 52 | if (PyObject_HasAttrString(os, "sched_getaffinity")) { 53 | n = PyObject_CallMethod(os, "sched_getaffinity", "i", 0); 54 | if (n == NULL) { 55 | goto error; 56 | } 57 | num_cpus = PySet_Size(n); 58 | } else { 59 | n = PyObject_CallMethod(os, "cpu_count", NULL); 60 | if (n == NULL) { 61 | goto error; 62 | } 63 | #if PY_VERSION_HEX >= 0x03000000 64 | num_cpus = PyLong_AsLong(n); 65 | #else 66 | num_cpus = PyInt_AsLong(n); 67 | #endif 68 | } 69 | 70 | if (num_cpus < 1) { 71 | goto error; 72 | } 73 | 74 | default_max_threads = (int)num_cpus; 75 | 76 | done: 77 | Py_XDECREF(os); 78 | Py_XDECREF(n); 79 | return; 80 | 81 | error: 82 | if (PyErr_Occurred()) { 83 | PyErr_Clear(); 84 | } 85 | PyErr_WarnEx( 86 | PyExc_RuntimeWarning, "could not get cpu count: using max_threads=1", 1); 87 | goto done; 88 | } 89 | 90 | #if AVIF_VERSION < 1000000 91 | static int 92 | normalize_quantize_value(int qvalue) { 93 | if (qvalue < AVIF_QUANTIZER_BEST_QUALITY) { 94 | return AVIF_QUANTIZER_BEST_QUALITY; 95 | } else if 
(qvalue > AVIF_QUANTIZER_WORST_QUALITY) { 96 | return AVIF_QUANTIZER_WORST_QUALITY; 97 | } else { 98 | return qvalue; 99 | } 100 | } 101 | #endif 102 | 103 | static int 104 | normalize_tiles_log2(int value) { 105 | if (value < 0) { 106 | return 0; 107 | } else if (value > 6) { 108 | return 6; 109 | } else { 110 | return value; 111 | } 112 | } 113 | 114 | static PyObject * 115 | exc_type_for_avif_result(avifResult result) { 116 | switch (result) { 117 | case AVIF_RESULT_INVALID_EXIF_PAYLOAD: 118 | case AVIF_RESULT_INVALID_CODEC_SPECIFIC_OPTION: 119 | return PyExc_ValueError; 120 | case AVIF_RESULT_INVALID_FTYP: 121 | case AVIF_RESULT_BMFF_PARSE_FAILED: 122 | case AVIF_RESULT_TRUNCATED_DATA: 123 | case AVIF_RESULT_NO_CONTENT: 124 | return PyExc_SyntaxError; 125 | default: 126 | return PyExc_RuntimeError; 127 | } 128 | } 129 | 130 | static uint8_t 131 | irot_imir_to_exif_orientation(const avifImage *image) { 132 | uint8_t axis; 133 | #if AVIF_VERSION_MAJOR >= 1 134 | axis = image->imir.axis; 135 | #else 136 | axis = image->imir.mode; 137 | #endif 138 | int imir = image->transformFlags & AVIF_TRANSFORM_IMIR; 139 | int irot = image->transformFlags & AVIF_TRANSFORM_IROT; 140 | if (irot) { 141 | uint8_t angle = image->irot.angle; 142 | if (angle == 1) { 143 | if (imir) { 144 | return axis ? 7 // 90 degrees anti-clockwise then swap left and right. 145 | : 5; // 90 degrees anti-clockwise then swap top and bottom. 146 | } 147 | return 6; // 90 degrees anti-clockwise. 148 | } 149 | if (angle == 2) { 150 | if (imir) { 151 | return axis 152 | ? 4 // 180 degrees anti-clockwise then swap left and right. 153 | : 2; // 180 degrees anti-clockwise then swap top and bottom. 154 | } 155 | return 3; // 180 degrees anti-clockwise. 156 | } 157 | if (angle == 3) { 158 | if (imir) { 159 | return axis 160 | ? 5 // 270 degrees anti-clockwise then swap left and right. 161 | : 7; // 270 degrees anti-clockwise then swap top and bottom. 162 | } 163 | return 8; // 270 degrees anti-clockwise. 164 | } 165 | } 166 | if (imir) { 167 | return axis ? 2 // Swap left and right. 168 | : 4; // Swap top and bottom. 169 | } 170 | return 1; // Default orientation ("top-left", no-op). 171 | } 172 | 173 | static void 174 | exif_orientation_to_irot_imir(avifImage *image, int orientation) { 175 | // Mapping from Exif orientation as defined in JEITA CP-3451C section 4.6.4.A 176 | // Orientation to irot and imir boxes as defined in HEIF ISO/IEC 28002-12:2021 177 | // sections 6.5.10 and 6.5.12. 178 | switch (orientation) { 179 | case 2: // The 0th row is at the visual top of the image, and the 0th column is 180 | // the visual right-hand side. 181 | image->transformFlags |= AVIF_TRANSFORM_IMIR; 182 | #if AVIF_VERSION_MAJOR >= 1 183 | image->imir.axis = 1; 184 | #else 185 | image->imir.mode = 1; 186 | #endif 187 | break; 188 | case 3: // The 0th row is at the visual bottom of the image, and the 0th column 189 | // is the visual right-hand side. 190 | image->transformFlags |= AVIF_TRANSFORM_IROT; 191 | image->irot.angle = 2; 192 | break; 193 | case 4: // The 0th row is at the visual bottom of the image, and the 0th column 194 | // is the visual left-hand side. 195 | image->transformFlags |= AVIF_TRANSFORM_IMIR; 196 | break; 197 | case 5: // The 0th row is the visual left-hand side of the image, and the 0th 198 | // column is the visual top. 
199 | image->transformFlags |= AVIF_TRANSFORM_IROT | AVIF_TRANSFORM_IMIR; 200 | image->irot.angle = 1; // applied before imir according to MIAF spec 201 | // ISO/IEC 28002-12:2021 - section 7.3.6.7 202 | break; 203 | case 6: // The 0th row is the visual right-hand side of the image, and the 0th 204 | // column is the visual top. 205 | image->transformFlags |= AVIF_TRANSFORM_IROT; 206 | image->irot.angle = 3; 207 | break; 208 | case 7: // The 0th row is the visual right-hand side of the image, and the 0th 209 | // column is the visual bottom. 210 | image->transformFlags |= AVIF_TRANSFORM_IROT | AVIF_TRANSFORM_IMIR; 211 | image->irot.angle = 3; // applied before imir according to MIAF spec 212 | // ISO/IEC 28002-12:2021 - section 7.3.6.7 213 | break; 214 | case 8: // The 0th row is the visual left-hand side of the image, and the 0th 215 | // column is the visual bottom. 216 | image->transformFlags |= AVIF_TRANSFORM_IROT; 217 | image->irot.angle = 1; 218 | break; 219 | } 220 | } 221 | 222 | static int 223 | _codec_available(const char *name, avifCodecFlags flags) { 224 | avifCodecChoice codec = avifCodecChoiceFromName(name); 225 | if (codec == AVIF_CODEC_CHOICE_AUTO) { 226 | return 0; 227 | } 228 | const char *codec_name = avifCodecName(codec, flags); 229 | return (codec_name == NULL) ? 0 : 1; 230 | } 231 | 232 | PyObject * 233 | _decoder_codec_available(PyObject *self, PyObject *args) { 234 | char *codec_name; 235 | if (!PyArg_ParseTuple(args, "s", &codec_name)) { 236 | return NULL; 237 | } 238 | int is_available = _codec_available(codec_name, AVIF_CODEC_FLAG_CAN_DECODE); 239 | return PyBool_FromLong(is_available); 240 | } 241 | 242 | PyObject * 243 | _encoder_codec_available(PyObject *self, PyObject *args) { 244 | char *codec_name; 245 | if (!PyArg_ParseTuple(args, "s", &codec_name)) { 246 | return NULL; 247 | } 248 | int is_available = _codec_available(codec_name, AVIF_CODEC_FLAG_CAN_ENCODE); 249 | return PyBool_FromLong(is_available); 250 | } 251 | 252 | #if AVIF_VERSION >= 80200 253 | static int 254 | _add_codec_specific_options(avifEncoder *encoder, PyObject *opts) { 255 | Py_ssize_t i, size; 256 | PyObject *keyval, *py_key, *py_val; 257 | if (!PyTuple_Check(opts)) { 258 | PyErr_SetString(PyExc_ValueError, "Invalid advanced codec options"); 259 | return 1; 260 | } 261 | size = PyTuple_GET_SIZE(opts); 262 | 263 | for (i = 0; i < size; i++) { 264 | keyval = PyTuple_GetItem(opts, i); 265 | if (!PyTuple_Check(keyval) || PyTuple_GET_SIZE(keyval) != 2) { 266 | PyErr_SetString(PyExc_ValueError, "Invalid advanced codec options"); 267 | return 1; 268 | } 269 | py_key = PyTuple_GetItem(keyval, 0); 270 | py_val = PyTuple_GetItem(keyval, 1); 271 | if (!PyBytes_Check(py_key) || !PyBytes_Check(py_val)) { 272 | PyErr_SetString(PyExc_ValueError, "Invalid advanced codec options"); 273 | return 1; 274 | } 275 | const char *key = PyBytes_AsString(py_key); 276 | const char *val = PyBytes_AsString(py_val); 277 | if (key == NULL || val == NULL) { 278 | PyErr_SetString(PyExc_ValueError, "Invalid advanced codec options"); 279 | return 1; 280 | } 281 | 282 | #if AVIF_VERSION < 1000000 283 | avifEncoderSetCodecSpecificOption(encoder, key, val); 284 | #else 285 | avifResult result = avifEncoderSetCodecSpecificOption(encoder, key, val); 286 | if (result != AVIF_RESULT_OK) { 287 | PyErr_Format( 288 | exc_type_for_avif_result(result), 289 | "Setting advanced codec options failed: %s", 290 | avifResultToString(result)); 291 | return 1; 292 | } 293 | #endif 294 | } 295 | return 0; 296 | } 297 | #endif 298 | 299 | // 
Encoder functions 300 | PyObject * 301 | AvifEncoderNew(PyObject *self_, PyObject *args) { 302 | unsigned int width, height; 303 | AvifEncoderObject *self = NULL; 304 | avifEncoder *encoder = NULL; 305 | 306 | char *subsampling; 307 | int quality; 308 | int qmin; 309 | int qmax; 310 | int speed; 311 | int exif_orientation; 312 | int max_threads; 313 | PyObject *icc_bytes; 314 | PyObject *exif_bytes; 315 | PyObject *xmp_bytes; 316 | PyObject *alpha_premultiplied; 317 | PyObject *autotiling; 318 | int tile_rows_log2; 319 | int tile_cols_log2; 320 | 321 | char *codec; 322 | char *range; 323 | 324 | PyObject *advanced; 325 | int error = 0; 326 | 327 | if (!PyArg_ParseTuple( 328 | args, 329 | "IIsiiiiissiiOOSSiSO", 330 | &width, 331 | &height, 332 | &subsampling, 333 | &qmin, 334 | &qmax, 335 | &quality, 336 | &speed, 337 | &max_threads, 338 | &codec, 339 | &range, 340 | &tile_rows_log2, 341 | &tile_cols_log2, 342 | &alpha_premultiplied, 343 | &autotiling, 344 | &icc_bytes, 345 | &exif_bytes, 346 | &exif_orientation, 347 | &xmp_bytes, 348 | &advanced)) { 349 | return NULL; 350 | } 351 | 352 | // Create a new animation encoder and picture frame 353 | avifImage *image = avifImageCreateEmpty(); 354 | if (image == NULL) { 355 | PyErr_SetString(PyExc_ValueError, "Image creation failed"); 356 | error = 1; 357 | goto end; 358 | } 359 | 360 | // Set these in advance so any upcoming RGB -> YUV use the proper coefficients 361 | if (strcmp(range, "full") == 0) { 362 | image->yuvRange = AVIF_RANGE_FULL; 363 | } else if (strcmp(range, "limited") == 0) { 364 | image->yuvRange = AVIF_RANGE_LIMITED; 365 | } else { 366 | PyErr_SetString(PyExc_ValueError, "Invalid range"); 367 | error = 1; 368 | goto end; 369 | } 370 | if (strcmp(subsampling, "4:0:0") == 0) { 371 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV400; 372 | } else if (strcmp(subsampling, "4:2:0") == 0) { 373 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV420; 374 | } else if (strcmp(subsampling, "4:2:2") == 0) { 375 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV422; 376 | } else if (strcmp(subsampling, "4:4:4") == 0) { 377 | image->yuvFormat = AVIF_PIXEL_FORMAT_YUV444; 378 | } else { 379 | PyErr_Format(PyExc_ValueError, "Invalid subsampling: %s", subsampling); 380 | error = 1; 381 | goto end; 382 | } 383 | 384 | // Validate canvas dimensions 385 | if (width == 0 || height == 0) { 386 | PyErr_SetString(PyExc_ValueError, "invalid canvas dimensions"); 387 | error = 1; 388 | goto end; 389 | } 390 | image->width = width; 391 | image->height = height; 392 | 393 | image->depth = 8; 394 | #if AVIF_VERSION >= 90000 395 | image->alphaPremultiplied = alpha_premultiplied == Py_True ? AVIF_TRUE : AVIF_FALSE; 396 | #endif 397 | 398 | encoder = avifEncoderCreate(); 399 | if (!encoder) { 400 | PyErr_SetString(PyExc_MemoryError, "Can't allocate encoder"); 401 | error = 1; 402 | goto end; 403 | } 404 | 405 | if (max_threads == 0) { 406 | if (default_max_threads == 0) { 407 | init_max_threads(); 408 | } 409 | max_threads = default_max_threads; 410 | } 411 | 412 | int is_aom_encode = strcmp(codec, "aom") == 0 || 413 | (strcmp(codec, "auto") == 0 && 414 | _codec_available("aom", AVIF_CODEC_FLAG_CAN_ENCODE)); 415 | encoder->maxThreads = is_aom_encode && max_threads > 64 ? 
64 : max_threads; 416 | 417 | #if AVIF_VERSION < 1000000 418 | if (qmin != -1 && qmax != -1) { 419 | encoder->minQuantizer = qmin; 420 | encoder->maxQuantizer = qmax; 421 | } else { 422 | encoder->minQuantizer = normalize_quantize_value(64 - quality); 423 | encoder->maxQuantizer = normalize_quantize_value(100 - quality); 424 | } 425 | #else 426 | encoder->quality = quality; 427 | #endif 428 | 429 | if (strcmp(codec, "auto") == 0) { 430 | encoder->codecChoice = AVIF_CODEC_CHOICE_AUTO; 431 | } else { 432 | encoder->codecChoice = avifCodecChoiceFromName(codec); 433 | } 434 | 435 | if (speed < AVIF_SPEED_SLOWEST) { 436 | speed = AVIF_SPEED_SLOWEST; 437 | } else if (speed > AVIF_SPEED_FASTEST) { 438 | speed = AVIF_SPEED_FASTEST; 439 | } 440 | encoder->speed = speed; 441 | encoder->timescale = (uint64_t)1000; 442 | 443 | #if AVIF_VERSION >= 110000 444 | if (PyObject_IsTrue(autotiling)) { 445 | encoder->autoTiling = AVIF_TRUE; 446 | } else { 447 | encoder->autoTiling = AVIF_FALSE; 448 | encoder->tileRowsLog2 = normalize_tiles_log2(tile_rows_log2); 449 | encoder->tileColsLog2 = normalize_tiles_log2(tile_cols_log2); 450 | } 451 | #else 452 | encoder->tileRowsLog2 = normalize_tiles_log2(tile_rows_log2); 453 | encoder->tileColsLog2 = normalize_tiles_log2(tile_cols_log2); 454 | #endif 455 | 456 | if (advanced != Py_None) { 457 | #if AVIF_VERSION >= 80200 458 | if (_add_codec_specific_options(encoder, advanced)) { 459 | error = 1; 460 | goto end; 461 | } 462 | #else 463 | PyErr_SetString( 464 | PyExc_ValueError, "Advanced codec options require libavif >= 0.8.2"); 465 | error = 1; 466 | goto end; 467 | #endif 468 | } 469 | 470 | self = PyObject_New(AvifEncoderObject, &AvifEncoder_Type); 471 | if (!self) { 472 | PyErr_SetString(PyExc_RuntimeError, "could not create encoder object"); 473 | error = 1; 474 | goto end; 475 | } 476 | self->first_frame = 1; 477 | self->icc_bytes = NULL; 478 | self->exif_bytes = NULL; 479 | self->xmp_bytes = NULL; 480 | 481 | avifResult result; 482 | if (PyBytes_GET_SIZE(icc_bytes)) { 483 | self->icc_bytes = icc_bytes; 484 | Py_INCREF(icc_bytes); 485 | #if AVIF_VERSION < 1000000 486 | avifImageSetProfileICC( 487 | image, 488 | (uint8_t *)PyBytes_AS_STRING(icc_bytes), 489 | PyBytes_GET_SIZE(icc_bytes)); 490 | #else 491 | result = avifImageSetProfileICC( 492 | image, 493 | (uint8_t *)PyBytes_AS_STRING(icc_bytes), 494 | PyBytes_GET_SIZE(icc_bytes)); 495 | if (result != AVIF_RESULT_OK) { 496 | PyErr_Format( 497 | exc_type_for_avif_result(result), 498 | "Setting ICC profile failed: %s", 499 | avifResultToString(result)); 500 | error = 1; 501 | goto end; 502 | } 503 | #endif 504 | // colorPrimaries and transferCharacteristics are ignored when an ICC 505 | // profile is present, so set them to UNSPECIFIED. 
506 | image->colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED; 507 | image->transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED; 508 | } else { 509 | image->colorPrimaries = AVIF_COLOR_PRIMARIES_BT709; 510 | image->transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_SRGB; 511 | } 512 | image->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT601; 513 | 514 | if (PyBytes_GET_SIZE(exif_bytes)) { 515 | self->exif_bytes = exif_bytes; 516 | Py_INCREF(exif_bytes); 517 | #if AVIF_VERSION < 1000000 518 | avifImageSetMetadataExif( 519 | image, 520 | (uint8_t *)PyBytes_AS_STRING(exif_bytes), 521 | PyBytes_GET_SIZE(exif_bytes)); 522 | #else 523 | result = avifImageSetMetadataExif( 524 | image, 525 | (uint8_t *)PyBytes_AS_STRING(exif_bytes), 526 | PyBytes_GET_SIZE(exif_bytes)); 527 | if (result != AVIF_RESULT_OK) { 528 | PyErr_Format( 529 | exc_type_for_avif_result(result), 530 | "Setting EXIF data failed: %s", 531 | avifResultToString(result)); 532 | error = 1; 533 | goto end; 534 | } 535 | #endif 536 | } 537 | 538 | if (PyBytes_GET_SIZE(xmp_bytes)) { 539 | self->xmp_bytes = xmp_bytes; 540 | Py_INCREF(xmp_bytes); 541 | #if AVIF_VERSION < 1000000 542 | avifImageSetMetadataXMP( 543 | image, 544 | (uint8_t *)PyBytes_AS_STRING(xmp_bytes), 545 | PyBytes_GET_SIZE(xmp_bytes)); 546 | #else 547 | result = avifImageSetMetadataXMP( 548 | image, 549 | (uint8_t *)PyBytes_AS_STRING(xmp_bytes), 550 | PyBytes_GET_SIZE(xmp_bytes)); 551 | if (result != AVIF_RESULT_OK) { 552 | PyErr_Format( 553 | exc_type_for_avif_result(result), 554 | "Setting XMP data failed: %s", 555 | avifResultToString(result)); 556 | error = 1; 557 | goto end; 558 | } 559 | #endif 560 | } 561 | 562 | if (exif_orientation) { 563 | exif_orientation_to_irot_imir(image, exif_orientation); 564 | } 565 | 566 | self->image = image; 567 | self->encoder = encoder; 568 | 569 | end: 570 | if (error) { 571 | if (image) { 572 | avifImageDestroy(image); 573 | } 574 | if (encoder) { 575 | avifEncoderDestroy(encoder); 576 | } 577 | if (self) { 578 | Py_XDECREF(self->icc_bytes); 579 | Py_XDECREF(self->exif_bytes); 580 | Py_XDECREF(self->xmp_bytes); 581 | PyObject_Del(self); 582 | } 583 | return NULL; 584 | } 585 | 586 | return (PyObject *)self; 587 | } 588 | 589 | PyObject * 590 | _encoder_dealloc(AvifEncoderObject *self) { 591 | if (self->encoder) { 592 | avifEncoderDestroy(self->encoder); 593 | } 594 | if (self->image) { 595 | avifImageDestroy(self->image); 596 | } 597 | Py_XDECREF(self->icc_bytes); 598 | Py_XDECREF(self->exif_bytes); 599 | Py_XDECREF(self->xmp_bytes); 600 | Py_RETURN_NONE; 601 | } 602 | 603 | PyObject * 604 | _encoder_add(AvifEncoderObject *self, PyObject *args) { 605 | uint8_t *rgb_bytes; 606 | Py_ssize_t size; 607 | unsigned int duration; 608 | unsigned int width; 609 | unsigned int height; 610 | char *mode; 611 | PyObject *is_single_frame = NULL; 612 | int error = 0; 613 | 614 | avifRGBImage rgb; 615 | avifResult result; 616 | 617 | avifEncoder *encoder = self->encoder; 618 | avifImage *image = self->image; 619 | avifImage *frame = NULL; 620 | 621 | if (!PyArg_ParseTuple( 622 | args, 623 | "z#IIIsO", 624 | (char **)&rgb_bytes, 625 | &size, 626 | &duration, 627 | &width, 628 | &height, 629 | &mode, 630 | &is_single_frame)) { 631 | return NULL; 632 | } 633 | 634 | if (image->width != width || image->height != height) { 635 | PyErr_Format( 636 | PyExc_ValueError, 637 | "Image sequence dimensions mismatch, %ux%u != %ux%u", 638 | image->width, 639 | image->height, 640 | width, 641 | height); 642 | return NULL; 643 | } 644 | 
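    /*
     * Frame handling: the avifImage created in AvifEncoderNew already carries
     * the color properties (primaries, transfer characteristics, matrix
     * coefficients, range, subsampling and depth), so the first frame reuses
     * it directly; every later frame gets a fresh avifImage with those
     * properties copied over before its RGB data is converted to YUV and
     * handed to the encoder.
     */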
645 | if (self->first_frame) { 646 | // If we don't have an image populated with yuv planes, this is the first 647 | // frame 648 | frame = image; 649 | } else { 650 | frame = avifImageCreateEmpty(); 651 | if (image == NULL) { 652 | PyErr_SetString(PyExc_ValueError, "Image creation failed"); 653 | return NULL; 654 | } 655 | 656 | frame->width = width; 657 | frame->height = height; 658 | frame->colorPrimaries = image->colorPrimaries; 659 | frame->transferCharacteristics = image->transferCharacteristics; 660 | frame->matrixCoefficients = image->matrixCoefficients; 661 | frame->yuvRange = image->yuvRange; 662 | frame->yuvFormat = image->yuvFormat; 663 | frame->depth = image->depth; 664 | #if AVIF_VERSION >= 90000 665 | frame->alphaPremultiplied = image->alphaPremultiplied; 666 | #endif 667 | } 668 | 669 | avifRGBImageSetDefaults(&rgb, frame); 670 | 671 | if (strcmp(mode, "RGBA") == 0) { 672 | rgb.format = AVIF_RGB_FORMAT_RGBA; 673 | } else { 674 | rgb.format = AVIF_RGB_FORMAT_RGB; 675 | } 676 | 677 | #if AVIF_VERSION < 1000000 678 | avifRGBImageAllocatePixels(&rgb); 679 | #else 680 | result = avifRGBImageAllocatePixels(&rgb); 681 | if (result != AVIF_RESULT_OK) { 682 | PyErr_Format( 683 | exc_type_for_avif_result(result), 684 | "Pixel allocation failed: %s", 685 | avifResultToString(result)); 686 | error = 1; 687 | goto end; 688 | } 689 | #endif 690 | 691 | if (rgb.rowBytes * rgb.height != size) { 692 | PyErr_Format( 693 | PyExc_RuntimeError, 694 | "rgb data has incorrect size: %u * %u (%u) != %u", 695 | rgb.rowBytes, 696 | rgb.height, 697 | rgb.rowBytes * rgb.height, 698 | size); 699 | error = 1; 700 | goto end; 701 | } 702 | 703 | // rgb.pixels is safe for writes 704 | memcpy(rgb.pixels, rgb_bytes, size); 705 | 706 | Py_BEGIN_ALLOW_THREADS; 707 | result = avifImageRGBToYUV(frame, &rgb); 708 | Py_END_ALLOW_THREADS; 709 | 710 | if (result != AVIF_RESULT_OK) { 711 | PyErr_Format( 712 | exc_type_for_avif_result(result), 713 | "Conversion to YUV failed: %s", 714 | avifResultToString(result)); 715 | error = 1; 716 | goto end; 717 | } 718 | 719 | uint32_t addImageFlags = PyObject_IsTrue(is_single_frame) 720 | ? 
AVIF_ADD_IMAGE_FLAG_SINGLE 721 | : AVIF_ADD_IMAGE_FLAG_NONE; 722 | 723 | Py_BEGIN_ALLOW_THREADS; 724 | result = avifEncoderAddImage(encoder, frame, duration, addImageFlags); 725 | Py_END_ALLOW_THREADS; 726 | 727 | if (result != AVIF_RESULT_OK) { 728 | PyErr_Format( 729 | exc_type_for_avif_result(result), 730 | "Failed to encode image: %s", 731 | avifResultToString(result)); 732 | error = 1; 733 | goto end; 734 | } 735 | 736 | end: 737 | if (&rgb) { 738 | avifRGBImageFreePixels(&rgb); 739 | } 740 | if (!self->first_frame) { 741 | avifImageDestroy(frame); 742 | } 743 | 744 | if (error) { 745 | return NULL; 746 | } 747 | self->first_frame = 0; 748 | Py_RETURN_NONE; 749 | } 750 | 751 | PyObject * 752 | _encoder_finish(AvifEncoderObject *self) { 753 | avifEncoder *encoder = self->encoder; 754 | 755 | avifRWData raw = AVIF_DATA_EMPTY; 756 | avifResult result; 757 | PyObject *ret = NULL; 758 | 759 | Py_BEGIN_ALLOW_THREADS; 760 | result = avifEncoderFinish(encoder, &raw); 761 | Py_END_ALLOW_THREADS; 762 | 763 | if (result != AVIF_RESULT_OK) { 764 | PyErr_Format( 765 | exc_type_for_avif_result(result), 766 | "Failed to finish encoding: %s", 767 | avifResultToString(result)); 768 | avifRWDataFree(&raw); 769 | return NULL; 770 | } 771 | 772 | ret = PyBytes_FromStringAndSize((char *)raw.data, raw.size); 773 | 774 | avifRWDataFree(&raw); 775 | 776 | return ret; 777 | } 778 | 779 | // Decoder functions 780 | PyObject * 781 | AvifDecoderNew(PyObject *self_, PyObject *args) { 782 | PyObject *avif_bytes; 783 | AvifDecoderObject *self = NULL; 784 | avifDecoder *decoder; 785 | 786 | char *upsampling_str; 787 | char *codec_str; 788 | avifCodecChoice codec; 789 | avifChromaUpsampling upsampling; 790 | int max_threads = 0; 791 | 792 | avifResult result; 793 | 794 | if (!PyArg_ParseTuple( 795 | args, "Sssi", &avif_bytes, &codec_str, &upsampling_str, &max_threads)) { 796 | return NULL; 797 | } 798 | 799 | if (!strcmp(upsampling_str, "auto")) { 800 | upsampling = AVIF_CHROMA_UPSAMPLING_AUTOMATIC; 801 | } else if (!strcmp(upsampling_str, "fastest")) { 802 | upsampling = AVIF_CHROMA_UPSAMPLING_FASTEST; 803 | } else if (!strcmp(upsampling_str, "best")) { 804 | upsampling = AVIF_CHROMA_UPSAMPLING_BEST_QUALITY; 805 | } else if (!strcmp(upsampling_str, "nearest")) { 806 | upsampling = AVIF_CHROMA_UPSAMPLING_NEAREST; 807 | } else if (!strcmp(upsampling_str, "bilinear")) { 808 | upsampling = AVIF_CHROMA_UPSAMPLING_BILINEAR; 809 | } else { 810 | PyErr_Format(PyExc_ValueError, "Invalid upsampling option: %s", upsampling_str); 811 | return NULL; 812 | } 813 | 814 | if (strcmp(codec_str, "auto") == 0) { 815 | codec = AVIF_CODEC_CHOICE_AUTO; 816 | } else { 817 | codec = avifCodecChoiceFromName(codec_str); 818 | } 819 | 820 | self = PyObject_New(AvifDecoderObject, &AvifDecoder_Type); 821 | if (!self) { 822 | PyErr_SetString(PyExc_RuntimeError, "could not create decoder object"); 823 | return NULL; 824 | } 825 | 826 | self->upsampling = upsampling; 827 | 828 | decoder = avifDecoderCreate(); 829 | if (!decoder) { 830 | PyErr_SetString(PyExc_MemoryError, "Can't allocate decoder"); 831 | PyObject_Del(self); 832 | return NULL; 833 | } 834 | 835 | #if AVIF_VERSION >= 80400 836 | if (max_threads == 0) { 837 | if (default_max_threads == 0) { 838 | init_max_threads(); 839 | } 840 | max_threads = default_max_threads; 841 | } 842 | decoder->maxThreads = max_threads; 843 | #endif 844 | #if AVIF_VERSION >= 90200 845 | // Turn off libavif's 'clap' (clean aperture) property validation. 
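    // (Clearing the flag lets images whose clean aperture box fails strict
    // validation decode anyway.)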
846 | decoder->strictFlags &= ~AVIF_STRICT_CLAP_VALID; 847 | // Allow the PixelInformationProperty ('pixi') to be missing in AV1 image 848 | // items. libheif v1.11.0 and older does not add the 'pixi' item property to 849 | // AV1 image items. 850 | decoder->strictFlags &= ~AVIF_STRICT_PIXI_REQUIRED; 851 | #endif 852 | decoder->codecChoice = codec; 853 | 854 | Py_INCREF(avif_bytes); 855 | 856 | result = avifDecoderSetIOMemory( 857 | decoder, 858 | (uint8_t *)PyBytes_AS_STRING(avif_bytes), 859 | PyBytes_GET_SIZE(avif_bytes)); 860 | 861 | if (result != AVIF_RESULT_OK) { 862 | PyErr_Format( 863 | exc_type_for_avif_result(result), 864 | "Setting IO memory failed: %s", 865 | avifResultToString(result)); 866 | avifDecoderDestroy(decoder); 867 | Py_XDECREF(avif_bytes); 868 | PyObject_Del(self); 869 | return NULL; 870 | } 871 | 872 | result = avifDecoderParse(decoder); 873 | if (result != AVIF_RESULT_OK) { 874 | PyErr_Format( 875 | exc_type_for_avif_result(result), 876 | "Failed to decode image: %s", 877 | avifResultToString(result)); 878 | avifDecoderDestroy(decoder); 879 | Py_XDECREF(avif_bytes); 880 | PyObject_Del(self); 881 | return NULL; 882 | } 883 | 884 | self->decoder = decoder; 885 | self->data = avif_bytes; 886 | 887 | return (PyObject *)self; 888 | } 889 | 890 | PyObject * 891 | _decoder_dealloc(AvifDecoderObject *self) { 892 | if (self->decoder) { 893 | avifDecoderDestroy(self->decoder); 894 | } 895 | Py_XDECREF(self->data); 896 | Py_RETURN_NONE; 897 | } 898 | 899 | PyObject * 900 | _decoder_get_info(AvifDecoderObject *self) { 901 | avifDecoder *decoder = self->decoder; 902 | avifImage *image = decoder->image; 903 | 904 | PyObject *icc = NULL; 905 | PyObject *exif = NULL; 906 | PyObject *xmp = NULL; 907 | PyObject *ret = NULL; 908 | 909 | if (image->xmp.size) { 910 | xmp = PyBytes_FromStringAndSize((const char *)image->xmp.data, image->xmp.size); 911 | } 912 | 913 | if (image->exif.size) { 914 | exif = 915 | PyBytes_FromStringAndSize((const char *)image->exif.data, image->exif.size); 916 | } 917 | 918 | if (image->icc.size) { 919 | icc = PyBytes_FromStringAndSize((const char *)image->icc.data, image->icc.size); 920 | } 921 | 922 | ret = Py_BuildValue( 923 | "IIIsSSIS", 924 | image->width, 925 | image->height, 926 | decoder->imageCount, 927 | decoder->alphaPresent ? "RGBA" : "RGB", 928 | NULL == icc ? Py_None : icc, 929 | NULL == exif ? Py_None : exif, 930 | irot_imir_to_exif_orientation(image), 931 | NULL == xmp ? Py_None : xmp); 932 | 933 | Py_XDECREF(xmp); 934 | Py_XDECREF(exif); 935 | Py_XDECREF(icc); 936 | 937 | return ret; 938 | } 939 | 940 | PyObject * 941 | _decoder_get_frame(AvifDecoderObject *self, PyObject *args) { 942 | PyObject *bytes; 943 | PyObject *ret; 944 | Py_ssize_t size; 945 | avifResult result; 946 | avifRGBImage rgb; 947 | avifDecoder *decoder; 948 | avifImage *image; 949 | uint32_t frame_index; 950 | 951 | decoder = self->decoder; 952 | 953 | if (!PyArg_ParseTuple(args, "I", &frame_index)) { 954 | return NULL; 955 | } 956 | 957 | result = avifDecoderNthImage(decoder, frame_index); 958 | if (result != AVIF_RESULT_OK) { 959 | PyErr_Format( 960 | exc_type_for_avif_result(result), 961 | "Failed to decode frame %u: %s", 962 | frame_index, 963 | avifResultToString(result)); 964 | return NULL; 965 | } 966 | 967 | image = decoder->image; 968 | 969 | avifRGBImageSetDefaults(&rgb, image); 970 | 971 | rgb.depth = 8; 972 | rgb.format = decoder->alphaPresent ? 
AVIF_RGB_FORMAT_RGBA : AVIF_RGB_FORMAT_RGB; 973 | rgb.chromaUpsampling = self->upsampling; 974 | 975 | #if AVIF_VERSION < 1000000 976 | avifRGBImageAllocatePixels(&rgb); 977 | #else 978 | result = avifRGBImageAllocatePixels(&rgb); 979 | if (result != AVIF_RESULT_OK) { 980 | PyErr_Format( 981 | exc_type_for_avif_result(result), 982 | "Pixel allocation failed: %s", 983 | avifResultToString(result)); 984 | return NULL; 985 | } 986 | #endif 987 | 988 | Py_BEGIN_ALLOW_THREADS; 989 | result = avifImageYUVToRGB(image, &rgb); 990 | Py_END_ALLOW_THREADS; 991 | 992 | if (result != AVIF_RESULT_OK) { 993 | PyErr_Format( 994 | exc_type_for_avif_result(result), 995 | "Conversion from YUV failed: %s", 996 | avifResultToString(result)); 997 | avifRGBImageFreePixels(&rgb); 998 | return NULL; 999 | } 1000 | 1001 | if (rgb.height > PY_SSIZE_T_MAX / rgb.rowBytes) { 1002 | PyErr_SetString(PyExc_MemoryError, "Integer overflow in pixel size"); 1003 | return NULL; 1004 | } 1005 | 1006 | size = rgb.rowBytes * rgb.height; 1007 | 1008 | bytes = PyBytes_FromStringAndSize((char *)rgb.pixels, size); 1009 | avifRGBImageFreePixels(&rgb); 1010 | 1011 | ret = Py_BuildValue( 1012 | "SKKK", 1013 | bytes, 1014 | (unsigned PY_LONG_LONG)decoder->timescale, 1015 | (unsigned PY_LONG_LONG)decoder->imageTiming.ptsInTimescales, 1016 | (unsigned PY_LONG_LONG)decoder->imageTiming.durationInTimescales); 1017 | 1018 | Py_DECREF(bytes); 1019 | 1020 | return ret; 1021 | } 1022 | 1023 | /* -------------------------------------------------------------------- */ 1024 | /* Type Definitions */ 1025 | /* -------------------------------------------------------------------- */ 1026 | 1027 | // AvifEncoder methods 1028 | static struct PyMethodDef _encoder_methods[] = { 1029 | {"add", (PyCFunction)_encoder_add, METH_VARARGS}, 1030 | {"finish", (PyCFunction)_encoder_finish, METH_NOARGS}, 1031 | {NULL, NULL} /* sentinel */ 1032 | }; 1033 | 1034 | // AvifDecoder type definition 1035 | static PyTypeObject AvifEncoder_Type = { 1036 | // clang-format off 1037 | PyVarObject_HEAD_INIT(NULL, 0) 1038 | .tp_name = "AvifEncoder", 1039 | // clang-format on 1040 | .tp_basicsize = sizeof(AvifEncoderObject), 1041 | .tp_dealloc = (destructor)_encoder_dealloc, 1042 | .tp_flags = Py_TPFLAGS_DEFAULT, 1043 | .tp_methods = _encoder_methods, 1044 | }; 1045 | 1046 | // AvifDecoder methods 1047 | static struct PyMethodDef _decoder_methods[] = { 1048 | {"get_info", (PyCFunction)_decoder_get_info, METH_NOARGS}, 1049 | {"get_frame", (PyCFunction)_decoder_get_frame, METH_VARARGS}, 1050 | {NULL, NULL} /* sentinel */ 1051 | }; 1052 | 1053 | // AvifDecoder type definition 1054 | static PyTypeObject AvifDecoder_Type = { 1055 | // clang-format off 1056 | PyVarObject_HEAD_INIT(NULL, 0) 1057 | .tp_name = "AvifDecoder", 1058 | // clang-format on 1059 | .tp_basicsize = sizeof(AvifDecoderObject), 1060 | .tp_itemsize = 0, 1061 | .tp_dealloc = (destructor)_decoder_dealloc, 1062 | .tp_flags = Py_TPFLAGS_DEFAULT, 1063 | .tp_methods = _decoder_methods, 1064 | }; 1065 | 1066 | PyObject * 1067 | AvifCodecVersions() { 1068 | char codecVersions[256]; 1069 | avifCodecVersions(codecVersions); 1070 | return PyUnicode_FromString(codecVersions); 1071 | } 1072 | 1073 | /* -------------------------------------------------------------------- */ 1074 | /* Module Setup */ 1075 | /* -------------------------------------------------------------------- */ 1076 | 1077 | #if PY_VERSION_HEX >= 0x03000000 1078 | #define MOD_ERROR_VAL NULL 1079 | #define MOD_SUCCESS_VAL(val) val 1080 | #define MOD_INIT(name) 
PyMODINIT_FUNC PyInit_##name(void) 1081 | #define MOD_DEF(ob, name, methods) \ 1082 | static struct PyModuleDef moduledef = { \ 1083 | PyModuleDef_HEAD_INIT, \ 1084 | name, \ 1085 | NULL, \ 1086 | -1, \ 1087 | methods, \ 1088 | }; \ 1089 | ob = PyModule_Create(&moduledef); 1090 | #else 1091 | #define MOD_ERROR_VAL 1092 | #define MOD_SUCCESS_VAL(val) 1093 | #define MOD_INIT(name) void init##name(void) 1094 | #define MOD_DEF(ob, name, methods) ob = Py_InitModule(name, methods); 1095 | #endif 1096 | 1097 | static PyMethodDef avifMethods[] = { 1098 | {"AvifDecoder", AvifDecoderNew, METH_VARARGS}, 1099 | {"AvifEncoder", AvifEncoderNew, METH_VARARGS}, 1100 | {"AvifCodecVersions", AvifCodecVersions, METH_NOARGS}, 1101 | {"decoder_codec_available", _decoder_codec_available, METH_VARARGS}, 1102 | {"encoder_codec_available", _encoder_codec_available, METH_VARARGS}, 1103 | {NULL, NULL}}; 1104 | 1105 | static int 1106 | setup_module(PyObject *m) { 1107 | PyObject *d = PyModule_GetDict(m); 1108 | 1109 | PyObject *v = PyUnicode_FromString(avifVersion()); 1110 | if (PyDict_SetItemString(d, "libavif_version", v) < 0) { 1111 | Py_DECREF(v); 1112 | return -1; 1113 | } 1114 | Py_DECREF(v); 1115 | 1116 | v = Py_BuildValue( 1117 | "(iii)", AVIF_VERSION_MAJOR, AVIF_VERSION_MINOR, AVIF_VERSION_PATCH); 1118 | 1119 | if (PyDict_SetItemString(d, "VERSION", v) < 0) { 1120 | Py_DECREF(v); 1121 | return -1; 1122 | } 1123 | Py_DECREF(v); 1124 | 1125 | if (PyType_Ready(&AvifDecoder_Type) < 0 || PyType_Ready(&AvifEncoder_Type) < 0) { 1126 | return -1; 1127 | } 1128 | return 0; 1129 | } 1130 | 1131 | MOD_INIT(_avif) { 1132 | PyObject *m; 1133 | 1134 | MOD_DEF(m, "_avif", avifMethods) 1135 | 1136 | if (m == NULL || setup_module(m) < 0) { 1137 | return MOD_ERROR_VAL; 1138 | } 1139 | 1140 | #ifdef Py_GIL_DISABLED 1141 | PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); 1142 | #endif 1143 | 1144 | return MOD_SUCCESS_VAL(m); 1145 | } 1146 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/__init__.py -------------------------------------------------------------------------------- /tests/helper.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper functions (from Pillow). 3 | """ 4 | 5 | import gc 6 | from io import BytesIO 7 | import logging 8 | import os 9 | from struct import unpack 10 | import sys 11 | import tempfile 12 | 13 | import pytest 14 | 15 | from PIL import Image, ImageMath 16 | 17 | 18 | logger = logging.getLogger(__name__) 19 | CURR_DIR = os.path.dirname(os.path.dirname(__file__)) 20 | 21 | 22 | HAS_UPLOADER = False 23 | 24 | if os.environ.get("SHOW_ERRORS", None): 25 | # local img.show for errors. 
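    # For example, a hypothetical local invocation such as
    #     SHOW_ERRORS=1 pytest tests/
    # will call Image.show() on both images whenever a similarity
    # assertion fails, rather than saving them anywhere.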
26 | HAS_UPLOADER = True 27 | 28 | class test_image_results: 29 | @staticmethod 30 | def upload(a, b): 31 | a.show() 32 | b.show() 33 | 34 | elif "GITHUB_ACTIONS" in os.environ: 35 | HAS_UPLOADER = True 36 | 37 | class test_image_results: 38 | @staticmethod 39 | def upload(a, b): 40 | dir_errors = os.path.join(os.path.dirname(__file__), "errors") 41 | os.makedirs(dir_errors, exist_ok=True) 42 | tmpdir = tempfile.mkdtemp(dir=dir_errors) 43 | a.save(os.path.join(tmpdir, "a.png")) 44 | b.save(os.path.join(tmpdir, "b.png")) 45 | return tmpdir 46 | 47 | else: 48 | try: 49 | import test_image_results 50 | 51 | HAS_UPLOADER = True 52 | except ImportError: 53 | pass 54 | 55 | 56 | def convert_to_comparable(a, b): 57 | new_a, new_b = a, b 58 | if a.mode == "P": 59 | new_a = Image.new("L", a.size) 60 | new_b = Image.new("L", b.size) 61 | new_a.putdata(a.getdata()) 62 | new_b.putdata(b.getdata()) 63 | elif a.mode == "I;16": 64 | new_a = a.convert("I") 65 | new_b = b.convert("I") 66 | return new_a, new_b 67 | 68 | 69 | def assert_image(im, mode, size, msg=None): 70 | if mode is not None: 71 | assert im.mode == mode, msg or "got mode %r, expected %r" % (im.mode, mode) 72 | 73 | if size is not None: 74 | assert im.size == size, msg or "got size %r, expected %r" % (im.size, size) 75 | 76 | 77 | def assert_image_similar(a, b, epsilon, msg=None): 78 | assert a.mode == b.mode, msg or "got mode %r, expected %r" % (a.mode, b.mode) 79 | assert a.size == b.size, msg or "got size %r, expected %r" % (a.size, b.size) 80 | 81 | a, b = convert_to_comparable(a, b) 82 | 83 | diff = 0 84 | for ach, bch in zip(a.split(), b.split()): 85 | chdiff = ImageMath.eval("abs(a - b)", a=ach, b=bch).convert("L") 86 | diff += sum(i * num for i, num in enumerate(chdiff.histogram())) 87 | 88 | ave_diff = diff / (a.size[0] * a.size[1]) 89 | try: 90 | assert epsilon >= ave_diff, ( 91 | msg or "" 92 | ) + " average pixel value difference %.04f > epsilon %.04f" % ( 93 | ave_diff, 94 | epsilon, 95 | ) 96 | except Exception as e: 97 | if HAS_UPLOADER: 98 | try: 99 | url = test_image_results.upload(a, b) 100 | logger.error("Url for test images: %s" % url) 101 | except Exception: 102 | pass 103 | raise e 104 | 105 | 106 | def assert_image_similar_tofile(a, filename, epsilon, msg=None, mode=None): 107 | with Image.open(filename) as img: 108 | if mode: 109 | img = img.convert(mode) 110 | assert_image_similar(a, img, epsilon, msg) 111 | 112 | 113 | @pytest.mark.skipif(sys.platform.startswith("win32"), reason="Requires Unix or macOS") 114 | class PillowLeakTestCase: 115 | # requires unix/macOS 116 | iterations = 100 # count 117 | mem_limit = 512 # k 118 | 119 | def _get_mem_usage(self): 120 | """ 121 | Gets the RUSAGE memory usage, returns in K. Encapsulates the difference 122 | between macOS and Linux rss reporting 123 | 124 | :returns: memory usage in kilobytes 125 | """ 126 | 127 | from resource import RUSAGE_SELF, getrusage 128 | 129 | mem = getrusage(RUSAGE_SELF).ru_maxrss 130 | if sys.platform == "darwin": 131 | # man 2 getrusage: 132 | # ru_maxrss 133 | # This is the maximum resident set size utilized (in bytes). 134 | return mem / 1024 # Kb 135 | else: 136 | # linux 137 | # man 2 getrusage 138 | # ru_maxrss (since Linux 2.6.32) 139 | # This is the maximum resident set size used (in kilobytes). 
140 | return mem # Kb 141 | 142 | def _test_leak(self, core): 143 | start_mem = self._get_mem_usage() 144 | for cycle in range(self.iterations): 145 | core() 146 | gc.collect() 147 | mem = self._get_mem_usage() - start_mem 148 | msg = "memory usage limit exceeded in iteration %s" % cycle 149 | assert mem < self.mem_limit, msg 150 | 151 | 152 | def hopper(mode=None, cache={}): 153 | if mode is None: 154 | # Always return fresh not-yet-loaded version of image. 155 | # Operations on not-yet-loaded images is separate class of errors 156 | # what we should catch. 157 | return Image.open("%s/tests/images/hopper.ppm" % CURR_DIR) 158 | # Use caching to reduce reading from disk but so an original copy is 159 | # returned each time and the cached image isn't modified by tests 160 | # (for fast, isolated, repeatable tests). 161 | im = cache.get(mode) 162 | if im is None: 163 | if mode == "F": 164 | im = hopper("L").convert(mode) 165 | elif mode[:4] == "I;16": 166 | im = hopper("I").convert(mode) 167 | else: 168 | im = hopper().convert(mode) 169 | cache[mode] = im 170 | return im.copy() 171 | 172 | 173 | def is_ascii(s): 174 | for char in s: 175 | if isinstance(char, str): 176 | char = ord(char) 177 | if char < 0x20 or char > 0x7E: 178 | return False 179 | return True 180 | 181 | 182 | def has_alpha_premultiplied(im_bytes): 183 | stream = BytesIO(im_bytes) 184 | length = len(im_bytes) 185 | while stream.tell() < length: 186 | start = stream.tell() 187 | size, boxtype = unpack(">L4s", stream.read(8)) 188 | if not is_ascii(boxtype): 189 | return False 190 | if size == 1: # 64bit size 191 | (size,) = unpack(">Q", stream.read(8)) 192 | end = start + size 193 | version, _ = unpack(">B3s", stream.read(4)) 194 | if boxtype in (b"ftyp", b"hdlr", b"pitm", b"iloc", b"iinf"): 195 | # Skip these boxes 196 | stream.seek(end) 197 | continue 198 | elif boxtype == b"meta": 199 | # Container box possibly including iref prem, continue to parse boxes 200 | # inside it 201 | continue 202 | elif boxtype == b"iref": 203 | while stream.tell() < end: 204 | _, iref_type = unpack(">L4s", stream.read(8)) 205 | version, _ = unpack(">B3s", stream.read(4)) 206 | if iref_type == b"prem": 207 | return True 208 | stream.read(2 if version == 0 else 4) 209 | else: 210 | return False 211 | return False 212 | -------------------------------------------------------------------------------- /tests/images/chi.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/chi.gif -------------------------------------------------------------------------------- /tests/images/chimera-missing-pixi.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/chimera-missing-pixi.avif -------------------------------------------------------------------------------- /tests/images/exif.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/exif.avif -------------------------------------------------------------------------------- /tests/images/flower.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/flower.jpg -------------------------------------------------------------------------------- /tests/images/hopper.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/hopper.avif -------------------------------------------------------------------------------- /tests/images/hopper.ppm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/hopper.ppm -------------------------------------------------------------------------------- /tests/images/hopper_avif_write.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/hopper_avif_write.png -------------------------------------------------------------------------------- /tests/images/icc_profile.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/icc_profile.avif -------------------------------------------------------------------------------- /tests/images/icc_profile_none.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/icc_profile_none.avif -------------------------------------------------------------------------------- /tests/images/rgba10.heif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/rgba10.heif -------------------------------------------------------------------------------- /tests/images/star.avifs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star.avifs -------------------------------------------------------------------------------- /tests/images/star.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star.gif -------------------------------------------------------------------------------- /tests/images/star.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star.png -------------------------------------------------------------------------------- /tests/images/star180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star180.png -------------------------------------------------------------------------------- /tests/images/star270.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star270.png -------------------------------------------------------------------------------- /tests/images/star90.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/star90.png -------------------------------------------------------------------------------- /tests/images/transparency.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/transparency.avif -------------------------------------------------------------------------------- /tests/images/xmp_tags_orientation.avif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/xmp_tags_orientation.avif -------------------------------------------------------------------------------- /tests/images/xmp_tags_orientation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fdintino/pillow-avif-plugin/412f829b4418291e49a6fd9e998aa70e965c4d6e/tests/images/xmp_tags_orientation.png -------------------------------------------------------------------------------- /tests/test_file_avif.py: -------------------------------------------------------------------------------- 1 | import os 2 | import xml.etree.ElementTree 3 | from contextlib import contextmanager 4 | from io import BytesIO 5 | import warnings 6 | 7 | try: 8 | from os import cpu_count 9 | except ImportError: 10 | from multiprocessing import cpu_count 11 | 12 | import pytest 13 | 14 | from PIL import Image, ImageDraw 15 | from pillow_avif import AvifImagePlugin 16 | 17 | from .helper import ( 18 | PillowLeakTestCase, 19 | assert_image, 20 | assert_image_similar, 21 | assert_image_similar_tofile, 22 | hopper, 23 | has_alpha_premultiplied, 24 | ) 25 | 26 | from pillow_avif import _avif 27 | 28 | try: 29 | from PIL import UnidentifiedImageError 30 | except ImportError: 31 | UnidentifiedImageError = None 32 | 33 | 34 | CURR_DIR = os.path.dirname(os.path.dirname(__file__)) 35 | TEST_AVIF_FILE = "%s/tests/images/hopper.avif" % CURR_DIR 36 | 37 | 38 | def assert_xmp_orientation(xmp, expected): 39 | assert isinstance(xmp, bytes) 40 | root = xml.etree.ElementTree.fromstring(xmp) 41 | orientation = None 42 | for elem in root.iter(): 43 | if elem.tag.endswith("}Description"): 44 | orientation = elem.attrib.get("{http://ns.adobe.com/tiff/1.0/}Orientation") 45 | if orientation: 46 | orientation = int(orientation) 47 | break 48 | assert orientation == expected 49 | 50 | 51 | def roundtrip(im, **options): 52 | out = BytesIO() 53 | im.save(out, "AVIF", **options) 54 | out.seek(0) 55 | return Image.open(out) 56 | 57 | 58 | def skip_unless_avif_decoder(codec_name): 59 | reason = "%s decode not available" % codec_name 60 | return pytest.mark.skipif( 61 | not _avif or not _avif.decoder_codec_available(codec_name), reason=reason 62 | ) 63 | 64 | 65 | def skip_unless_avif_encoder(codec_name): 66 | reason = "%s encode not available" % codec_name 67 | return pytest.mark.skipif( 68 | not _avif or not _avif.encoder_codec_available(codec_name), reason=reason 69 | ) 70 | 71 | 72 | def is_docker_qemu(): 73 | 
try: 74 | init_proc_exe = os.readlink("/proc/1/exe") 75 | except: # noqa: E722 76 | return False 77 | else: 78 | return "qemu" in init_proc_exe 79 | 80 | 81 | def skip_unless_avif_version_gte(version): 82 | if not _avif: 83 | reason = "AVIF unavailable" 84 | should_skip = True 85 | else: 86 | version_str = ".".join([str(v) for v in version]) 87 | reason = "%s < %s" % (_avif.libavif_version, version_str) 88 | should_skip = _avif.VERSION < version 89 | return pytest.mark.skipif(should_skip, reason=reason) 90 | 91 | 92 | def skip_unless_avif_version_lt(version): 93 | if not _avif: 94 | reason = "AVIF unavailable" 95 | should_skip = True 96 | else: 97 | version_str = ".".join([str(v) for v in version]) 98 | reason = "%s > %s" % (_avif.libavif_version, version_str) 99 | should_skip = _avif.VERSION >= version 100 | return pytest.mark.skipif(should_skip, reason=reason) 101 | 102 | 103 | class TestUnsupportedAvif: 104 | def test_unsupported(self): 105 | AvifImagePlugin.SUPPORTED = False 106 | 107 | try: 108 | file_path = "%s/tests/images/hopper.avif" % CURR_DIR 109 | if UnidentifiedImageError: 110 | pytest.warns( 111 | UserWarning, 112 | lambda: pytest.raises( 113 | UnidentifiedImageError, Image.open, file_path 114 | ), 115 | ) 116 | else: 117 | with pytest.raises(IOError): 118 | Image.open(file_path) 119 | finally: 120 | AvifImagePlugin.SUPPORTED = True 121 | 122 | 123 | class TestFileAvif: 124 | def test_version(self): 125 | _avif.AvifCodecVersions() 126 | 127 | def test_read(self): 128 | """ 129 | Can we read an AVIF file without error? 130 | Does it have the bits we expect? 131 | """ 132 | 133 | with Image.open("%s/tests/images/hopper.avif" % CURR_DIR) as image: 134 | assert image.mode == "RGB" 135 | assert image.size == (128, 128) 136 | assert image.format == "AVIF" 137 | assert image.get_format_mimetype() == "image/avif" 138 | image.load() 139 | image.getdata() 140 | 141 | # generated with: 142 | # avifdec hopper.avif hopper_avif_write.png 143 | assert_image_similar_tofile( 144 | image, "%s/tests/images/hopper_avif_write.png" % CURR_DIR, 12.0 145 | ) 146 | 147 | def _roundtrip(self, tmp_path, mode, epsilon, args={}): 148 | temp_file = str(tmp_path / "temp.avif") 149 | 150 | hopper(mode).save(temp_file, **args) 151 | with Image.open(temp_file) as image: 152 | assert image.mode == "RGB" 153 | assert image.size == (128, 128) 154 | assert image.format == "AVIF" 155 | image.load() 156 | image.getdata() 157 | 158 | if mode == "RGB": 159 | # avifdec hopper.avif avif/hopper_avif_write.png 160 | assert_image_similar_tofile( 161 | image, "%s/tests/images/hopper_avif_write.png" % CURR_DIR, 12.0 162 | ) 163 | 164 | # This test asserts that the images are similar. If the average pixel 165 | # difference between the two images is less than the epsilon value, 166 | # then we're going to accept that it's a reasonable lossy version of 167 | # the image. 168 | target = hopper(mode) 169 | if mode != "RGB": 170 | target = target.convert("RGB") 171 | assert_image_similar(image, target, epsilon) 172 | 173 | def test_write_rgb(self, tmp_path): 174 | """ 175 | Can we write a RGB mode file to avif without error? 176 | Does it have the bits we expect? 177 | """ 178 | 179 | self._roundtrip(tmp_path, "RGB", 12.5) 180 | 181 | def test_AvifEncoder_with_invalid_args(self): 182 | """ 183 | Calling encoder functions with no arguments should result in an error. 
184 | """ 185 | with pytest.raises(TypeError): 186 | _avif.AvifEncoder() 187 | 188 | def test_AvifDecoder_with_invalid_args(self): 189 | """ 190 | Calling decoder functions with no arguments should result in an error. 191 | """ 192 | with pytest.raises(TypeError): 193 | _avif.AvifDecoder() 194 | 195 | @pytest.mark.parametrize("major_brand", [b"avif", b"avis", b"mif1", b"msf1"]) 196 | def test_accept_ftyp_brands(self, major_brand): 197 | data = b"\x00\x00\x00\x1cftyp%s\x00\x00\x00\x00" % major_brand 198 | assert AvifImagePlugin._accept(data) is True 199 | 200 | def test_no_resource_warning(self, tmp_path): 201 | with Image.open(TEST_AVIF_FILE) as image: 202 | temp_file = str(tmp_path / "temp.avif") 203 | with warnings.catch_warnings(): 204 | warnings.simplefilter("error") 205 | image.save(temp_file) 206 | 207 | def test_file_pointer_could_be_reused(self): 208 | with open(TEST_AVIF_FILE, "rb") as blob: 209 | Image.open(blob).load() 210 | Image.open(blob).load() 211 | 212 | def test_background_from_gif(self, tmp_path): 213 | with Image.open("%s/tests/images/chi.gif" % CURR_DIR) as im: 214 | original_value = im.convert("RGB").getpixel((1, 1)) 215 | 216 | # Save as AVIF 217 | out_avif = str(tmp_path / "temp.avif") 218 | im.save(out_avif, save_all=True) 219 | 220 | # Save as GIF 221 | out_gif = str(tmp_path / "temp.gif") 222 | Image.open(out_avif).save(out_gif) 223 | 224 | with Image.open(out_gif) as reread: 225 | reread_value = reread.convert("RGB").getpixel((1, 1)) 226 | difference = sum( 227 | [abs(original_value[i] - reread_value[i]) for i in range(0, 3)] 228 | ) 229 | assert difference <= 6 230 | 231 | def test_save_single_frame(self, tmp_path): 232 | temp_file = str(tmp_path / "temp.avif") 233 | with Image.open("%s/tests/images/chi.gif" % CURR_DIR) as im: 234 | # Save as AVIF 235 | im.save(temp_file) 236 | with Image.open(temp_file) as im: 237 | assert im.n_frames == 1 238 | 239 | def test_invalid_file(self): 240 | invalid_file = "tests/images/flower.jpg" 241 | 242 | with pytest.raises(SyntaxError): 243 | AvifImagePlugin.AvifImageFile(invalid_file) 244 | 245 | def test_load_transparent_rgb(self): 246 | test_file = "tests/images/transparency.avif" 247 | with Image.open(test_file) as im: 248 | assert_image(im, "RGBA", (64, 64)) 249 | 250 | # image has 876 transparent pixels 251 | assert im.getchannel("A").getcolors()[0][0] == 876 252 | 253 | def test_save_transparent(self, tmp_path): 254 | im = Image.new("RGBA", (10, 10), (0, 0, 0, 0)) 255 | assert im.getcolors() == [(100, (0, 0, 0, 0))] 256 | 257 | test_file = str(tmp_path / "temp.avif") 258 | im.save(test_file) 259 | 260 | # check if saved image contains same transparency 261 | with Image.open(test_file) as im: 262 | assert_image(im, "RGBA", (10, 10)) 263 | assert im.getcolors() == [(100, (0, 0, 0, 0))] 264 | 265 | def test_save_icc_profile(self): 266 | with Image.open("tests/images/icc_profile_none.avif") as im: 267 | assert im.info.get("icc_profile") is None 268 | 269 | with Image.open("tests/images/icc_profile.avif") as with_icc: 270 | expected_icc = with_icc.info.get("icc_profile") 271 | assert expected_icc is not None 272 | 273 | im = roundtrip(im, icc_profile=expected_icc) 274 | assert im.info["icc_profile"] == expected_icc 275 | 276 | def test_discard_icc_profile(self): 277 | with Image.open("tests/images/icc_profile.avif") as im: 278 | im = roundtrip(im, icc_profile=None) 279 | assert "icc_profile" not in im.info 280 | 281 | def test_roundtrip_icc_profile(self): 282 | with Image.open("tests/images/icc_profile.avif") as im: 283 | 
expected_icc = im.info["icc_profile"] 284 | 285 | im = roundtrip(im) 286 | assert im.info["icc_profile"] == expected_icc 287 | 288 | def test_roundtrip_no_icc_profile(self): 289 | with Image.open("tests/images/icc_profile_none.avif") as im: 290 | assert im.info.get("icc_profile") is None 291 | 292 | im = roundtrip(im) 293 | assert "icc_profile" not in im.info 294 | 295 | def test_exif(self): 296 | # With an EXIF chunk 297 | with Image.open("tests/images/exif.avif") as im: 298 | exif = im.getexif() 299 | assert exif[274] == 1 300 | 301 | def test_exif_save(self, tmp_path): 302 | with Image.open("tests/images/exif.avif") as im: 303 | test_file = str(tmp_path / "temp.avif") 304 | im.save(test_file) 305 | 306 | with Image.open(test_file) as reloaded: 307 | exif = reloaded.getexif() 308 | assert exif[274] == 1 309 | 310 | def test_exif_obj_argument(self, tmp_path): 311 | exif = Image.Exif() 312 | exif[274] = 1 313 | exif_data = exif.tobytes() 314 | with Image.open(TEST_AVIF_FILE) as im: 315 | test_file = str(tmp_path / "temp.avif") 316 | im.save(test_file, exif=exif) 317 | 318 | with Image.open(test_file) as reloaded: 319 | assert reloaded.info["exif"] == exif_data 320 | 321 | def test_exif_bytes_argument(self, tmp_path): 322 | exif = Image.Exif() 323 | exif[274] = 1 324 | exif_data = exif.tobytes() 325 | with Image.open(TEST_AVIF_FILE) as im: 326 | test_file = str(tmp_path / "temp.avif") 327 | im.save(test_file, exif=exif_data) 328 | 329 | with Image.open(test_file) as reloaded: 330 | assert reloaded.info["exif"] == exif_data 331 | 332 | def test_exif_invalid(self, tmp_path): 333 | with Image.open(TEST_AVIF_FILE) as im: 334 | test_file = str(tmp_path / "temp.avif") 335 | with pytest.raises(ValueError): 336 | im.save(test_file, exif=b"invalid") 337 | 338 | def test_xmp(self): 339 | with Image.open("tests/images/xmp_tags_orientation.avif") as im: 340 | xmp = im.info.get("xmp") 341 | assert_xmp_orientation(xmp, 3) 342 | 343 | def test_xmp_save(self, tmp_path): 344 | with Image.open("tests/images/xmp_tags_orientation.avif") as im: 345 | test_file = str(tmp_path / "temp.avif") 346 | im.save(test_file) 347 | 348 | with Image.open(test_file) as reloaded: 349 | xmp = reloaded.info.get("xmp") 350 | assert_xmp_orientation(xmp, 3) 351 | 352 | def test_xmp_save_from_png(self, tmp_path): 353 | with Image.open("tests/images/xmp_tags_orientation.png") as im: 354 | test_file = str(tmp_path / "temp.avif") 355 | im.save(test_file) 356 | 357 | with Image.open(test_file) as reloaded: 358 | xmp = reloaded.info.get("xmp") 359 | assert_xmp_orientation(xmp, 3) 360 | 361 | def test_xmp_save_argument(self, tmp_path): 362 | xmp_arg = "\n".join( 363 | [ 364 | '<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>', 365 | '<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="XMP Core 5.5.0">', 366 | ' <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">', 367 | '  <rdf:Description rdf:about=""', 368 | '    xmlns:tiff="http://ns.adobe.com/tiff/1.0/"', 369 | '   tiff:Orientation="1"/>', 370 | " </rdf:RDF>", 371 | "</x:xmpmeta>", 372 | '<?xpacket end="r"?>', 373 | ] 374 | ) 375 | with Image.open("tests/images/hopper.avif") as im: 376 | test_file = str(tmp_path / "temp.avif") 377 | im.save(test_file, xmp=xmp_arg) 378 | 379 | with Image.open(test_file) as reloaded: 380 | xmp = reloaded.info.get("xmp") 381 | assert_xmp_orientation(xmp, 1) 382 | 383 | def test_tell(self): 384 | with Image.open(TEST_AVIF_FILE) as im: 385 | assert im.tell() == 0 386 | 387 | def test_seek(self): 388 | with Image.open(TEST_AVIF_FILE) as im: 389 | im.seek(0) 390 | 391 | with pytest.raises(EOFError): 392 | im.seek(1) 393 | 394 | @pytest.mark.parametrize("subsampling", ["4:4:4", "4:2:2", "4:0:0"]) 395 | def test_encoder_subsampling(self, tmp_path, subsampling): 396 | with Image.open(TEST_AVIF_FILE) as im: 397 | test_file = str(tmp_path / "temp.avif") 398 | im.save(test_file,
subsampling=subsampling) 399 | 400 | def test_encoder_subsampling_invalid(self, tmp_path): 401 | with Image.open(TEST_AVIF_FILE) as im: 402 | test_file = str(tmp_path / "temp.avif") 403 | with pytest.raises(ValueError): 404 | im.save(test_file, subsampling="foo") 405 | 406 | def test_encoder_range(self, tmp_path): 407 | with Image.open(TEST_AVIF_FILE) as im: 408 | test_file = str(tmp_path / "temp.avif") 409 | im.save(test_file, range="limited") 410 | 411 | def test_encoder_range_invalid(self, tmp_path): 412 | with Image.open(TEST_AVIF_FILE) as im: 413 | test_file = str(tmp_path / "temp.avif") 414 | with pytest.raises(ValueError): 415 | im.save(test_file, range="foo") 416 | 417 | @skip_unless_avif_encoder("aom") 418 | def test_encoder_codec_param(self, tmp_path): 419 | with Image.open(TEST_AVIF_FILE) as im: 420 | test_file = str(tmp_path / "temp.avif") 421 | im.save(test_file, codec="aom") 422 | 423 | def test_encoder_codec_invalid(self, tmp_path): 424 | with Image.open(TEST_AVIF_FILE) as im: 425 | test_file = str(tmp_path / "temp.avif") 426 | with pytest.raises(ValueError): 427 | im.save(test_file, codec="foo") 428 | 429 | @skip_unless_avif_decoder("dav1d") 430 | def test_encoder_codec_cannot_encode(self, tmp_path): 431 | with Image.open(TEST_AVIF_FILE) as im: 432 | test_file = str(tmp_path / "temp.avif") 433 | with pytest.raises(ValueError): 434 | im.save(test_file, codec="dav1d") 435 | 436 | @skip_unless_avif_encoder("aom") 437 | @skip_unless_avif_version_gte((0, 8, 2)) 438 | def test_encoder_advanced_codec_options(self): 439 | with Image.open(TEST_AVIF_FILE) as im: 440 | ctrl_buf = BytesIO() 441 | im.save(ctrl_buf, "AVIF", codec="aom") 442 | test_buf = BytesIO() 443 | im.save( 444 | test_buf, 445 | "AVIF", 446 | codec="aom", 447 | advanced={ 448 | "aq-mode": "1", 449 | "enable-chroma-deltaq": "1", 450 | }, 451 | ) 452 | assert ctrl_buf.getvalue() != test_buf.getvalue() 453 | 454 | @skip_unless_avif_encoder("aom") 455 | @skip_unless_avif_version_gte((0, 8, 2)) 456 | @pytest.mark.parametrize("val", [{"foo": "bar"}, 1234]) 457 | def test_encoder_advanced_codec_options_invalid(self, tmp_path, val): 458 | with Image.open(TEST_AVIF_FILE) as im: 459 | test_file = str(tmp_path / "temp.avif") 460 | with pytest.raises(ValueError): 461 | im.save(test_file, codec="aom", advanced=val) 462 | 463 | @skip_unless_avif_decoder("aom") 464 | def test_decoder_codec_param(self): 465 | AvifImagePlugin.DECODE_CODEC_CHOICE = "aom" 466 | try: 467 | with Image.open(TEST_AVIF_FILE) as im: 468 | assert im.size == (128, 128) 469 | finally: 470 | AvifImagePlugin.DECODE_CODEC_CHOICE = "auto" 471 | 472 | @skip_unless_avif_encoder("rav1e") 473 | def test_decoder_codec_cannot_decode(self, tmp_path): 474 | AvifImagePlugin.DECODE_CODEC_CHOICE = "rav1e" 475 | try: 476 | with pytest.raises(ValueError): 477 | with Image.open(TEST_AVIF_FILE): 478 | pass 479 | finally: 480 | AvifImagePlugin.DECODE_CODEC_CHOICE = "auto" 481 | 482 | def test_decoder_codec_invalid(self): 483 | AvifImagePlugin.DECODE_CODEC_CHOICE = "foo" 484 | try: 485 | with pytest.raises(ValueError): 486 | with Image.open(TEST_AVIF_FILE): 487 | pass 488 | finally: 489 | AvifImagePlugin.DECODE_CODEC_CHOICE = "auto" 490 | 491 | @skip_unless_avif_encoder("aom") 492 | def test_encoder_codec_available(self): 493 | assert _avif.encoder_codec_available("aom") is True 494 | 495 | def test_encoder_codec_available_bad_params(self): 496 | with pytest.raises(TypeError): 497 | _avif.encoder_codec_available() 498 | 499 | @skip_unless_avif_encoder("dav1d") 500 | def 
test_encoder_codec_available_cannot_decode(self): 501 | assert _avif.encoder_codec_available("dav1d") is False 502 | 503 | def test_encoder_codec_available_invalid(self): 504 | assert _avif.encoder_codec_available("foo") is False 505 | 506 | @skip_unless_avif_version_lt((1, 0, 0)) 507 | @pytest.mark.parametrize( 508 | "quality,expected_qminmax", 509 | [ 510 | [0, (63, 63)], 511 | [100, (0, 0)], 512 | [90, (0, 10)], 513 | [None, (0, 25)], # default 514 | [50, (14, 50)], 515 | ], 516 | ) 517 | def test_encoder_quality_qmin_qmax_map(self, tmp_path, quality, expected_qminmax): 518 | qmin, qmax = expected_qminmax 519 | with Image.open("tests/images/hopper.avif") as im: 520 | out_quality = BytesIO() 521 | out_qminmax = BytesIO() 522 | im.save(out_qminmax, "AVIF", qmin=qmin, qmax=qmax) 523 | if quality is None: 524 | im.save(out_quality, "AVIF") 525 | else: 526 | im.save(out_quality, "AVIF", quality=quality) 527 | assert len(out_quality.getvalue()) == len(out_qminmax.getvalue()) 528 | 529 | def test_encoder_quality_valueerror(self, tmp_path): 530 | with Image.open("tests/images/hopper.avif") as im: 531 | test_file = str(tmp_path / "temp.avif") 532 | with pytest.raises(ValueError): 533 | im.save(test_file, quality="invalid") 534 | 535 | @skip_unless_avif_decoder("aom") 536 | def test_decoder_codec_available(self): 537 | assert _avif.decoder_codec_available("aom") is True 538 | 539 | def test_decoder_codec_available_bad_params(self): 540 | with pytest.raises(TypeError): 541 | _avif.decoder_codec_available() 542 | 543 | @skip_unless_avif_encoder("rav1e") 544 | def test_decoder_codec_available_cannot_decode(self): 545 | assert _avif.decoder_codec_available("rav1e") is False 546 | 547 | def test_decoder_codec_available_invalid(self): 548 | assert _avif.decoder_codec_available("foo") is False 549 | 550 | @pytest.mark.parametrize("upsampling", ["fastest", "best", "nearest", "bilinear"]) 551 | def test_decoder_upsampling(self, upsampling): 552 | AvifImagePlugin.CHROMA_UPSAMPLING = upsampling 553 | try: 554 | with Image.open(TEST_AVIF_FILE): 555 | pass 556 | finally: 557 | AvifImagePlugin.CHROMA_UPSAMPLING = "auto" 558 | 559 | def test_decoder_upsampling_invalid(self): 560 | AvifImagePlugin.CHROMA_UPSAMPLING = "foo" 561 | try: 562 | with pytest.raises(ValueError): 563 | with Image.open(TEST_AVIF_FILE): 564 | pass 565 | finally: 566 | AvifImagePlugin.CHROMA_UPSAMPLING = "auto" 567 | 568 | def test_p_mode_transparency(self): 569 | im = Image.new("P", size=(64, 64)) 570 | draw = ImageDraw.Draw(im) 571 | draw.rectangle(xy=[(0, 0), (32, 32)], fill=255) 572 | draw.rectangle(xy=[(32, 32), (64, 64)], fill=255) 573 | 574 | buf_png = BytesIO() 575 | im.save(buf_png, format="PNG", transparency=0) 576 | im_png = Image.open(buf_png) 577 | buf_out = BytesIO() 578 | im_png.save(buf_out, format="AVIF", quality=100) 579 | 580 | assert_image_similar(im_png.convert("RGBA"), Image.open(buf_out), 1) 581 | 582 | def test_decoder_strict_flags(self): 583 | # This would fail if full avif strictFlags were enabled 584 | with Image.open("%s/tests/images/chimera-missing-pixi.avif" % CURR_DIR) as im: 585 | assert im.size == (480, 270) 586 | 587 | @skip_unless_avif_encoder("aom") 588 | def test_aom_optimizations(self): 589 | im = hopper("RGB") 590 | buf = BytesIO() 591 | im.save(buf, format="AVIF", codec="aom", speed=1) 592 | 593 | @skip_unless_avif_encoder("svt") 594 | def test_svt_optimizations(self): 595 | im = hopper("RGB") 596 | buf = BytesIO() 597 | im.save(buf, format="AVIF", codec="svt", speed=1) 598 | 599 | 600 | class 
TestAvifAnimation: 601 | @contextmanager 602 | def star_frames(self): 603 | with Image.open("%s/tests/images/star.png" % CURR_DIR) as f1: 604 | with Image.open("%s/tests/images/star90.png" % CURR_DIR) as f2: 605 | with Image.open("%s/tests/images/star180.png" % CURR_DIR) as f3: 606 | with Image.open("%s/tests/images/star270.png" % CURR_DIR) as f4: 607 | yield [f1, f2, f3, f4] 608 | 609 | def test_n_frames(self): 610 | """ 611 | Ensure that AVIF format sets n_frames and is_animated attributes 612 | correctly. 613 | """ 614 | 615 | with Image.open("tests/images/hopper.avif") as im: 616 | assert im.n_frames == 1 617 | assert not im.is_animated 618 | 619 | with Image.open("tests/images/star.avifs") as im: 620 | assert im.n_frames == 5 621 | assert im.is_animated 622 | 623 | def test_write_animation_L(self, tmp_path): 624 | """ 625 | Convert an animated GIF to animated AVIF, then compare the frame 626 | count, and first and last frames to ensure they're visually similar. 627 | """ 628 | 629 | with Image.open("tests/images/star.gif") as orig: 630 | assert orig.n_frames > 1 631 | 632 | temp_file = str(tmp_path / "temp.avif") 633 | orig.save(temp_file, save_all=True) 634 | with Image.open(temp_file) as im: 635 | assert im.n_frames == orig.n_frames 636 | 637 | # Compare first and second-to-last frames to the original animated GIF 638 | orig.load() 639 | im.load() 640 | assert_image_similar(im.convert("RGB"), orig.convert("RGB"), 25.0) 641 | orig.seek(orig.n_frames - 2) 642 | im.seek(im.n_frames - 2) 643 | orig.load() 644 | im.load() 645 | assert_image_similar(im.convert("RGB"), orig.convert("RGB"), 25.0) 646 | 647 | def test_write_animation_RGB(self, tmp_path): 648 | """ 649 | Write an animated AVIF from RGB frames, and ensure the frames 650 | are visually similar to the originals. 
651 | """ 652 | 653 | def check(temp_file): 654 | with Image.open(temp_file) as im: 655 | assert im.n_frames == 4 656 | 657 | # Compare first frame to original 658 | im.load() 659 | assert_image_similar(im, frame1.convert("RGBA"), 25.0) 660 | 661 | # Compare second frame to original 662 | im.seek(1) 663 | im.load() 664 | assert_image_similar(im, frame2.convert("RGBA"), 25.0) 665 | 666 | with self.star_frames() as frames: 667 | frame1 = frames[0] 668 | frame2 = frames[1] 669 | temp_file1 = str(tmp_path / "temp.avif") 670 | frames[0].copy().save(temp_file1, save_all=True, append_images=frames[1:]) 671 | check(temp_file1) 672 | 673 | # Tests appending using a generator 674 | def imGenerator(ims): 675 | for im in ims: 676 | yield im 677 | 678 | temp_file2 = str(tmp_path / "temp_generator.avif") 679 | frames[0].copy().save( 680 | temp_file2, 681 | save_all=True, 682 | append_images=imGenerator(frames[1:]), 683 | ) 684 | check(temp_file2) 685 | 686 | def test_sequence_dimension_mismatch_check(self, tmp_path): 687 | temp_file = str(tmp_path / "temp.avif") 688 | frame1 = Image.new("RGB", (100, 100)) 689 | frame2 = Image.new("RGB", (150, 150)) 690 | with pytest.raises(ValueError): 691 | frame1.save(temp_file, save_all=True, append_images=[frame2], duration=100) 692 | 693 | def test_heif_raises_unidentified_image_error(self): 694 | with pytest.raises(UnidentifiedImageError or IOError): 695 | with Image.open("tests/images/rgba10.heif"): 696 | pass 697 | 698 | @skip_unless_avif_version_gte((0, 9, 0)) 699 | @pytest.mark.parametrize("alpha_premultipled", [False, True]) 700 | def test_alpha_premultiplied_true(self, alpha_premultipled): 701 | im = Image.new("RGBA", (10, 10), (0, 0, 0, 0)) 702 | im_buf = BytesIO() 703 | im.save(im_buf, "AVIF", alpha_premultiplied=alpha_premultipled) 704 | im_bytes = im_buf.getvalue() 705 | assert has_alpha_premultiplied(im_bytes) is alpha_premultipled 706 | 707 | def test_timestamp_and_duration(self, tmp_path): 708 | """ 709 | Try passing a list of durations, and make sure the encoded 710 | timestamps and durations are correct. 711 | """ 712 | 713 | durations = [1, 10, 20, 30, 40] 714 | temp_file = str(tmp_path / "temp.avif") 715 | with self.star_frames() as frames: 716 | frames[0].save( 717 | temp_file, 718 | save_all=True, 719 | append_images=(frames[1:] + [frames[0]]), 720 | duration=durations, 721 | ) 722 | 723 | with Image.open(temp_file) as im: 724 | assert im.n_frames == 5 725 | assert im.is_animated 726 | 727 | # Check that timestamps and durations match original values specified 728 | ts = 0 729 | for frame in range(im.n_frames): 730 | im.seek(frame) 731 | im.load() 732 | assert im.info["duration"] == durations[frame] 733 | assert im.info["timestamp"] == ts 734 | ts += durations[frame] 735 | 736 | def test_seeking(self, tmp_path): 737 | """ 738 | Create an animated AVIF file, and then try seeking through frames in 739 | reverse-order, verifying the timestamps and durations are correct. 
740 | """ 741 | 742 | dur = 33 743 | temp_file = str(tmp_path / "temp.avif") 744 | with self.star_frames() as frames: 745 | frames[0].save( 746 | temp_file, 747 | save_all=True, 748 | append_images=(frames[1:] + [frames[0]]), 749 | duration=dur, 750 | ) 751 | 752 | with Image.open(temp_file) as im: 753 | assert im.n_frames == 5 754 | assert im.is_animated 755 | 756 | # Traverse frames in reverse, checking timestamps and durations 757 | ts = dur * (im.n_frames - 1) 758 | for frame in reversed(range(im.n_frames)): 759 | im.seek(frame) 760 | im.load() 761 | assert im.info["duration"] == dur 762 | assert im.info["timestamp"] == ts 763 | ts -= dur 764 | 765 | def test_seek_errors(self): 766 | with Image.open("tests/images/star.avifs") as im: 767 | with pytest.raises(EOFError): 768 | im.seek(-1) 769 | 770 | with pytest.raises(EOFError): 771 | im.seek(42) 772 | 773 | 774 | if hasattr(os, "sched_getaffinity"): 775 | MAX_THREADS = len(os.sched_getaffinity(0)) 776 | else: 777 | MAX_THREADS = cpu_count() 778 | 779 | 780 | class TestAvifLeaks(PillowLeakTestCase): 781 | mem_limit = MAX_THREADS * 3 * 1024 782 | iterations = 100 783 | 784 | @pytest.mark.skipif( 785 | is_docker_qemu(), reason="Skipping on cross-architecture containers" 786 | ) 787 | def test_leak_load(self): 788 | with open(TEST_AVIF_FILE, "rb") as f: 789 | im_data = f.read() 790 | 791 | def core(): 792 | with Image.open(BytesIO(im_data)) as im: 793 | im.load() 794 | 795 | self._test_leak(core) 796 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{27,37,38,39,310,311,312,py37} 3 | minversion = 1.9 4 | 5 | [gh-actions] 6 | problem_matcher = False 7 | python = 8 | 2.7: py27 9 | 3.7: py37 10 | 3.8: py38 11 | 3.9: py39 12 | 3.10: py310 13 | 3.11: py311 14 | 3.12: py312 15 | pypy-3.7: pypy37 16 | 17 | [testenv] 18 | passenv = 19 | LDFLAGS 20 | CFLAGS 21 | LD_LIBRARY_PATH 22 | TERM 23 | CC 24 | use_develop = true 25 | skip_install = true 26 | setenv = 27 | CFLAGS = {env:CFLAGS: } -coverage -fprofile-dir={toxinidir} -ftest-coverage -fprofile-arcs 28 | LDFLAGS = {env:LDFLAGS: } --coverage 29 | GCNO_TARGET_DIR={envdir}/temp 30 | COVERAGE_FILE={toxworkdir}/coverage/.coverage.{envname} 31 | commands = 32 | {envpython} -m pip install -e . 
-v 33 | {envpython} -m pytest -W always {posargs: -vv --cov pillow_avif --cov tests --cov-report term} 34 | {envpython} -c "import os; os.path.exists('{toxworkdir}/coverage') or os.makedirs('{toxworkdir}/coverage')" 35 | - gcovr -r {toxinidir} --object-directory {envdir} -k \ 36 | --gcov-ignore-errors=no_working_dir_found \ 37 | -o {toxworkdir}/coverage/.gcov_coverage.{envname} 38 | deps = 39 | gcovr 40 | pytest 41 | packaging 42 | pytest-cov 43 | test-image-results 44 | pillow 45 | py27: mock 46 | 47 | [testenv:coverage-report] 48 | skip_install = true 49 | deps = coverage 50 | setenv=COVERAGE_FILE=.coverage 51 | changedir = {toxworkdir}/coverage 52 | commands = 53 | coverage combine 54 | coverage report 55 | coverage xml -o {toxinidir}/coverage.xml 56 | 57 | [testenv:codecov] 58 | skip_install = true 59 | deps = codecov 60 | depends = coverage-report 61 | passenv = CODECOV_TOKEN 62 | changedir = {toxinidir} 63 | commands = 64 | codecov -X gcov --file {toxinidir}/coverage.xml {posargs} 65 | -------------------------------------------------------------------------------- /wheelbuild/aom-2.0.2-manylinux1.patch: -------------------------------------------------------------------------------- 1 | From f2b8e3c99adbecb8aa2fc49cf4d0973210bc05e5 Mon Sep 17 00:00:00 2001 2 | From: Frankie Dintino 3 | Date: Wed, 7 Apr 2021 17:12:57 -0400 4 | Subject: [PATCH] Define _mm256_bsrli_epi128 for old glibc; remove version.pl version constraint 5 | 6 | --- 7 | aom_dsp/x86/intrapred_x86.h | 4 ++++ 8 | build/cmake/version.pl | 1 - 9 | 2 files changed, 4 insertions(+), 1 deletion(-) 10 | 11 | diff --git a/aom_dsp/x86/intrapred_x86.h b/aom_dsp/x86/intrapred_x86.h 12 | index b13f575..ddba5f0 100644 13 | --- a/aom_dsp/x86/intrapred_x86.h 14 | +++ b/aom_dsp/x86/intrapred_x86.h 15 | @@ -35,4 +35,8 @@ static INLINE __m128i dc_sum_32_sse2(const uint8_t *ref) { 16 | return _mm_add_epi16(x0, high); 17 | } 18 | 19 | +#if defined(__GNUC__) && __GNUC__ < 6 20 | + #define _mm256_bsrli_epi128(a, count) _mm256_srli_si256((a), (count)) 21 | +#endif 22 | + 23 | #endif // AOM_AOM_DSP_X86_INTRAPRED_X86_H_ 24 | -- 25 | 2.30.0 26 | 27 | diff --git a/build/cmake/version.pl b/build/cmake/version.pl 28 | index 7d23f2b..73c5f81 100755 29 | --- a/build/cmake/version.pl 30 | +++ b/build/cmake/version.pl 31 | @@ -11,7 +11,6 @@ 32 | ## 33 | use strict; 34 | use warnings; 35 | -use 5.010; 36 | use Getopt::Long; 37 | 38 | my $git_desc = ''; 39 | -- 40 | 2.30.0 41 | 42 | -------------------------------------------------------------------------------- /wheelbuild/aom-fix-stack-size.patch: -------------------------------------------------------------------------------- 1 | From e53da0b1bf2652896bed7b65929a1d8d0729d922 Mon Sep 17 00:00:00 2001 2 | From: Wan-Teh Chang 3 | Date: Thu, 27 Aug 2020 20:49:03 -0700 4 | Subject: [PATCH] Ensure thread stack size is at least 256 KB 5 | 6 | BUG=aomedia:2754 7 | 8 | Change-Id: Ia6e211f9b87bc2efe376e7b9f4adb11741850b18 9 | --- 10 | 11 | diff --git a/aom_util/aom_thread.c b/aom_util/aom_thread.c 12 | index a749a22..8411569 100644 13 | --- a/aom_util/aom_thread.c 14 | +++ b/aom_util/aom_thread.c 15 | @@ -133,16 +133,39 @@ 16 | goto Error; 17 | } 18 | if (pthread_cond_init(&worker->impl_->condition_, NULL)) { 19 | - pthread_mutex_destroy(&worker->impl_->mutex_); 20 | - goto Error; 21 | + goto Error1; 22 | } 23 | + pthread_attr_t *attr = NULL; 24 | +#if HAVE_PTHREAD_H 25 | + pthread_attr_t thread_attributes; 26 | + attr = &thread_attributes; 27 | + if (pthread_attr_init(attr)) { 28 | + goto Error2; 29 | + } 30 
| + size_t stack_size; 31 | + if (pthread_attr_getstacksize(attr, &stack_size)) { 32 | + pthread_attr_destroy(attr); 33 | + goto Error2; 34 | + } 35 | + const size_t kMinStackSize = 256 * 1024; 36 | + if (stack_size < kMinStackSize && 37 | + pthread_attr_setstacksize(attr, kMinStackSize)) { 38 | + pthread_attr_destroy(attr); 39 | + goto Error2; 40 | + } 41 | +#endif // HAVE_PTHREAD_H 42 | pthread_mutex_lock(&worker->impl_->mutex_); 43 | - ok = !pthread_create(&worker->impl_->thread_, NULL, thread_loop, worker); 44 | + ok = !pthread_create(&worker->impl_->thread_, attr, thread_loop, worker); 45 | if (ok) worker->status_ = OK; 46 | pthread_mutex_unlock(&worker->impl_->mutex_); 47 | +#if HAVE_PTHREAD_H 48 | + pthread_attr_destroy(attr); 49 | +#endif 50 | if (!ok) { 51 | - pthread_mutex_destroy(&worker->impl_->mutex_); 52 | + Error2: 53 | pthread_cond_destroy(&worker->impl_->condition_); 54 | + Error1: 55 | + pthread_mutex_destroy(&worker->impl_->mutex_); 56 | Error: 57 | aom_free(worker->impl_); 58 | worker->impl_ = NULL; 59 | -------------------------------------------------------------------------------- /wheelbuild/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | if [[ "$MB_ML_VER" == "1" ]]; then 6 | DOCKER_IMAGE="fdintino/manylinux${MB_ML_VER}_$PLAT" 7 | fi 8 | 9 | if [[ "$MB_PYTHON_VERSION" == pypy3* ]]; then 10 | if [[ "$TRAVIS_OS_NAME" != "macos-latest" ]]; then 11 | DOCKER_TEST_IMAGE="multibuild/xenial_$PLAT" 12 | else 13 | MB_PYTHON_OSX_VER="10.9" 14 | fi 15 | elif [[ "$MB_PYTHON_VERSION" == "3.11" ]] && [[ "$PLAT" == "i686" ]]; then 16 | DOCKER_TEST_IMAGE="radarhere/bionic-$PLAT" 17 | fi 18 | 19 | if [[ "$MB_PYTHON_VERSION" == "2.7" ]]; then 20 | DOCKER_TEST_IMAGE="fdintino/trusty-multibuild:$PLAT" 21 | if [[ "$MB_ML_VER" == "2010" ]]; then 22 | DOCKER_IMAGE="quay.io/pypa/manylinux${MB_ML_VER}_$PLAT:2021-02-06-3d322a5" 23 | fi 24 | fi 25 | 26 | echo "::group::Install a virtualenv" 27 | source multibuild/common_utils.sh 28 | source multibuild/travis_steps.sh 29 | export LATEST_3p11="3.11.0rc2" 30 | # can't use default 7.3.1 on macOS due to https://foss.heptapod.net/pypy/pypy/-/issues/3229 31 | LATEST_PP_7p3=7.3.3 32 | python3 -m pip install virtualenv 33 | before_install 34 | echo "::endgroup::" 35 | 36 | echo "::group::Setup wheel installation" 37 | clean_code $REPO_DIR $BUILD_COMMIT 38 | build_wheel $REPO_DIR $PLAT 39 | ls -l "${GITHUB_WORKSPACE}/${WHEEL_SDIR}/" 40 | echo "::endgroup::" 41 | 42 | echo "::group::Test wheel" 43 | if [ "${PLAT}" == "arm64" ] && [ $(arch) != "arm64" ]; then 44 | echo "Skipping tests for non-natively-built Apple Silicon wheel" 45 | elif [[ "$MB_ML_LIBC" == "musllinux" ]] && [[ "$MB_PYTHON_VERSION" != "3.11" ]]&& [[ "$MB_PYTHON_VERSION" != "3.11" ]]; then 46 | echo "Skipping tests for CI issue with musl python < 3.11" 47 | else 48 | install_run $PLAT 49 | fi 50 | echo "::endgroup::" 51 | -------------------------------------------------------------------------------- /wheelbuild/config.sh: -------------------------------------------------------------------------------- 1 | # Used by multibuild for building wheels 2 | set -eo pipefail 3 | 4 | CONFIG_DIR=$(abspath $(dirname "${BASH_SOURCE[0]}")) 5 | 6 | ARCHIVE_SDIR=pillow-avif-plugin-depends 7 | LIBAVIF_VERSION=2d0204485a30446d82770c115e0a4d61e2819f23 8 | RAV1E_VERSION=0.7.1 9 | CCACHE_VERSION=4.10.2 10 | SCCACHE_VERSION=0.10.0 11 | export 
PERLBREWURL=https://raw.githubusercontent.com/gugod/App-perlbrew/release-0.92/perlbrew 12 | export GITHUB_ACTIONS=1 13 | export PYTHON_EXE="${PYTHON_EXE:-python}" 14 | export REPO_DIR=$(dirname $CONFIG_DIR) 15 | 16 | export PLAT="${AUDITWHEEL_ARCH:-${CIBW_ARCHS:-${PLAT}}}" 17 | 18 | # Convenience functions to run shell commands suppressed from "set -x" tracing 19 | shopt -s expand_aliases 20 | alias trace_on='{ set -x; } 2>/dev/null' 21 | alias trace_off='{ set +x; } 2>/dev/null' 22 | alias trace_suppress='{ [[ $- =~ .*x.* ]] && trace_enabled=1 || trace_enabled=0; set +x; } 2>/dev/null' 23 | alias trace_restore='{ [ $trace_enabled -eq 1 ] && trace_on || trace_off; } 2>/dev/null' 24 | 25 | if [ -n "$IS_MACOS" ] && [ -n "$MACOSX_DEPLOYMENT_TARGET" ]; then 26 | CFLAGS="${CFLAGS} -mmacosx-version-min=$MACOSX_DEPLOYMENT_TARGET" 27 | LDFLAGS="${LDFLAGS} -mmacosx-version-min=$MACOSX_DEPLOYMENT_TARGET" 28 | fi 29 | 30 | # Temporarily use old linker on macOS arm64. This fixes a bizarre bug where 31 | # an invalid instruction is being inserted into the middle of the libaom 32 | # function compute_stats_win5_neon 33 | if [ -n "$IS_MACOS" ] && [ "$PLAT" == "arm64" ]; then 34 | export LDFLAGS="${LDFLAGS} -ld64" 35 | fi 36 | 37 | mkdir -p "$BUILD_PREFIX/bin" 38 | export PATH="$PATH:$BUILD_PREFIX/bin" 39 | 40 | call_and_restore_trace() { 41 | local rc 42 | local force_trace 43 | if [[ "$1" == "-x" ]]; then 44 | force_trace=1 45 | shift 46 | fi 47 | "$@" 48 | rc=$? 49 | [ -n "$force_trace" ] && trace_on || trace_restore 50 | { return $rc; } 2>/dev/null 51 | } 52 | alias echo='trace_suppress; call_and_restore_trace builtin echo' 53 | 54 | function echo_if_gha() { 55 | [ -n "$GITHUB_ACTIONS" ] && builtin echo "$@" || true 56 | } 57 | 58 | GHA_ACTIVE_GROUP="" 59 | function __group_start_ { 60 | local was_active_group="$GHA_ACTIVE_GROUP" 61 | [ -n "$GHA_ACTIVE_GROUP" ] && echo_if_gha "::endgroup::" ||: 62 | GHA_ACTIVE_GROUP="1" 63 | echo_if_gha -n "::group::" 64 | } 65 | 66 | alias group_start='trace_suppress; __group_start_; call_and_restore_trace -x echo_if_gha' 67 | 68 | function __group_end_ { 69 | ACTIVE_GROUP="" 70 | echo_if_gha "::endgroup::" 71 | trace_off 72 | } 73 | 74 | alias group_end='trace_suppress; __group_end_' 75 | 76 | 77 | # If we're running in GitHub Actions, then send redirect stderr to 78 | # stdout to ensure that they are interleaved correctly 79 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 80 | exec 2>&1 81 | fi 82 | 83 | function require_package { 84 | local pkg=$1 85 | local pkgconfig=${PKGCONFIG:-pkg-config} 86 | if ! $pkgconfig --exists $pkg; then 87 | echo "$pkg failed to build" 88 | exit 1 89 | fi 90 | } 91 | 92 | function install_ccache { 93 | if [[ $(type -P ccache) ]]; then 94 | return 95 | fi 96 | mkdir -p $PWD/ccache 97 | if [ -e /parent-home ]; then 98 | ln -s $PWD/ccache /parent-home/.ccache 99 | fi 100 | if [ ! 
-e $HOME/.ccache ]; then 101 | ln -s $PWD/ccache $HOME/.ccache 102 | fi 103 | 104 | group_start "Install ccache" 105 | if [ -n "$IS_MACOS" ]; then 106 | local base_url="https://github.com/ccache/ccache/releases/download/v$CCACHE_VERSION" 107 | local archive_name="ccache-${CCACHE_VERSION}-darwin" 108 | fetch_unpack "${base_url}/${archive_name}.tar.gz" 109 | if [ -e "$archive_name/ccache" ]; then 110 | sudo cp "$archive_name/ccache" "/usr/local/bin/ccache" 111 | sudo chmod +x /usr/local/bin/ccache 112 | fi 113 | export CCACHE_CPP2=1 114 | elif [[ "$PLAT" == "x86_64" ]] && [[ $MB_ML_VER == "2014" ]]; then 115 | local base_url="https://github.com/ccache/ccache/releases/download/v$CCACHE_VERSION" 116 | local archive_name="ccache-${CCACHE_VERSION}-linux-x86_64" 117 | fetch_unpack "${base_url}/${archive_name}.tar.xz" 118 | if [ -e "$archive_name/ccache" ]; then 119 | cp "$archive_name/ccache" "/usr/local/bin/ccache" 120 | chmod +x /usr/local/bin/ccache 121 | fi 122 | elif [ -n "$IS_ALPINE" ]; then 123 | suppress apk add ccache 124 | else 125 | if [[ $MB_ML_VER == "_2_24" ]]; then 126 | # debian:9 based distro 127 | suppress apt-get install -y ccache 128 | elif [[ $MB_ML_VER == "2014" ]] && [[ "$PLAT" == "i686" ]]; then 129 | # There is no ccache rpm for el7.i686, but the one from EPEL 6 works fine 130 | yum install -y https://archives.fedoraproject.org/pub/archive/epel/6/i386/Packages/c/ccache-3.1.6-2.el6.i686.rpm 131 | else 132 | # centos based distro 133 | suppress yum_install epel-release 134 | suppress yum_install ccache 135 | fi 136 | fi 137 | group_end 138 | } 139 | 140 | function install_sccache { 141 | if [[ $(type -P sccache) ]]; then 142 | return 143 | fi 144 | group_start "Install sccache" 145 | local base_url="https://github.com/mozilla/sccache/releases/download/v$SCCACHE_VERSION" 146 | 147 | if [ -n "$IS_MACOS" ] && [ ! -e /usr/local/bin/sccache ]; then 148 | if [ "$PLAT" == "arm64" ]; then 149 | local archive_name="sccache-v${SCCACHE_VERSION}-aarch64-apple-darwin" 150 | else 151 | local archive_name="sccache-v${SCCACHE_VERSION}-x86_64-apple-darwin" 152 | fi 153 | fetch_unpack "${base_url}/${archive_name}.tar.gz" 154 | if [ -e "$archive_name/sccache" ]; then 155 | mkdir -p "$BUILD_PREFIX/bin" 156 | cp "$archive_name/sccache" "$BUILD_PREFIX/bin/sccache" 157 | chmod +x $BUILD_PREFIX/bin/sccache 158 | export USE_SCCACHE=1 159 | export SCCACHE_DIR=$PWD/sccache 160 | fi 161 | 162 | elif [ ! -e $BUILD_PREFIX/bin/sccache ]; then 163 | local archive_name="sccache-v${SCCACHE_VERSION}-${PLAT}-unknown-linux-musl" 164 | fetch_unpack "${base_url}/${archive_name}.tar.gz" 165 | if [ -e "$archive_name/sccache" ]; then 166 | mkdir -p "$BUILD_PREFIX/bin" 167 | cp "$archive_name/sccache" "$BUILD_PREFIX/bin/sccache" 168 | chmod +x $BUILD_PREFIX/bin/sccache 169 | export USE_SCCACHE=1 170 | export SCCACHE_DIR=$PWD/sccache 171 | fi 172 | fi 173 | group_end 174 | } 175 | 176 | function install_meson { 177 | if [ -e meson-stamp ]; then return; fi 178 | 179 | install_ninja 180 | 181 | group_start "Install meson" 182 | 183 | if [ -n "$IS_MACOS" ] && [ "$MB_PYTHON_VERSION" == "2.7" ]; then 184 | if [[ "$(uname -m)" == "x86_64" ]]; then 185 | HOMEBREW_PREFIX=/usr/local 186 | else 187 | HOMEBREW_PREFIX=/opt/homebrew 188 | fi 189 | $HOMEBREW_PREFIX/bin/brew install meson 190 | if [ ! 
-e $BUILD_PREFIX/bin/meson ]; then 191 | ln -s $HOMEBREW_PREFIX/bin/meson $BUILD_PREFIX/bin 192 | fi 193 | elif [ "$MB_PYTHON_VERSION" == "2.7" ]; then 194 | local python39_exe=$(cpython_path 3.9)/bin/python 195 | $python39_exe -m pip install meson 196 | local meson_exe=$(dirname $python39_exe)/meson 197 | if [ "$(id -u)" != "0" ]; then 198 | sudo ln -s $meson_exe /usr/local/bin 199 | else 200 | ln -s $meson_exe /usr/local/bin 201 | fi 202 | else 203 | $PYTHON_EXE -m pip install meson 204 | fi 205 | group_end 206 | 207 | touch meson-stamp 208 | } 209 | 210 | function install_ninja { 211 | if [ -e ninja-stamp ]; then return; fi 212 | 213 | group_start "Install ninja" 214 | 215 | if [ -n "$IS_MACOS" ]; then 216 | if [[ "$(uname -m)" == "x86_64" ]]; then 217 | HOMEBREW_PREFIX=/usr/local 218 | else 219 | HOMEBREW_PREFIX=/opt/homebrew 220 | fi 221 | $HOMEBREW_PREFIX/bin/brew install ninja 222 | if [ ! -e $BUILD_PREFIX/bin/ninja ]; then 223 | ln -s $HOMEBREW_PREFIX/bin/ninja $BUILD_PREFIX/bin 224 | fi 225 | else 226 | $PYTHON_EXE -m pip install ninja==1.11.1 227 | local ninja_exe=$(dirname $PYTHON_EXE)/ninja 228 | ln -s $ninja_exe /usr/local/bin/ninja-build 229 | fi 230 | 231 | group_end 232 | touch ninja-stamp 233 | } 234 | 235 | function build_rav1e { 236 | if [ -n "$IS_MACOS" ] && [ "$PLAT" == "arm64" ]; then 237 | librav1e_tgz=librav1e-${RAV1E_VERSION}-macos-aarch64.tar.gz 238 | elif [ -n "$IS_MACOS" ]; then 239 | librav1e_tgz=librav1e-${RAV1E_VERSION}-macos.tar.gz 240 | elif [ "$PLAT" == "aarch64" ]; then 241 | librav1e_tgz=librav1e-${RAV1E_VERSION}-linux-aarch64.tar.gz 242 | elif [ "$PLAT" == "i686" ]; then 243 | librav1e_tgz=librav1e-${RAV1E_VERSION}-linux-i686.tar.gz 244 | elif [ "$PLAT" == "x86_64" ]; then 245 | librav1e_tgz=librav1e-${RAV1E_VERSION}-linux-generic.tar.gz 246 | else 247 | return 248 | fi 249 | 250 | group_start "Build rav1e" 251 | 252 | curl -sLo - \ 253 | https://github.com/xiph/rav1e/releases/download/v$RAV1E_VERSION/$librav1e_tgz \ 254 | | tar -C $BUILD_PREFIX --exclude LICENSE -zxf - 255 | 256 | if [ ! 
-n "$IS_MACOS" ]; then 257 | sed -i 's/-lgcc_s/-lgcc_eh/g' "${BUILD_PREFIX}/lib/pkgconfig/rav1e.pc" 258 | rm -rf $BUILD_PREFIX/lib/librav1e*.so 259 | else 260 | rm -rf $BUILD_PREFIX/lib/librav1e*.dylib 261 | fi 262 | 263 | require_package rav1e 264 | 265 | group_end 266 | } 267 | 268 | function build_libavif { 269 | LIBAVIF_CMAKE_FLAGS=() 270 | 271 | if [ -n "$IS_MACOS" ]; then 272 | for pkg in webp jpeg-xl aom composer gd imagemagick libavif libheif php; do 273 | brew remove --ignore-dependencies $pkg ||: 274 | done 275 | fi 276 | which cmake 277 | cmake --version 278 | if [ -n "$IS_MACOS" ] && [ "$PLAT" == "arm64" ]; then 279 | # SVT-AV1 NEON intrinsics require macOS 14 280 | local macos_ver=$(sw_vers --productVersion | sed 's/\.[.0-9]*//') 281 | if [ "$macos_ver" -gt "13" ]; then 282 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_SVT=LOCAL) 283 | fi 284 | elif [ "$MB_ML_VER" != "1" ]; then 285 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_SVT=LOCAL) 286 | fi 287 | 288 | build_rav1e 289 | 290 | # Force libavif to treat system rav1e as if it were local 291 | if [ -e $BUILD_PREFIX/lib/librav1e.a ]; then 292 | mkdir -p /tmp/cmake/Modules 293 | cat < /tmp/cmake/Modules/Findrav1e.cmake 294 | add_library(rav1e::rav1e STATIC IMPORTED GLOBAL) 295 | set_target_properties(rav1e::rav1e PROPERTIES 296 | IMPORTED_LOCATION "$BUILD_PREFIX/lib/librav1e.a" 297 | AVIF_LOCAL ON 298 | INTERFACE_INCLUDE_DIRECTORIES "$BUILD_PREFIX/include/rav1e" 299 | ) 300 | EOF 301 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_RAV1E=ON -DCMAKE_MODULE_PATH=/tmp/cmake/Modules) 302 | else 303 | curl https://sh.rustup.rs -sSf | sh -s -- -y 304 | . "$HOME/.cargo/env" 305 | 306 | if [ -n "$IS_MACOS" ] && [ "$PLAT" == "arm64" ] && [[ "$(uname -m)" != "arm64" ]]; then 307 | # When cross-compiling to arm64 on macOS, install rust aarch64 target 308 | rustup target add --toolchain stable-x86_64-apple-darwin aarch64-apple-darwin 309 | fi 310 | 311 | if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then 312 | yum install -y perl 313 | if [[ "$MB_ML_VER" == 2014 ]]; then 314 | yum install -y perl-IPC-Cmd 315 | fi 316 | fi 317 | LIBAVIF_CMAKE_FLAGS+=(-DAVIF_CODEC_RAV1E=LOCAL) 318 | fi 319 | 320 | if [ -n "$IS_MACOS" ]; then 321 | # Prevent cmake from using @rpath in install id, so that delocate can 322 | # find and bundle the libavif dylib 323 | LIBAVIF_CMAKE_FLAGS+=(\ 324 | "-DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib" \ 325 | -DCMAKE_MACOSX_RPATH=OFF) 326 | if [ "$PLAT" == "arm64" ]; then 327 | LIBAVIF_CMAKE_FLAGS+=(-DCMAKE_TOOLCHAIN_FILE=$CONFIG_DIR/toolchain-arm64-macos.cmake) 328 | fi 329 | fi 330 | if [[ $(type -P ccache) ]]; then 331 | LIBAVIF_CMAKE_FLAGS+=(\ 332 | -DCMAKE_C_COMPILER_LAUNCHER=$(type -P ccache) \ 333 | -DCMAKE_CXX_COMPILER_LAUNCHER=$(type -P ccache)) 334 | fi 335 | 336 | group_start "Download libavif source" 337 | 338 | local libavif_archive="${LIBAVIF_VERSION}.tar.gz" 339 | if [[ "$LIBAVIF_VERSION" == *"."* ]]; then 340 | libavif_archive="v${libavif_archive}" 341 | fi 342 | 343 | local out_dir=$(fetch_unpack \ 344 | "https://github.com/AOMediaCodec/libavif/archive/$libavif_archive" \ 345 | "libavif-$LIBAVIF_VERSION.tar.gz") 346 | 347 | group_end 348 | 349 | if [[ $MB_ML_VER == "2010" ]]; then 350 | fetch_unpack https://storage.googleapis.com/aom-releases/libaom-3.8.1.tar.gz 351 | mv libaom-3.8.1 $out_dir/ext/aom 352 | fi 353 | 354 | group_start "Build libavif" 355 | 356 | mkdir -p $out_dir/build 357 | 358 | local build_type=MinSizeRel 359 | local lto=ON 360 | 361 | if [ -n "$IS_MACOS" ]; then 362 | # CMake with Clang only 
permits -flto=thin. To simply pass -flto, it must be done 363 | # through CFLAGS 364 | lto=OFF 365 | LIBAVIF_CMAKE_FLAGS+=( 366 | -DCMAKE_C_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto " \ 367 | -DCMAKE_CXX_FLAGS_MINSIZEREL="-Oz -DNDEBUG -flto" \ 368 | -DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,-S,-x,-dead_strip_dylibs" \ 369 | ) 370 | else 371 | if [[ "$MB_ML_VER" == 2014 ]] && [[ "$PLAT" == "x86_64" ]]; then 372 | build_type=Release 373 | fi 374 | LIBAVIF_CMAKE_FLAGS+=( 375 | -DCMAKE_SHARED_LINKER_FLAGS_INIT="-Wl,--strip-all,-z,relro,-z,now" \ 376 | ) 377 | fi 378 | 379 | (cd $out_dir/build \ 380 | && cmake .. \ 381 | -G "Ninja" \ 382 | -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX \ 383 | -DCMAKE_INSTALL_LIBDIR=$BUILD_PREFIX/lib \ 384 | -DCMAKE_INSTALL_NAME_DIR=$BUILD_PREFIX/lib \ 385 | -DBUILD_SHARED_LIBS=ON \ 386 | -DAVIF_LIBSHARPYUV=LOCAL \ 387 | -DAVIF_LIBYUV=LOCAL \ 388 | -DAVIF_CODEC_AOM=LOCAL \ 389 | -DAVIF_CODEC_DAV1D=LOCAL \ 390 | -DAVIF_CODEC_AOM_DECODE=OFF \ 391 | -DCONFIG_AV1_HIGHBITDEPTH=0 \ 392 | -DCMAKE_INTERPROCEDURAL_OPTIMIZATION=$lto \ 393 | -DCMAKE_C_VISIBILITY_PRESET=hidden \ 394 | -DCMAKE_CXX_VISIBILITY_PRESET=hidden \ 395 | -DCMAKE_BUILD_TYPE=$build_type \ 396 | "${LIBAVIF_CMAKE_FLAGS[@]}" \ 397 | && ninja -v install/strip) 398 | 399 | group_end 400 | } 401 | 402 | function build_nasm { 403 | group_start "Build nasm" 404 | local CC=$(type -P "${CC:-gcc}") 405 | local CXX=$(type -P "${CXX:-g++}") 406 | if [[ $(type -P ccache) ]]; then 407 | CC="$(type -P ccache) $CC" 408 | CXX="$(type -P ccache) $CXX" 409 | fi 410 | SCCACHE_DIR="$SCCACHE_DIR" CC="$CC" CXX="$CXX" build_simple nasm 2.16.01 https://gstreamer.freedesktop.org/src/mirror/ tar.xz 411 | group_end 412 | } 413 | 414 | function install_cmake { 415 | group_start "Install cmake" 416 | if [[ "$MB_ML_VER" == "1" ]]; then 417 | $PYTHON_EXE -m pip install 'cmake<3.23' 418 | elif [ "$MB_PYTHON_VERSION" == "2.7" ]; then 419 | $PYTHON_EXE -m pip install 'cmake==3.27.7' 420 | else 421 | $PYTHON_EXE -m pip install cmake 422 | fi 423 | group_end 424 | } 425 | 426 | function install_zlib { 427 | if [ ! -n "$IS_MACOS" ]; then 428 | group_start "Install zlib" 429 | build_zlib 430 | group_end 431 | fi 432 | } 433 | 434 | function build_openssl { 435 | if [ -e openssl-stamp ]; then return; fi 436 | group_start "Building openssl" 437 | if [[ "$MB_ML_VER" == "1" ]]; then 438 | # Install new Perl because OpenSSL configure scripts require > 5.10.0. 439 | curl -L http://cpanmin.us | perl - App::cpanminus 440 | cpanm File::Path 441 | cpanm parent 442 | curl -L https://install.perlbrew.pl | bash 443 | source $HOME/perl5/perlbrew/etc/bashrc 444 | perlbrew install -j 3 --notest perl-5.16.0 445 | perlbrew use perl-5.16.0 446 | fi 447 | fetch_unpack ${OPENSSL_DOWNLOAD_URL}/${OPENSSL_ROOT}.tar.gz 448 | check_sha256sum $ARCHIVE_SDIR/${OPENSSL_ROOT}.tar.gz ${OPENSSL_HASH} 449 | local CC=$(type -P "${CC:-gcc}") 450 | local CXX=$(type -P "${CXX:-g++}") 451 | if [[ $(type -P ccache) ]]; then 452 | CC="$(type -P ccache) $CC" 453 | CXX="$(type -P ccache) $CXX" 454 | fi 455 | (cd ${OPENSSL_ROOT} \ 456 | && CC="$CC" CXX="$CXX" ./config no-ssl2 no-shared no-tests -fPIC --prefix=$BUILD_PREFIX \ 457 | && SCCACHE_DIR="$SCCACHE_DIR" make -j4 \ 458 | && make install_sw) 459 | touch openssl-stamp 460 | group_end 461 | } 462 | 463 | function ensure_openssl { 464 | if [ ! 
-n "$IS_MACOS" ]; then 465 | group_start "Install openssl" 466 | if [ -n "$IS_ALPINE" ]; then 467 | apk add openssl-dev 468 | elif [[ $MB_ML_VER == "_2_24" ]]; then 469 | apt-get install -y libssl-dev 470 | else 471 | yum_install openssl-devel 472 | fi 473 | group_end 474 | fi 475 | } 476 | 477 | function ensure_sudo { 478 | if [ ! -e /usr/bin/sudo ]; then 479 | group_start "Install sudo" 480 | if [ -n "$IS_ALPINE" ]; then 481 | apk add sudo 482 | elif [[ $MB_ML_VER == "_2_24" ]]; then 483 | apt-get install -y sudo 484 | else 485 | yum_install sudo 486 | fi 487 | group_end 488 | fi 489 | } 490 | 491 | function append_licenses { 492 | group_start "Append licenses" 493 | local prefix="" 494 | if [ -e "$REPO_DIR" ]; then 495 | pushd $REPO_DIR 496 | fi 497 | for filename in wheelbuild/dependency_licenses/*.txt; do 498 | echo -e "\n\n----\n\n$(basename $filename | cut -f 1 -d '.')\n" | cat >> LICENSE 499 | cat $filename >> LICENSE 500 | done 501 | echo -e "\n\n" | cat >> LICENSE 502 | cat wheelbuild/dependency_licenses/PATENTS >> LICENSE 503 | if [ -e "$REPO_DIR" ]; then 504 | popd 505 | fi 506 | group_end 507 | } 508 | 509 | function pre_build { 510 | echo "::endgroup::" 511 | 512 | if [ -e /etc/yum.repos.d/CentOS-Base.repo ]; then 513 | sed -i -e '/^mirrorlist=http:\/\/mirrorlist.centos.org\// { s/^/#/ ; T }' \ 514 | -e '{ s/#baseurl=/baseurl=/ ; s/mirror\.centos\.org/vault.centos.org/ }' \ 515 | /etc/yum.repos.d/CentOS-*.repo 516 | if [ "$PLAT" == "aarch64" ]; then 517 | sed -i -e '{ s/vault\.centos\.org\/centos/vault.centos.org\/altarch/ }' \ 518 | /etc/yum.repos.d/CentOS-*.repo 519 | fi 520 | fi 521 | 522 | if [ "$MB_ML_VER" == "2010" ]; then 523 | yum install -y devtoolset-9-gcc-gfortran yum install devtoolset-9-gcc-c++ 524 | export PATH=/opt/rh/devtoolset-9/root/usr/bin:$PATH 525 | fi 526 | 527 | if [ -n "$IS_MACOS" ]; then 528 | sudo mkdir -p /usr/local/lib 529 | sudo mkdir -p /usr/local/bin 530 | sudo chown -R $(id -u):$(id -g) /usr/local ||: 531 | fi 532 | 533 | append_licenses 534 | ensure_sudo 535 | ensure_openssl 536 | install_zlib 537 | install_sccache 538 | install_ccache 539 | 540 | if [ "$PLAT" == "x86_64" ] || [ "$PLAT" == "i686" ]; then 541 | build_nasm 542 | fi 543 | install_cmake 544 | install_ninja 545 | install_meson 546 | 547 | if [[ -n "$IS_MACOS" ]]; then 548 | # clear bash path cache for curl 549 | hash -d curl ||: 550 | fi 551 | 552 | if [ -e $HOME/.cargo/env ]; then 553 | source $HOME/.cargo/env 554 | fi 555 | 556 | build_libavif 557 | 558 | if [ -z "$CIBW_ARCHS" ]; then 559 | echo "::group::Build wheel" 560 | fi 561 | } 562 | 563 | function run_tests { 564 | if ! 
$PYTHON_EXE -m unittest.mock 2>&1 2>/dev/null; then 565 | $PYTHON_EXE -m pip install mock 566 | fi 567 | # Runs tests on installed distribution from an empty directory 568 | (cd ../pillow-avif-plugin && pytest -v) 569 | } 570 | 571 | # Work around flakiness of pip install with python 2.7 572 | if [ "$MB_PYTHON_VERSION" == "2.7" ]; then 573 | function pip_install { 574 | if [ "$1" == "retry" ]; then 575 | shift 576 | echo "" 577 | echo Retrying pip install $@ 578 | else 579 | echo Running pip install $@ 580 | fi 581 | echo "" 582 | $PIP_CMD install $(pip_opts) $@ 583 | } 584 | 585 | function install_run { 586 | if [ -n "$TEST_DEPENDS" ]; then 587 | while read TEST_DEPENDENCY; do 588 | pip_install $TEST_DEPENDENCY \ 589 | || pip_install retry $TEST_DEPENDENCY \ 590 | || pip_install retry $TEST_DEPENDENCY \ 591 | || pip_install retry $TEST_DEPENDENCY 592 | done <<< "$TEST_DEPENDS" 593 | TEST_DEPENDS="" 594 | fi 595 | 596 | install_wheel 597 | mkdir tmp_for_test 598 | (cd tmp_for_test && run_tests) 599 | rmdir tmp_for_test 2>/dev/null || echo "Cannot remove tmp_for_test" 600 | } 601 | fi 602 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/AOM.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Alliance for Open Media. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions 5 | are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the 13 | distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 18 | FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 19 | COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 20 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 21 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 24 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 25 | ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 26 | POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/DAV1D.txt: -------------------------------------------------------------------------------- 1 | Copyright © 2018-2019, VideoLAN and dav1d authors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 15 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 16 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 18 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 19 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 20 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 21 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 23 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/LIBGAV1.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 5 | 1. Definitions. 6 | "License" shall mean the terms and conditions for use, reproduction, 7 | and distribution as defined by Sections 1 through 9 of this document. 8 | "Licensor" shall mean the copyright owner or entity authorized by 9 | the copyright owner that is granting the License. 10 | "Legal Entity" shall mean the union of the acting entity and all 11 | other entities that control, are controlled by, or are under common 12 | control with that entity. For the purposes of this definition, 13 | "control" means (i) the power, direct or indirect, to cause the 14 | direction or management of such entity, whether by contract or 15 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 16 | outstanding shares, or (iii) beneficial ownership of such entity. 17 | "You" (or "Your") shall mean an individual or Legal Entity 18 | exercising permissions granted by this License. 19 | "Source" form shall mean the preferred form for making modifications, 20 | including but not limited to software source code, documentation 21 | source, and configuration files. 22 | "Object" form shall mean any form resulting from mechanical 23 | transformation or translation of a Source form, including but 24 | not limited to compiled object code, generated documentation, 25 | and conversions to other media types. 26 | "Work" shall mean the work of authorship, whether in Source or 27 | Object form, made available under the License, as indicated by a 28 | copyright notice that is included in or attached to the work 29 | (an example is provided in the Appendix below). 30 | "Derivative Works" shall mean any work, whether in Source or Object 31 | form, that is based on (or derived from) the Work and for which the 32 | editorial revisions, annotations, elaborations, or other modifications 33 | represent, as a whole, an original work of authorship. For the purposes 34 | of this License, Derivative Works shall not include works that remain 35 | separable from, or merely link (or bind by name) to the interfaces of, 36 | the Work and Derivative Works thereof. 
37 | "Contribution" shall mean any work of authorship, including 38 | the original version of the Work and any modifications or additions 39 | to that Work or Derivative Works thereof, that is intentionally 40 | submitted to Licensor for inclusion in the Work by the copyright owner 41 | or by an individual or Legal Entity authorized to submit on behalf of 42 | the copyright owner. For the purposes of this definition, "submitted" 43 | means any form of electronic, verbal, or written communication sent 44 | to the Licensor or its representatives, including but not limited to 45 | communication on electronic mailing lists, source code control systems, 46 | and issue tracking systems that are managed by, or on behalf of, the 47 | Licensor for the purpose of discussing and improving the Work, but 48 | excluding communication that is conspicuously marked or otherwise 49 | designated in writing by the copyright owner as "Not a Contribution." 50 | "Contributor" shall mean Licensor and any individual or Legal Entity 51 | on behalf of whom a Contribution has been received by Licensor and 52 | subsequently incorporated within the Work. 53 | 2. Grant of Copyright License. Subject to the terms and conditions of 54 | this License, each Contributor hereby grants to You a perpetual, 55 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 56 | copyright license to reproduce, prepare Derivative Works of, 57 | publicly display, publicly perform, sublicense, and distribute the 58 | Work and such Derivative Works in Source or Object form. 59 | 3. Grant of Patent License. Subject to the terms and conditions of 60 | this License, each Contributor hereby grants to You a perpetual, 61 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 62 | (except as stated in this section) patent license to make, have made, 63 | use, offer to sell, sell, import, and otherwise transfer the Work, 64 | where such license applies only to those patent claims licensable 65 | by such Contributor that are necessarily infringed by their 66 | Contribution(s) alone or by combination of their Contribution(s) 67 | with the Work to which such Contribution(s) was submitted. If You 68 | institute patent litigation against any entity (including a 69 | cross-claim or counterclaim in a lawsuit) alleging that the Work 70 | or a Contribution incorporated within the Work constitutes direct 71 | or contributory patent infringement, then any patent licenses 72 | granted to You under this License for that Work shall terminate 73 | as of the date such litigation is filed. 74 | 4. Redistribution. 
You may reproduce and distribute copies of the 75 | Work or Derivative Works thereof in any medium, with or without 76 | modifications, and in Source or Object form, provided that You 77 | meet the following conditions: 78 | (a) You must give any other recipients of the Work or 79 | Derivative Works a copy of this License; and 80 | (b) You must cause any modified files to carry prominent notices 81 | stating that You changed the files; and 82 | (c) You must retain, in the Source form of any Derivative Works 83 | that You distribute, all copyright, patent, trademark, and 84 | attribution notices from the Source form of the Work, 85 | excluding those notices that do not pertain to any part of 86 | the Derivative Works; and 87 | (d) If the Work includes a "NOTICE" text file as part of its 88 | distribution, then any Derivative Works that You distribute must 89 | include a readable copy of the attribution notices contained 90 | within such NOTICE file, excluding those notices that do not 91 | pertain to any part of the Derivative Works, in at least one 92 | of the following places: within a NOTICE text file distributed 93 | as part of the Derivative Works; within the Source form or 94 | documentation, if provided along with the Derivative Works; or, 95 | within a display generated by the Derivative Works, if and 96 | wherever such third-party notices normally appear. The contents 97 | of the NOTICE file are for informational purposes only and 98 | do not modify the License. You may add Your own attribution 99 | notices within Derivative Works that You distribute, alongside 100 | or as an addendum to the NOTICE text from the Work, provided 101 | that such additional attribution notices cannot be construed 102 | as modifying the License. 103 | You may add Your own copyright statement to Your modifications and 104 | may provide additional or different license terms and conditions 105 | for use, reproduction, or distribution of Your modifications, or 106 | for any such Derivative Works as a whole, provided Your use, 107 | reproduction, and distribution of the Work otherwise complies with 108 | the conditions stated in this License. 109 | 5. Submission of Contributions. Unless You explicitly state otherwise, 110 | any Contribution intentionally submitted for inclusion in the Work 111 | by You to the Licensor shall be under the terms and conditions of 112 | this License, without any additional terms or conditions. 113 | Notwithstanding the above, nothing herein shall supersede or modify 114 | the terms of any separate license agreement you may have executed 115 | with Licensor regarding such Contributions. 116 | 6. Trademarks. This License does not grant permission to use the trade 117 | names, trademarks, service marks, or product names of the Licensor, 118 | except as required for reasonable and customary use in describing the 119 | origin of the Work and reproducing the content of the NOTICE file. 120 | 7. Disclaimer of Warranty. Unless required by applicable law or 121 | agreed to in writing, Licensor provides the Work (and each 122 | Contributor provides its Contributions) on an "AS IS" BASIS, 123 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 124 | implied, including, without limitation, any warranties or conditions 125 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 126 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 127 | appropriateness of using or redistributing the Work and assume any 128 | risks associated with Your exercise of permissions under this License. 129 | 8. Limitation of Liability. In no event and under no legal theory, 130 | whether in tort (including negligence), contract, or otherwise, 131 | unless required by applicable law (such as deliberate and grossly 132 | negligent acts) or agreed to in writing, shall any Contributor be 133 | liable to You for damages, including any direct, indirect, special, 134 | incidental, or consequential damages of any character arising as a 135 | result of this License or out of the use or inability to use the 136 | Work (including but not limited to damages for loss of goodwill, 137 | work stoppage, computer failure or malfunction, or any and all 138 | other commercial damages or losses), even if such Contributor 139 | has been advised of the possibility of such damages. 140 | 9. Accepting Warranty or Additional Liability. While redistributing 141 | the Work or Derivative Works thereof, You may choose to offer, 142 | and charge a fee for, acceptance of support, warranty, indemnity, 143 | or other liability obligations and/or rights consistent with this 144 | License. However, in accepting such obligations, You may act only 145 | on Your own behalf and on Your sole responsibility, not on behalf 146 | of any other Contributor, and only if You agree to indemnify, 147 | defend, and hold each Contributor harmless for any liability 148 | incurred by, or claims asserted against, such Contributor by reason 149 | of your accepting any such warranty or additional liability. 150 | END OF TERMS AND CONDITIONS 151 | APPENDIX: How to apply the Apache License to your work. 152 | To apply the Apache License to your work, attach the following 153 | boilerplate notice, with the fields enclosed by brackets "[]" 154 | replaced with your own identifying information. (Don't include 155 | the brackets!) The text should be enclosed in the appropriate 156 | comment syntax for the file format. We also recommend that a 157 | file or class name and description of purpose be included on the 158 | same "printed page" as the copyright notice for easier 159 | identification within third-party archives. 160 | Copyright [yyyy] [name of copyright owner] 161 | Licensed under the Apache License, Version 2.0 (the "License"); 162 | you may not use this file except in compliance with the License. 163 | You may obtain a copy of the License at 164 | http://www.apache.org/licenses/LICENSE-2.0 165 | Unless required by applicable law or agreed to in writing, software 166 | distributed under the License is distributed on an "AS IS" BASIS, 167 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 168 | See the License for the specific language governing permissions and 169 | limitations under the License. 170 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/LIBYUV.txt: -------------------------------------------------------------------------------- 1 | Copyright 2011 The LibYuv Project Authors. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 
9 | 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the 13 | distribution. 14 | 15 | * Neither the name of Google nor the names of its contributors may 16 | be used to endorse or promote products derived from this software 17 | without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/PATENTS: -------------------------------------------------------------------------------- 1 | Alliance for Open Media Patent License 1.0 2 | 3 | 1. License Terms. 4 | 5 | 1.1. Patent License. Subject to the terms and conditions of this License, each 6 | Licensor, on behalf of itself and successors in interest and assigns, 7 | grants Licensee a non-sublicensable, perpetual, worldwide, non-exclusive, 8 | no-charge, royalty-free, irrevocable (except as expressly stated in this 9 | License) patent license to its Necessary Claims to make, use, sell, offer 10 | for sale, import or distribute any Implementation. 11 | 12 | 1.2. Conditions. 13 | 14 | 1.2.1. Availability. As a condition to the grant of rights to Licensee to make, 15 | sell, offer for sale, import or distribute an Implementation under 16 | Section 1.1, Licensee must make its Necessary Claims available under 17 | this License, and must reproduce this License with any Implementation 18 | as follows: 19 | 20 | a. For distribution in source code, by including this License in the 21 | root directory of the source code with its Implementation. 22 | 23 | b. For distribution in any other form (including binary, object form, 24 | and/or hardware description code (e.g., HDL, RTL, Gate Level Netlist, 25 | GDSII, etc.)), by including this License in the documentation, legal 26 | notices, and/or other written materials provided with the 27 | Implementation. 28 | 29 | 1.2.2. Additional Conditions. This license is directly from Licensor to 30 | Licensee. Licensee acknowledges as a condition of benefiting from it 31 | that no rights from Licensor are received from suppliers, distributors, 32 | or otherwise in connection with this License. 33 | 34 | 1.3. Defensive Termination. 
If any Licensee, its Affiliates, or its agents 35 | initiates patent litigation or files, maintains, or voluntarily 36 | participates in a lawsuit against another entity or any person asserting 37 | that any Implementation infringes Necessary Claims, any patent licenses 38 | granted under this License directly to the Licensee are immediately 39 | terminated as of the date of the initiation of action unless 1) that suit 40 | was in response to a corresponding suit regarding an Implementation first 41 | brought against an initiating entity, or 2) that suit was brought to 42 | enforce the terms of this License (including intervention in a third-party 43 | action by a Licensee). 44 | 45 | 1.4. Disclaimers. The Reference Implementation and Specification are provided 46 | "AS IS" and without warranty. The entire risk as to implementing or 47 | otherwise using the Reference Implementation or Specification is assumed 48 | by the implementer and user. Licensor expressly disclaims any warranties 49 | (express, implied, or otherwise), including implied warranties of 50 | merchantability, non-infringement, fitness for a particular purpose, or 51 | title, related to the material. IN NO EVENT WILL LICENSOR BE LIABLE TO 52 | ANY OTHER PARTY FOR LOST PROFITS OR ANY FORM OF INDIRECT, SPECIAL, 53 | INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER FROM ANY CAUSES OF 54 | ACTION OF ANY KIND WITH RESPECT TO THIS LICENSE, WHETHER BASED ON BREACH 55 | OF CONTRACT, TORT (INCLUDING NEGLIGENCE), OR OTHERWISE, AND WHETHER OR 56 | NOT THE OTHER PARTRY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 57 | 58 | 2. Definitions. 59 | 60 | 2.1. Affiliate. “Affiliate” means an entity that directly or indirectly 61 | Controls, is Controlled by, or is under common Control of that party. 62 | 63 | 2.2. Control. “Control” means direct or indirect control of more than 50% of 64 | the voting power to elect directors of that corporation, or for any other 65 | entity, the power to direct management of such entity. 66 | 67 | 2.3. Decoder. "Decoder" means any decoder that conforms fully with all 68 | non-optional portions of the Specification. 69 | 70 | 2.4. Encoder. "Encoder" means any encoder that produces a bitstream that can 71 | be decoded by a Decoder only to the extent it produces such a bitstream. 72 | 73 | 2.5. Final Deliverable. “Final Deliverable” means the final version of a 74 | deliverable approved by the Alliance for Open Media as a Final 75 | Deliverable. 76 | 77 | 2.6. Implementation. "Implementation" means any implementation, including the 78 | Reference Implementation, that is an Encoder and/or a Decoder. An 79 | Implementation also includes components of an Implementation only to the 80 | extent they are used as part of an Implementation. 81 | 82 | 2.7. License. “License” means this license. 83 | 84 | 2.8. Licensee. “Licensee” means any person or entity who exercises patent 85 | rights granted under this License. 86 | 87 | 2.9. Licensor. "Licensor" means (i) any Licensee that makes, sells, offers 88 | for sale, imports or distributes any Implementation, or (ii) a person 89 | or entity that has a licensing obligation to the Implementation as a 90 | result of its membership and/or participation in the Alliance for Open 91 | Media working group that developed the Specification. 92 | 93 | 2.10. Necessary Claims. 
"Necessary Claims" means all claims of patents or 94 | patent applications, (a) that currently or at any time in the future, 95 | are owned or controlled by the Licensor, and (b) (i) would be an 96 | Essential Claim as defined by the W3C Policy as of February 5, 2004 97 | (https://www.w3.org/Consortium/Patent-Policy-20040205/#def-essential) 98 | as if the Specification was a W3C Recommendation; or (ii) are infringed 99 | by the Reference Implementation. 100 | 101 | 2.11. Reference Implementation. “Reference Implementation” means an Encoder 102 | and/or Decoder released by the Alliance for Open Media as a Final 103 | Deliverable. 104 | 105 | 2.12. Specification. “Specification” means the specification designated by 106 | the Alliance for Open Media as a Final Deliverable for which this 107 | License was issued. 108 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/RAV1E.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017-2020, the rav1e contributors. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are met: 5 | 6 | * Redistributions of source code must retain the above copyright notice, this 7 | list of conditions and the following disclaimer. 8 | 9 | * Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 14 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 17 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 18 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 19 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 20 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 21 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 22 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /wheelbuild/dependency_licenses/SVT-AV1.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019, Alliance for Open Media. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions 5 | are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the 13 | distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 18 | FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE 19 | COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 20 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 21 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 24 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 25 | ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 26 | POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /wheelbuild/libavif-1.0.1-local-static.patch: -------------------------------------------------------------------------------- 1 | From f8f4ed7ecec80a596f60a4a7e1392c09cedbf7ed Mon Sep 17 00:00:00 2001 2 | From: Frankie Dintino 3 | Date: Tue, 12 Sep 2023 05:47:43 -0400 4 | Subject: [PATCH] ci: link shared library build against static local 5 | 6 | --- 7 | CMakeLists.txt | 33 +++++++++++++-------------------- 8 | ext/libyuv.cmd | 2 +- 9 | 2 files changed, 14 insertions(+), 21 deletions(-) 10 | 11 | diff --git a/CMakeLists.txt b/CMakeLists.txt 12 | index 1f0cde1..521560e 100644 13 | --- a/CMakeLists.txt 14 | +++ b/CMakeLists.txt 15 | @@ -78,10 +78,10 @@ endif() 16 | 17 | if(BUILD_SHARED_LIBS) 18 | set(AVIF_LIBRARY_PREFIX "${CMAKE_SHARED_LIBRARY_PREFIX}") 19 | - set(AVIF_LIBRARY_SUFFIX "${CMAKE_SHARED_LIBRARY_SUFFIX}") 20 | else() 21 | set(AVIF_LIBRARY_PREFIX "${CMAKE_STATIC_LIBRARY_PREFIX}") 22 | - set(AVIF_LIBRARY_SUFFIX "${CMAKE_STATIC_LIBRARY_SUFFIX}") 23 | + # This is needed to get shared libraries (e.g. pixbufloader-avif) to compile against a static libavif. 24 | + set(CMAKE_POSITION_INDEPENDENT_CODE ON) 25 | endif() 26 | 27 | set(AVIF_PLATFORM_DEFINITIONS) 28 | @@ -112,7 +112,7 @@ if(AVIF_LOCAL_ZLIBPNG) 29 | set(PREV_ANDROID ${ANDROID}) 30 | set(ANDROID TRUE) 31 | set(PNG_BUILD_ZLIB "${CMAKE_CURRENT_SOURCE_DIR}/ext/zlib" CACHE STRING "" FORCE) 32 | - set(PNG_SHARED ${BUILD_SHARED_LIBS} CACHE BOOL "") 33 | + set(PNG_SHARED OFF CACHE BOOL "") 34 | set(PNG_TESTS OFF CACHE BOOL "") 35 | add_subdirectory(ext/libpng) 36 | set(PNG_PNG_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ext/libpng") 37 | @@ -135,7 +135,7 @@ if(AVIF_LOCAL_JPEG) 38 | endif() 39 | option(AVIF_LOCAL_LIBYUV "Build libyuv by providing your own copy inside the ext subdir." OFF) 40 | if(AVIF_LOCAL_LIBYUV) 41 | - set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/libyuv/build/${AVIF_LIBRARY_PREFIX}yuv${AVIF_LIBRARY_SUFFIX}") 42 | + set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/libyuv/build/${AVIF_LIBRARY_PREFIX}yuv${CMAKE_STATIC_LIBRARY_SUFFIX}") 43 | if(NOT EXISTS "${LIB_FILENAME}") 44 | message(FATAL_ERROR "libavif(AVIF_LOCAL_LIBYUV): ${LIB_FILENAME} is missing, bailing out") 45 | endif() 46 | @@ -146,13 +146,6 @@ if(AVIF_LOCAL_LIBYUV) 47 | set(LIBYUV_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ext/libyuv/include" PARENT_SCOPE) 48 | set(LIBYUV_LIBRARY ${LIB_FILENAME} PARENT_SCOPE) 49 | endif() 50 | - if(BUILD_SHARED_LIBS) 51 | - # Fix "libyuv.so: undefined reference to `jpeg_read_raw_data'" errors. 
52 | - if(NOT AVIF_LOCAL_JPEG) 53 | - find_package(JPEG REQUIRED) 54 | - endif() 55 | - set(LIBYUV_LIBRARY ${LIBYUV_LIBRARY} ${JPEG_LIBRARY}) 56 | - endif() 57 | set(libyuv_FOUND TRUE) 58 | message(STATUS "libavif: local libyuv found; libyuv-based fast paths enabled.") 59 | else(AVIF_LOCAL_LIBYUV) 60 | @@ -184,7 +177,7 @@ if(libyuv_FOUND) 61 | endif(libyuv_FOUND) 62 | option(AVIF_LOCAL_LIBSHARPYUV "Build libsharpyuv by providing your own copy inside the ext subdir." OFF) 63 | if(AVIF_LOCAL_LIBSHARPYUV) 64 | - set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/libwebp/build/libsharpyuv${AVIF_LIBRARY_SUFFIX}") 65 | + set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/libwebp/build/libsharpyuv${CMAKE_STATIC_LIBRARY_SUFFIX}") 66 | if(NOT EXISTS "${LIB_FILENAME}") 67 | message(FATAL_ERROR "libavif(AVIF_LOCAL_LIBSHARPYUV): ${LIB_FILENAME} is missing, bailing out") 68 | endif() 69 | @@ -309,16 +302,16 @@ if(AVIF_CODEC_DAV1D) 70 | if(DEFINED ANDROID_ABI) 71 | set(AVIF_DAV1D_BUILD_DIR "${AVIF_DAV1D_BUILD_DIR}/${ANDROID_ABI}") 72 | endif() 73 | - set(LIB_FILENAME "${AVIF_DAV1D_BUILD_DIR}/src/libdav1d${AVIF_LIBRARY_SUFFIX}") 74 | + set(LIB_FILENAME "${AVIF_DAV1D_BUILD_DIR}/src/libdav1d${CMAKE_STATIC_LIBRARY_SUFFIX}") 75 | if(NOT EXISTS "${LIB_FILENAME}") 76 | - if("${AVIF_LIBRARY_SUFFIX}" STREQUAL ".a") 77 | + if("${CMAKE_STATIC_LIBRARY_SUFFIX}" STREQUAL ".a") 78 | message(FATAL_ERROR "libavif: ${LIB_FILENAME} is missing, bailing out") 79 | else() 80 | # On windows, meson will produce a libdav1d.a instead of the expected libdav1d.dll/.lib. 81 | # See https://github.com/mesonbuild/meson/issues/8153. 82 | set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/dav1d/build/src/libdav1d.a") 83 | if(NOT EXISTS "${LIB_FILENAME}") 84 | - message(FATAL_ERROR "libavif: ${LIB_FILENAME} (or libdav1d${AVIF_LIBRARY_SUFFIX}) is missing, bailing out") 85 | + message(FATAL_ERROR "libavif: ${LIB_FILENAME} (or libdav1d${CMAKE_STATIC_LIBRARY_SUFFIX}) is missing, bailing out") 86 | endif() 87 | endif() 88 | endif() 89 | @@ -353,7 +346,7 @@ if(AVIF_CODEC_LIBGAV1) 90 | if(DEFINED ANDROID_ABI) 91 | set(AVIF_LIBGAV1_BUILD_DIR "${AVIF_LIBGAV1_BUILD_DIR}/${ANDROID_ABI}") 92 | endif() 93 | - set(LIB_FILENAME "${AVIF_LIBGAV1_BUILD_DIR}/libgav1${AVIF_LIBRARY_SUFFIX}") 94 | + set(LIB_FILENAME "${AVIF_LIBGAV1_BUILD_DIR}/libgav1${CMAKE_STATIC_LIBRARY_SUFFIX}") 95 | if(NOT EXISTS "${LIB_FILENAME}") 96 | message(FATAL_ERROR "libavif: ${LIB_FILENAME} is missing, bailing out") 97 | endif() 98 | @@ -378,7 +371,7 @@ if(AVIF_CODEC_RAV1E) 99 | 100 | if(AVIF_LOCAL_RAV1E) 101 | set(LIB_FILENAME 102 | - "${CMAKE_CURRENT_SOURCE_DIR}/ext/rav1e/build.libavif/usr/lib/${AVIF_LIBRARY_PREFIX}rav1e${AVIF_LIBRARY_SUFFIX}" 103 | + "${CMAKE_CURRENT_SOURCE_DIR}/ext/rav1e/build.libavif/usr/lib/${AVIF_LIBRARY_PREFIX}rav1e${CMAKE_STATIC_LIBRARY_SUFFIX}" 104 | ) 105 | if(NOT EXISTS "${LIB_FILENAME}") 106 | message(FATAL_ERROR "libavif: compiled rav1e library is missing (in ext/rav1e/build.libavif/usr/lib), bailing out") 107 | @@ -411,7 +404,7 @@ if(AVIF_CODEC_SVT) 108 | 109 | if(AVIF_LOCAL_SVT) 110 | set(LIB_FILENAME 111 | - "${CMAKE_CURRENT_SOURCE_DIR}/ext/SVT-AV1/Bin/Release/${AVIF_LIBRARY_PREFIX}SvtAv1Enc${AVIF_LIBRARY_SUFFIX}" 112 | + "${CMAKE_CURRENT_SOURCE_DIR}/ext/SVT-AV1/Bin/Release/${AVIF_LIBRARY_PREFIX}SvtAv1Enc${CMAKE_STATIC_LIBRARY_SUFFIX}" 113 | ) 114 | if(NOT EXISTS "${LIB_FILENAME}") 115 | message(FATAL_ERROR "libavif: compiled svt library is missing (in ext/SVT-AV1/Bin/Release), bailing out") 116 | @@ -450,7 +443,7 @@ if(AVIF_CODEC_AOM) 117 | endif() 
118 | set(AVIF_SRCS ${AVIF_SRCS} src/codec_aom.c) 119 | if(AVIF_LOCAL_AOM) 120 | - set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/aom/build.libavif/${AVIF_LIBRARY_PREFIX}aom${AVIF_LIBRARY_SUFFIX}") 121 | + set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/aom/build.libavif/${AVIF_LIBRARY_PREFIX}aom${CMAKE_STATIC_LIBRARY_SUFFIX}") 122 | if(NOT EXISTS "${LIB_FILENAME}") 123 | message(FATAL_ERROR "libavif: ${LIB_FILENAME} is missing, bailing out") 124 | endif() 125 | @@ -482,7 +475,7 @@ if(AVIF_CODEC_AVM) 126 | set(AVIF_SRCS ${AVIF_SRCS} src/codec_avm.c) 127 | if(AVIF_LOCAL_AVM) 128 | # Building the avm repository generates files such as "libaom.a" because it is a fork of aom. 129 | - set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/avm/build.libavif/${AVIF_LIBRARY_PREFIX}aom${AVIF_LIBRARY_SUFFIX}") 130 | + set(LIB_FILENAME "${CMAKE_CURRENT_SOURCE_DIR}/ext/avm/build.libavif/${AVIF_LIBRARY_PREFIX}aom${CMAKE_STATIC_LIBRARY_SUFFIX}") 131 | if(NOT EXISTS "${LIB_FILENAME}") 132 | message(FATAL_ERROR "libavif: ${LIB_FILENAME} (from avm) is missing, bailing out") 133 | endif() 134 | diff --git a/ext/libyuv.cmd b/ext/libyuv.cmd 135 | index c959777..1186156 100755 136 | --- a/ext/libyuv.cmd 137 | +++ b/ext/libyuv.cmd 138 | @@ -22,6 +22,6 @@ git checkout 464c51a0 139 | mkdir build 140 | cd build 141 | 142 | -cmake -G Ninja -DCMAKE_BUILD_TYPE=Release .. 143 | +cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DBUILD_SHARED_LIBS=OFF .. 144 | ninja yuv 145 | cd ../.. 146 | -- 147 | 2.30.0 148 | 149 | -------------------------------------------------------------------------------- /wheelbuild/toolchain-arm64-macos.cmake: -------------------------------------------------------------------------------- 1 | set(CMAKE_SYSTEM_PROCESSOR "arm64") 2 | set(CMAKE_SYSTEM_NAME "Darwin") 3 | set(CMAKE_OSX_ARCHITECTURES "arm64") 4 | set(CMAKE_C_FLAGS_INIT "-arch arm64") 5 | set(CMAKE_CXX_FLAGS_INIT "-arch arm64") 6 | set(CMAKE_EXE_LINKER_FLAGS_INIT "-arch arm64") 7 | set(CMAKE_ASM_LINKER_DEPFILE_SUPPORTED TRUE) 8 | -------------------------------------------------------------------------------- /winbuild/Findrav1e.cmake: -------------------------------------------------------------------------------- 1 | file(TO_CMAKE_PATH "${AVIF_RAV1E_ROOT}" RAV1E_ROOT_PATH) 2 | add_library(rav1e::rav1e STATIC IMPORTED GLOBAL) 3 | set_target_properties(rav1e::rav1e PROPERTIES 4 | IMPORTED_LOCATION "${RAV1E_ROOT_PATH}/lib/rav1e.lib" 5 | AVIF_LOCAL ON 6 | INTERFACE_INCLUDE_DIRECTORIES "${RAV1E_ROOT_PATH}/inc/rav1e" 7 | ) 8 | -------------------------------------------------------------------------------- /winbuild/build_prepare.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import os 5 | import platform 6 | import re 7 | import shutil 8 | import struct 9 | import subprocess 10 | from typing import Any 11 | 12 | 13 | def cmd_cd(path: str) -> str: 14 | return f"cd /D {path}" 15 | 16 | 17 | def cmd_set(name: str, value: str) -> str: 18 | return f"set {name}={value}" 19 | 20 | 21 | def cmd_append(name: str, value: str) -> str: 22 | op = "path " if name == "PATH" else f"set {name}=" 23 | return op + f"%{name}%;{value}" 24 | 25 | 26 | def cmd_copy(src: str, tgt: str) -> str: 27 | return f'copy /Y /B "{src}" "{tgt}"' 28 | 29 | 30 | def cmd_xcopy(src: str, tgt: str) -> str: 31 | return f'xcopy /Y /E "{src}" "{tgt}"' 32 | 33 | 34 | def cmd_mkdir(path: str) -> str: 35 | return f'mkdir "{path}"' 36 | 37 | 38 | 
def cmd_rmdir(path: str) -> str: 39 | return f'rmdir /S /Q "{path}"' 40 | 41 | 42 | def cmd_nmake( 43 | makefile: str | None = None, 44 | target: str = "", 45 | params: list[str] | None = None, 46 | ) -> str: 47 | return " ".join( 48 | [ 49 | "{nmake}", 50 | "-nologo", 51 | f'-f "{makefile}"' if makefile is not None else "", 52 | f'{" ".join(params)}' if params is not None else "", 53 | f'"{target}"', 54 | ] 55 | ) 56 | 57 | 58 | def cmds_cmake( 59 | target: str | tuple[str, ...] | list[str], *params: str, build_dir: str = "." 60 | ) -> list[str]: 61 | if not isinstance(target, str): 62 | target = " ".join(target) 63 | 64 | return [ 65 | " ".join( 66 | [ 67 | "{cmake}", 68 | "-DCMAKE_BUILD_TYPE=Release", 69 | "-DCMAKE_VERBOSE_MAKEFILE=ON", 70 | "-DCMAKE_RULE_MESSAGES:BOOL=OFF", # for NMake 71 | "-DCMAKE_C_COMPILER=cl.exe", # for Ninja 72 | "-DCMAKE_CXX_COMPILER=cl.exe", # for Ninja 73 | "-DCMAKE_C_FLAGS=-nologo", 74 | "-DCMAKE_CXX_FLAGS=-nologo", 75 | *params, 76 | '-G "{cmake_generator}"', 77 | f'-B "{build_dir}"', 78 | "-S .", 79 | ] 80 | ), 81 | f'{{cmake}} --build "{build_dir}" --clean-first --parallel --target {target}', 82 | ] 83 | 84 | 85 | def cmd_msbuild( 86 | file: str, 87 | configuration: str = "Release", 88 | target: str = "Build", 89 | plat: str = "{msbuild_arch}", 90 | ) -> str: 91 | return " ".join( 92 | [ 93 | "{msbuild}", 94 | f"{file}", 95 | f'/t:"{target}"', 96 | f'/p:Configuration="{configuration}"', 97 | f"/p:Platform={plat}", 98 | "/m", 99 | ] 100 | ) 101 | 102 | 103 | SF_PROJECTS = "https://sourceforge.net/projects" 104 | 105 | ARCHITECTURES = { 106 | "x86": {"vcvars_arch": "x86", "msbuild_arch": "Win32"}, 107 | "AMD64": {"vcvars_arch": "x86_amd64", "msbuild_arch": "x64"}, 108 | "ARM64": {"vcvars_arch": "x86_arm64", "msbuild_arch": "ARM64"}, 109 | } 110 | 111 | V = { 112 | "MESON": "1.5.1", 113 | "LIBAVIF": "1.2.1", 114 | } 115 | 116 | 117 | # dependencies, listed in order of compilation 118 | DEPS: dict[str, dict[str, Any]] = { 119 | "libavif": { 120 | "url": f"https://github.com/AOMediaCodec/libavif/archive/v{V['LIBAVIF']}.zip", 121 | "filename": f"libavif-{V['LIBAVIF']}.zip", 122 | "dir": f"libavif-{V['LIBAVIF']}", 123 | "license": "LICENSE", 124 | "build": [ 125 | cmd_mkdir("build.pillow"), 126 | cmd_cd("build.pillow"), 127 | " ".join( 128 | [ 129 | "{cmake}", 130 | "-DCMAKE_BUILD_TYPE=MinSizeRel", 131 | "-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=ON", 132 | "-DCMAKE_C_VISIBILITY_PRESET=hidden", 133 | "-DCMAKE_CXX_VISIBILITY_PRESET=hidden", 134 | "-DCMAKE_VERBOSE_MAKEFILE=ON", 135 | "-DCMAKE_RULE_MESSAGES:BOOL=OFF", 136 | "-DCMAKE_C_COMPILER=cl.exe", 137 | "-DCMAKE_CXX_COMPILER=cl.exe", 138 | "-DCMAKE_C_FLAGS=-nologo", 139 | "-DCMAKE_CXX_FLAGS=-nologo", 140 | "-DBUILD_SHARED_LIBS=OFF", 141 | "-DAVIF_CODEC_AOM=LOCAL", 142 | "-DAVIF_LIBYUV=LOCAL", 143 | "-DAVIF_LIBSHARPYUV=LOCAL", 144 | "-DAVIF_CODEC_RAV1E=LOCAL", 145 | "-DAVIF_CODEC_DAV1D=LOCAL", 146 | "-DAVIF_CODEC_SVT=LOCAL", 147 | '-G "Ninja"', 148 | "..", 149 | ] 150 | ), 151 | "ninja -v", 152 | cmd_cd(".."), 153 | cmd_xcopy("include", "{inc_dir}"), 154 | ], 155 | "libs": [r"build.pillow\avif.lib"], 156 | }, 157 | } 158 | 159 | 160 | # based on distutils._msvccompiler from CPython 3.7.4 161 | def find_msvs(architecture: str) -> dict[str, str] | None: 162 | root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") 163 | if not root: 164 | print("Program Files not found") 165 | return None 166 | 167 | requires = ["-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64"] 168 | if architecture 
== "ARM64": 169 | requires += ["-requires", "Microsoft.VisualStudio.Component.VC.Tools.ARM64"] 170 | 171 | try: 172 | vspath = ( 173 | subprocess.check_output( 174 | [ 175 | os.path.join( 176 | root, "Microsoft Visual Studio", "Installer", "vswhere.exe" 177 | ), 178 | "-latest", 179 | "-prerelease", 180 | *requires, 181 | "-property", 182 | "installationPath", 183 | "-products", 184 | "*", 185 | ] 186 | ) 187 | .decode(encoding="mbcs") 188 | .strip() 189 | ) 190 | except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): 191 | print("vswhere not found") 192 | return None 193 | 194 | if not os.path.isdir(os.path.join(vspath, "VC", "Auxiliary", "Build")): 195 | print("Visual Studio seems to be missing C compiler") 196 | return None 197 | 198 | # vs2017 199 | msbuild = os.path.join(vspath, "MSBuild", "15.0", "Bin", "MSBuild.exe") 200 | if not os.path.isfile(msbuild): 201 | # vs2019 202 | msbuild = os.path.join(vspath, "MSBuild", "Current", "Bin", "MSBuild.exe") 203 | if not os.path.isfile(msbuild): 204 | print("Visual Studio MSBuild not found") 205 | return None 206 | 207 | vcvarsall = os.path.join(vspath, "VC", "Auxiliary", "Build", "vcvarsall.bat") 208 | if not os.path.isfile(vcvarsall): 209 | print("Visual Studio vcvarsall not found") 210 | return None 211 | 212 | return { 213 | "vs_dir": vspath, 214 | "msbuild": f'"{msbuild}"', 215 | "vcvarsall": f'"{vcvarsall}"', 216 | "nmake": "nmake.exe", # nmake selected by vcvarsall 217 | } 218 | 219 | 220 | def download_dep(url: str, file: str) -> None: 221 | import urllib.error 222 | import urllib.request 223 | 224 | ex = None 225 | for i in range(3): 226 | try: 227 | print(f"Fetching {url} (attempt {i + 1})...") 228 | content = urllib.request.urlopen(url).read() 229 | with open(file, "wb") as f: 230 | f.write(content) 231 | break 232 | except urllib.error.URLError as e: 233 | ex = e 234 | else: 235 | raise RuntimeError(ex) 236 | 237 | 238 | def extract_dep(url: str, filename: str, prefs: dict[str, str]) -> None: 239 | import tarfile 240 | import zipfile 241 | 242 | depends_dir = prefs["depends_dir"] 243 | sources_dir = prefs["src_dir"] 244 | 245 | file = os.path.join(depends_dir, filename) 246 | if not os.path.exists(file): 247 | download_dep(url, file) 248 | 249 | print("Extracting " + filename) 250 | sources_dir_abs = os.path.abspath(sources_dir) 251 | if filename.endswith(".zip"): 252 | with zipfile.ZipFile(file) as zf: 253 | for member in zf.namelist(): 254 | member_abspath = os.path.abspath(os.path.join(sources_dir, member)) 255 | member_prefix = os.path.commonpath([sources_dir_abs, member_abspath]) 256 | if sources_dir_abs != member_prefix: 257 | msg = "Attempted Path Traversal in Zip File" 258 | raise RuntimeError(msg) 259 | zf.extractall(sources_dir) 260 | elif filename.endswith((".tar.gz", ".tgz")): 261 | with tarfile.open(file, "r:gz") as tgz: 262 | for member in tgz.getnames(): 263 | member_abspath = os.path.abspath(os.path.join(sources_dir, member)) 264 | member_prefix = os.path.commonpath([sources_dir_abs, member_abspath]) 265 | if sources_dir_abs != member_prefix: 266 | msg = "Attempted Path Traversal in Tar File" 267 | raise RuntimeError(msg) 268 | tgz.extractall(sources_dir) 269 | else: 270 | msg = "Unknown archive type: " + filename 271 | raise RuntimeError(msg) 272 | 273 | 274 | def write_script( 275 | name: str, lines: list[str], prefs: dict[str, str], verbose: bool 276 | ) -> None: 277 | name = os.path.join(prefs["build_dir"], name) 278 | lines = [line.format(**prefs) for line in lines] 279 | print("Writing " + name) 
280 | with open(name, "w", newline="") as f: 281 | f.write(os.linesep.join(lines)) 282 | if verbose: 283 | for line in lines: 284 | print(" " + line) 285 | 286 | 287 | def get_footer(dep: dict[str, Any]) -> list[str]: 288 | lines = [] 289 | for out in dep.get("headers", []): 290 | lines.append(cmd_copy(out, "{inc_dir}")) 291 | for out in dep.get("libs", []): 292 | lines.append(cmd_copy(out, "{lib_dir}")) 293 | for out in dep.get("bins", []): 294 | lines.append(cmd_copy(out, "{bin_dir}")) 295 | return lines 296 | 297 | 298 | def build_env(prefs: dict[str, str], verbose: bool) -> None: 299 | lines = [ 300 | "if defined DISTUTILS_USE_SDK goto end", 301 | cmd_set("INCLUDE", "{inc_dir}"), 302 | cmd_set("INCLIB", "{lib_dir}"), 303 | cmd_set("LIB", "{lib_dir}"), 304 | cmd_append("PATH", "{bin_dir}"), 305 | "call {vcvarsall} {vcvars_arch}", 306 | cmd_set("DISTUTILS_USE_SDK", "1"), # use same compiler to build Pillow 307 | cmd_set("py_vcruntime_redist", "true"), # always use /MD, never /MT 308 | ":end", 309 | "@echo on", 310 | ] 311 | write_script("build_env.cmd", lines, prefs, verbose) 312 | 313 | 314 | def build_dep(name: str, prefs: dict[str, str], verbose: bool) -> str: 315 | dep = DEPS[name] 316 | directory = dep["dir"] 317 | file = f"build_dep_{name}.cmd" 318 | license_dir = prefs["license_dir"] 319 | sources_dir = prefs["src_dir"] 320 | 321 | extract_dep(dep["url"], dep["filename"], prefs) 322 | 323 | licenses = dep["license"] 324 | if isinstance(licenses, str): 325 | licenses = [licenses] 326 | license_text = "" 327 | for license_file in licenses: 328 | with open(os.path.join(sources_dir, directory, license_file)) as f: 329 | license_text += f.read() 330 | if "license_pattern" in dep: 331 | match = re.search(dep["license_pattern"], license_text, re.DOTALL) 332 | assert match is not None 333 | license_text = "\n".join(match.groups()) 334 | assert len(license_text) > 50 335 | with open(os.path.join(license_dir, f"{directory}.txt"), "w") as f: 336 | print(f"Writing license {directory}.txt") 337 | f.write(license_text) 338 | 339 | for patch_file, patch_list in dep.get("patch", {}).items(): 340 | patch_file = os.path.join(sources_dir, directory, patch_file.format(**prefs)) 341 | with open(patch_file) as f: 342 | text = f.read() 343 | for patch_from, patch_to in patch_list.items(): 344 | patch_from = patch_from.format(**prefs) 345 | patch_to = patch_to.format(**prefs) 346 | assert patch_from in text 347 | text = text.replace(patch_from, patch_to) 348 | with open(patch_file, "w") as f: 349 | print(f"Patching {patch_file}") 350 | f.write(text) 351 | 352 | banner = f"Building {name} ({directory})" 353 | lines = [ 354 | r'call "{build_dir}\build_env.cmd"', 355 | "@echo " + ("=" * 70), 356 | f"@echo ==== {banner:<60} ====", 357 | "@echo " + ("=" * 70), 358 | cmd_cd(os.path.join(sources_dir, directory)), 359 | *dep.get("build", []), 360 | *get_footer(dep), 361 | ] 362 | 363 | write_script(file, lines, prefs, verbose) 364 | return file 365 | 366 | 367 | def build_dep_all(disabled: list[str], prefs: dict[str, str], verbose: bool) -> None: 368 | lines = [r'call "{build_dir}\build_env.cmd"'] 369 | gha_groups = "GITHUB_ACTIONS" in os.environ 370 | scripts = ["install_meson.cmd"] 371 | for dep_name in DEPS: 372 | print() 373 | if dep_name in disabled: 374 | print(f"Skipping disabled dependency {dep_name}") 375 | continue 376 | scripts.append(build_dep(dep_name, prefs, verbose)) 377 | 378 | for script in scripts: 379 | if gha_groups: 380 | lines.append(f"@echo ::group::Running {script}") 381 | 
lines.append(rf'cmd.exe /c "{{build_dir}}\{script}"') 382 | lines.append("if errorlevel 1 echo Build failed! && exit /B 1") 383 | if gha_groups: 384 | lines.append("@echo ::endgroup::") 385 | print() 386 | lines.append("@echo All Pillow dependencies built successfully!") 387 | write_script("build_dep_all.cmd", lines, prefs, verbose) 388 | 389 | 390 | def main() -> None: 391 | winbuild_dir = os.path.dirname(os.path.realpath(__file__)) 392 | 393 | parser = argparse.ArgumentParser( 394 | prog="winbuild\\build_prepare.py", 395 | description=( 396 | "Download and generate build scripts " 397 | "for pillow-avif-plugin dependencies." 398 | ), 399 | ) 400 | parser.add_argument( 401 | "-v", "--verbose", action="store_true", help="print generated scripts" 402 | ) 403 | parser.add_argument( 404 | "-d", 405 | "--dir", 406 | "--build-dir", 407 | dest="build_dir", 408 | metavar="PILLOW_AVIF_PLUGIN_BUILD", 409 | default=os.environ.get( 410 | "PILLOW_AVIF_PLUGIN_BUILD", os.path.join(winbuild_dir, "build") 411 | ), 412 | help="build directory (default: 'winbuild\\build')", 413 | ) 414 | parser.add_argument( 415 | "--depends", 416 | dest="depends_dir", 417 | metavar="PILLOW_AVIF_PLUGIN_DEPS", 418 | default=os.environ.get( 419 | "PILLOW_AVIF_PLUGIN_DEPS", os.path.join(winbuild_dir, "depends") 420 | ), 421 | help="directory used to store cached dependencies " 422 | "(default: 'winbuild\\depends')", 423 | ) 424 | parser.add_argument( 425 | "--architecture", 426 | choices=ARCHITECTURES, 427 | default=os.environ.get( 428 | "ARCHITECTURE", 429 | ( 430 | "ARM64" 431 | if platform.machine() == "ARM64" 432 | else ("x86" if struct.calcsize("P") == 4 else "AMD64") 433 | ), 434 | ), 435 | help="build architecture (default: same as host Python)", 436 | ) 437 | parser.add_argument( 438 | "--nmake", 439 | dest="cmake_generator", 440 | action="store_const", 441 | const="NMake Makefiles", 442 | default="Ninja", 443 | help="build dependencies using NMake instead of Ninja", 444 | ) 445 | 446 | args = parser.parse_args() 447 | 448 | arch_prefs = ARCHITECTURES[args.architecture] 449 | print("Target architecture:", args.architecture) 450 | 451 | msvs = find_msvs(args.architecture) 452 | if msvs is None: 453 | msg = "Visual Studio not found. Please install Visual Studio 2017 or newer." 
454 | raise RuntimeError(msg) 455 | print("Found Visual Studio at:", msvs["vs_dir"]) 456 | 457 | # dependency cache directory 458 | args.depends_dir = os.path.abspath(args.depends_dir) 459 | os.makedirs(args.depends_dir, exist_ok=True) 460 | print("Caching dependencies in:", args.depends_dir) 461 | 462 | args.build_dir = os.path.abspath(args.build_dir) 463 | print("Using output directory:", args.build_dir) 464 | 465 | # build directory for *.h files 466 | inc_dir = os.path.join(args.build_dir, "inc") 467 | # build directory for *.lib files 468 | lib_dir = os.path.join(args.build_dir, "lib") 469 | # build directory for *.bin files 470 | bin_dir = os.path.join(args.build_dir, "bin") 471 | # directory for storing project files 472 | sources_dir = os.path.join(args.build_dir, "src") 473 | # copy dependency licenses to this directory 474 | license_dir = os.path.join(args.build_dir, "license") 475 | 476 | shutil.rmtree(args.build_dir, ignore_errors=True) 477 | os.makedirs(args.build_dir, exist_ok=False) 478 | for path in [inc_dir, lib_dir, bin_dir, sources_dir, license_dir]: 479 | os.makedirs(path, exist_ok=True) 480 | 481 | disabled = [] 482 | 483 | prefs = { 484 | "architecture": args.architecture, 485 | **arch_prefs, 486 | # Pillow paths 487 | "winbuild_dir": winbuild_dir, 488 | "winbuild_dir_cmake": winbuild_dir.replace("\\", "/"), 489 | # Build paths 490 | "bin_dir": bin_dir, 491 | "build_dir": args.build_dir, 492 | "depends_dir": args.depends_dir, 493 | "inc_dir": inc_dir, 494 | "lib_dir": lib_dir, 495 | "license_dir": license_dir, 496 | "src_dir": sources_dir, 497 | # Compilers / Tools 498 | **msvs, 499 | "cmake": "cmake.exe", # TODO find CMAKE automatically 500 | "cmake_generator": args.cmake_generator, 501 | # TODO find NASM automatically 502 | } 503 | 504 | for k, v in DEPS.items(): 505 | prefs[f"dir_{k}"] = os.path.join(sources_dir, v["dir"]) 506 | 507 | print() 508 | 509 | write_script(".gitignore", ["*"], prefs, args.verbose) 510 | write_script( 511 | "install_meson.cmd", 512 | [ 513 | r'call "{build_dir}\build_env.cmd"', 514 | "@echo " + ("=" * 70), 515 | f"@echo ==== {'Building meson':<60} ====", 516 | "@echo " + ("=" * 70), 517 | f"python -mpip install meson=={V['MESON']}", 518 | ], 519 | prefs, 520 | args.verbose, 521 | ) 522 | build_env(prefs, args.verbose) 523 | build_dep_all(disabled, prefs, args.verbose) 524 | 525 | 526 | if __name__ == "__main__": 527 | main() 528 | --------------------------------------------------------------------------------