├── .azure-pipelines └── azure-pipelines-osx.yml ├── .ci_support ├── README ├── linux_64_cuda_compiler_version12.8microarch_level1.yaml ├── linux_64_cuda_compiler_version12.8microarch_level3.yaml ├── linux_64_cuda_compiler_versionNonemicroarch_level1.yaml ├── linux_64_cuda_compiler_versionNonemicroarch_level3.yaml ├── linux_aarch64_cuda_compiler_versionNone.yaml ├── migrations │ ├── absl_grpc_proto_25Q2.yaml │ ├── cuda128.yaml │ └── cudnn910.yaml ├── osx_64_.yaml └── osx_arm64_.yaml ├── .circleci └── config.yml ├── .gitattributes ├── .github ├── CODEOWNERS └── workflows │ └── conda-build.yml ├── .gitignore ├── .scripts ├── build_steps.sh ├── logging_utils.sh ├── run_docker_build.sh └── run_osx_build.sh ├── LICENSE.txt ├── README.md ├── azure-pipelines.yml ├── build-locally.py ├── conda-forge.yml └── recipe ├── LICENSE ├── README.md ├── add_py_toolchain.sh ├── bld.bat ├── build.sh ├── build_common.sh ├── build_estimator.sh ├── build_pkg.bat ├── build_pkg.sh ├── conda_build_config.yaml ├── cp_libtensorflow.sh ├── cp_libtensorflow_cc.sh ├── cp_libtensorflow_framework.sh ├── meta.yaml ├── patches ├── 0001-loosen-requirements.patch ├── 0002-Add-additional-absl_synchronization-linkage-to-gRPC.patch ├── 0003-Fix-missing-abseil-linkages.patch ├── 0004-Fix-protobuf_python-for-systemlibs.patch ├── 0005-Add-absl_log-systemlib.patch ├── 0006-Omit-linking-to-layout_proto_cc-if-protobuf-linkage-.patch ├── 0007-Fix-further-abseil-linkage.patch ├── 0008-Add-constraint-to-pybind11-systemlib.patch ├── 0009-Different-file-ending-for-flatbuffers-LICENSE.patch ├── 0010-Use-correct-hermetic-python.patch ├── 0011-Add-well_known_types_py_pb2-to-protobuf-systemlib.patch ├── 0012-Add-protobuf-toolchain.patch ├── 0013-fix-genproto.patch ├── 0014-Remove-some-usage-of-absl-str_format-in-CUDA.patch ├── 0015-Adjust-relative-path-for-libdevice.patch ├── 0016-Link-to-absl_log_flags-instead-of-absl_flags.patch ├── 0017-Update-ABSL-Log-Definition-for-libabsl_vlog_config_i.patch ├── 
0018-add-absl_string_view-target.patch ├── 0019-add-absl_nullability-target.patch ├── 0020-add-absl_prefetch-target.patch ├── 0021-add-absl_die_if_null-target.patch ├── 0022-add-absl_crc32c-targets.patch ├── 0023-add-kernel_timeout_internal-target.patch ├── 0024-work-around-for-warning-that-clang-falsely-treats-as.patch ├── 0025-Hardcode-BUILD_PREFIX-in-build_pip_package.patch ├── 0026-Only-link-to-sparse_core_layout_proto_cc-headers.patch ├── 0027-Protobuf-5-compatability.patch ├── 0028-Avoid-linking-with-internal-nvrtc.patch ├── 0029-remove-dependencies-to-libcuda.patch ├── 0030-Fixup-pybind11_protobuf.patch ├── 0031-Update-linkages-for-new-absl-organization.patch ├── 0032-Remove-ambiguous-inherited-constructor-in-default_qu.patch ├── 0033-third_party-ducc-fix-ambiguous-failure.patch ├── 0034-third_party-tf_runtime-fix-compile-failure.patch ├── 0035-support-to-build-with-gcc-15.patch ├── 0036-third_party-eigen_archive-workaround-ice-failure-whi.patch ├── 0037-add-absl_tracing_internal.patch ├── 0038-Fix-building-different-python-wheels-from-one-python.patch ├── 0039-Fix-matmul-unused-result-error.patch ├── 0040-Support-cuda-12.8.patch ├── 0041-Disable-profiler.patch ├── 0042-bump-h5py-req.patch └── 0043-cross-arch-config.patch ├── py_toolchain.bzl ├── pybind11_protobuf ├── 0001-DO-not-link-to-proto_api.patch └── 0002-Add-Python-include-path.patch ├── test_c.c ├── test_cc.cc ├── test_libtensorflow.sh ├── test_libtensorflow_cc.sh └── test_tensorflow.py /.azure-pipelines/azure-pipelines-osx.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 
3 | # -*- mode: yaml -*- 4 | 5 | jobs: 6 | - job: osx 7 | pool: 8 | vmImage: macOS-15 9 | strategy: 10 | matrix: 11 | osx_64_: 12 | CONFIG: osx_64_ 13 | UPLOAD_PACKAGES: 'True' 14 | osx_arm64_: 15 | CONFIG: osx_arm64_ 16 | UPLOAD_PACKAGES: 'True' 17 | timeoutInMinutes: 360 18 | variables: {} 19 | 20 | steps: 21 | # TODO: Fast finish on azure pipelines? 22 | - script: | 23 | export CI=azure 24 | export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) 25 | export remote_url=$(Build.Repository.Uri) 26 | export sha=$(Build.SourceVersion) 27 | export OSX_FORCE_SDK_DOWNLOAD="1" 28 | export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME 29 | export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) 30 | if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then 31 | export IS_PR_BUILD="True" 32 | else 33 | export IS_PR_BUILD="False" 34 | fi 35 | ./.scripts/run_osx_build.sh 36 | displayName: Run OSX build 37 | env: 38 | BINSTAR_TOKEN: $(BINSTAR_TOKEN) 39 | FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) 40 | STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) 41 | -------------------------------------------------------------------------------- /.ci_support/README: -------------------------------------------------------------------------------- 1 | This file is automatically generated by conda-smithy. If any 2 | particular build configuration is expected, but it is not found, 3 | please make sure all dependencies are satisfiable. To add/modify any 4 | matrix elements, you should create/change conda-smithy's input 5 | recipe/conda_build_config.yaml and re-render the recipe, rather than 6 | editing these files directly. 
7 | -------------------------------------------------------------------------------- /.ci_support/linux_64_cuda_compiler_version12.8microarch_level1.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | c_compiler_version: 4 | - '14' 5 | c_stdlib: 6 | - sysroot 7 | c_stdlib_version: 8 | - '2.17' 9 | channel_sources: 10 | - conda-forge 11 | channel_targets: 12 | - conda-forge main 13 | cuda_compiler: 14 | - cuda-nvcc 15 | cuda_compiler_version: 16 | - '12.8' 17 | cudnn: 18 | - '9.10' 19 | cxx_compiler: 20 | - gxx 21 | cxx_compiler_version: 22 | - '14' 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-x86_64:alma9 25 | flatbuffers: 26 | - 25.2.10 27 | giflib: 28 | - '5.2' 29 | github_actions_labels: 30 | - cirun-openstack-cpu-2xlarge 31 | icu: 32 | - '75' 33 | libabseil: 34 | - '20250512' 35 | libcurl: 36 | - '8' 37 | libgrpc: 38 | - '1.73' 39 | libjpeg_turbo: 40 | - '3' 41 | libpng: 42 | - '1.6' 43 | libprotobuf: 44 | - 6.31.1 45 | libtensorflow_cc: 46 | - '2.16' 47 | microarch_level: 48 | - '1' 49 | nccl: 50 | - '2' 51 | numpy: 52 | - '2' 53 | openssl: 54 | - '3.5' 55 | perl: 56 | - 5.32.1 57 | pin_run_as_build: 58 | python: 59 | min_pin: x.x 60 | max_pin: x.x 61 | python: 62 | - 3.10.* *_cpython 63 | - 3.11.* *_cpython 64 | - 3.12.* *_cpython 65 | - 3.13.* *_cp313 66 | snappy: 67 | - '1.2' 68 | sqlite: 69 | - '3' 70 | target_platform: 71 | - linux-64 72 | tensorflow: 73 | - '2.16' 74 | zip_keys: 75 | - - c_compiler_version 76 | - cxx_compiler_version 77 | - c_stdlib_version 78 | - cuda_compiler_version 79 | - cudnn 80 | zlib: 81 | - '1' 82 | -------------------------------------------------------------------------------- /.ci_support/linux_64_cuda_compiler_version12.8microarch_level3.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | c_compiler_version: 4 | - '14' 5 | c_stdlib: 6 | - sysroot 7 | c_stdlib_version: 8 | - '2.17' 9 | 
channel_sources: 10 | - conda-forge 11 | channel_targets: 12 | - conda-forge main 13 | cuda_compiler: 14 | - cuda-nvcc 15 | cuda_compiler_version: 16 | - '12.8' 17 | cudnn: 18 | - '9.10' 19 | cxx_compiler: 20 | - gxx 21 | cxx_compiler_version: 22 | - '14' 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-x86_64:alma9 25 | flatbuffers: 26 | - 25.2.10 27 | giflib: 28 | - '5.2' 29 | github_actions_labels: 30 | - cirun-openstack-cpu-2xlarge 31 | icu: 32 | - '75' 33 | libabseil: 34 | - '20250512' 35 | libcurl: 36 | - '8' 37 | libgrpc: 38 | - '1.73' 39 | libjpeg_turbo: 40 | - '3' 41 | libpng: 42 | - '1.6' 43 | libprotobuf: 44 | - 6.31.1 45 | libtensorflow_cc: 46 | - '2.16' 47 | microarch_level: 48 | - '3' 49 | nccl: 50 | - '2' 51 | numpy: 52 | - '2' 53 | openssl: 54 | - '3.5' 55 | perl: 56 | - 5.32.1 57 | pin_run_as_build: 58 | python: 59 | min_pin: x.x 60 | max_pin: x.x 61 | python: 62 | - 3.10.* *_cpython 63 | - 3.11.* *_cpython 64 | - 3.12.* *_cpython 65 | - 3.13.* *_cp313 66 | snappy: 67 | - '1.2' 68 | sqlite: 69 | - '3' 70 | target_platform: 71 | - linux-64 72 | tensorflow: 73 | - '2.16' 74 | zip_keys: 75 | - - c_compiler_version 76 | - cxx_compiler_version 77 | - c_stdlib_version 78 | - cuda_compiler_version 79 | - cudnn 80 | zlib: 81 | - '1' 82 | -------------------------------------------------------------------------------- /.ci_support/linux_64_cuda_compiler_versionNonemicroarch_level1.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | c_compiler_version: 4 | - '14' 5 | c_stdlib: 6 | - sysroot 7 | c_stdlib_version: 8 | - '2.17' 9 | channel_sources: 10 | - conda-forge 11 | channel_targets: 12 | - conda-forge main 13 | cuda_compiler: 14 | - cuda-nvcc 15 | cuda_compiler_version: 16 | - None 17 | cudnn: 18 | - '9' 19 | cxx_compiler: 20 | - gxx 21 | cxx_compiler_version: 22 | - '14' 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-x86_64:alma9 25 | flatbuffers: 26 | - 25.2.10 27 | 
giflib: 28 | - '5.2' 29 | github_actions_labels: 30 | - cirun-openstack-cpu-2xlarge 31 | icu: 32 | - '75' 33 | libabseil: 34 | - '20250512' 35 | libcurl: 36 | - '8' 37 | libgrpc: 38 | - '1.73' 39 | libjpeg_turbo: 40 | - '3' 41 | libpng: 42 | - '1.6' 43 | libprotobuf: 44 | - 6.31.1 45 | libtensorflow_cc: 46 | - '2.16' 47 | microarch_level: 48 | - '1' 49 | nccl: 50 | - '2' 51 | numpy: 52 | - '2' 53 | openssl: 54 | - '3.5' 55 | perl: 56 | - 5.32.1 57 | pin_run_as_build: 58 | python: 59 | min_pin: x.x 60 | max_pin: x.x 61 | python: 62 | - 3.10.* *_cpython 63 | - 3.11.* *_cpython 64 | - 3.12.* *_cpython 65 | - 3.13.* *_cp313 66 | snappy: 67 | - '1.2' 68 | sqlite: 69 | - '3' 70 | target_platform: 71 | - linux-64 72 | tensorflow: 73 | - '2.16' 74 | zip_keys: 75 | - - c_compiler_version 76 | - cxx_compiler_version 77 | - c_stdlib_version 78 | - cuda_compiler_version 79 | - cudnn 80 | zlib: 81 | - '1' 82 | -------------------------------------------------------------------------------- /.ci_support/linux_64_cuda_compiler_versionNonemicroarch_level3.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | c_compiler_version: 4 | - '14' 5 | c_stdlib: 6 | - sysroot 7 | c_stdlib_version: 8 | - '2.17' 9 | channel_sources: 10 | - conda-forge 11 | channel_targets: 12 | - conda-forge main 13 | cuda_compiler: 14 | - cuda-nvcc 15 | cuda_compiler_version: 16 | - None 17 | cudnn: 18 | - '9' 19 | cxx_compiler: 20 | - gxx 21 | cxx_compiler_version: 22 | - '14' 23 | docker_image: 24 | - quay.io/condaforge/linux-anvil-x86_64:alma9 25 | flatbuffers: 26 | - 25.2.10 27 | giflib: 28 | - '5.2' 29 | github_actions_labels: 30 | - cirun-openstack-cpu-2xlarge 31 | icu: 32 | - '75' 33 | libabseil: 34 | - '20250512' 35 | libcurl: 36 | - '8' 37 | libgrpc: 38 | - '1.73' 39 | libjpeg_turbo: 40 | - '3' 41 | libpng: 42 | - '1.6' 43 | libprotobuf: 44 | - 6.31.1 45 | libtensorflow_cc: 46 | - '2.16' 47 | microarch_level: 48 | - '3' 49 | nccl: 50 | - 
'2' 51 | numpy: 52 | - '2' 53 | openssl: 54 | - '3.5' 55 | perl: 56 | - 5.32.1 57 | pin_run_as_build: 58 | python: 59 | min_pin: x.x 60 | max_pin: x.x 61 | python: 62 | - 3.10.* *_cpython 63 | - 3.11.* *_cpython 64 | - 3.12.* *_cpython 65 | - 3.13.* *_cp313 66 | snappy: 67 | - '1.2' 68 | sqlite: 69 | - '3' 70 | target_platform: 71 | - linux-64 72 | tensorflow: 73 | - '2.16' 74 | zip_keys: 75 | - - c_compiler_version 76 | - cxx_compiler_version 77 | - c_stdlib_version 78 | - cuda_compiler_version 79 | - cudnn 80 | zlib: 81 | - '1' 82 | -------------------------------------------------------------------------------- /.ci_support/linux_aarch64_cuda_compiler_versionNone.yaml: -------------------------------------------------------------------------------- 1 | c_compiler: 2 | - gcc 3 | c_compiler_version: 4 | - '11' 5 | c_stdlib: 6 | - sysroot 7 | c_stdlib_version: 8 | - '2.17' 9 | channel_sources: 10 | - conda-forge 11 | channel_targets: 12 | - conda-forge main 13 | cuda_compiler: 14 | - cuda-nvcc 15 | cuda_compiler_version: 16 | - None 17 | cxx_compiler: 18 | - gxx 19 | cxx_compiler_version: 20 | - '11' 21 | docker_image: 22 | - quay.io/condaforge/linux-anvil-x86_64:alma9 23 | flatbuffers: 24 | - 25.2.10 25 | giflib: 26 | - '5.2' 27 | github_actions_labels: 28 | - cirun-openstack-cpu-2xlarge 29 | icu: 30 | - '75' 31 | libabseil: 32 | - '20250512' 33 | libcurl: 34 | - '8' 35 | libgrpc: 36 | - '1.73' 37 | libjpeg_turbo: 38 | - '3' 39 | libpng: 40 | - '1.6' 41 | libprotobuf: 42 | - 6.31.1 43 | libtensorflow_cc: 44 | - '2.16' 45 | microarch_level: 46 | - '1' 47 | numpy: 48 | - '2' 49 | openssl: 50 | - '3.5' 51 | perl: 52 | - 5.32.1 53 | pin_run_as_build: 54 | python: 55 | min_pin: x.x 56 | max_pin: x.x 57 | python: 58 | - 3.10.* *_cpython 59 | - 3.11.* *_cpython 60 | - 3.12.* *_cpython 61 | - 3.13.* *_cp313 62 | snappy: 63 | - '1.2' 64 | sqlite: 65 | - '3' 66 | target_platform: 67 | - linux-aarch64 68 | tensorflow: 69 | - '2.16' 70 | zip_keys: 71 | - - c_compiler_version 
72 | - cxx_compiler_version 73 | - c_stdlib_version 74 | - cuda_compiler_version 75 | zlib: 76 | - '1' 77 | -------------------------------------------------------------------------------- /.ci_support/migrations/absl_grpc_proto_25Q2.yaml: -------------------------------------------------------------------------------- 1 | __migrator: 2 | build_number: 1 3 | commit_message: Rebuild for libabseil 20250512, libgrpc 1.73 & libprotobuf 6.31.1 4 | kind: version 5 | migration_number: 1 6 | exclude: 7 | # core deps 8 | - abseil-cpp 9 | - grpc-cpp 10 | - libprotobuf 11 | # required for building/testing 12 | - protobuf 13 | - re2 14 | # bazel stack 15 | - bazel 16 | - grpc_java_plugin 17 | - singlejar 18 | libabseil: 19 | - 20250512 20 | libgrpc: 21 | - "1.73" 22 | libprotobuf: 23 | - 6.31.1 24 | # we need to leave this migration open until we're ready to move the global baseline, see 25 | # https://github.com/conda-forge/conda-forge.github.io/issues/2467; grpc 1.72 requires 11.0, 26 | # see https://github.com/grpc/grpc/commit/f122d248443c81592e748da1adb240cbf0a0231c 27 | c_stdlib_version: # [osx] 28 | - 11.0 # [osx] 29 | migrator_ts: 1748506837.6039238 30 | -------------------------------------------------------------------------------- /.ci_support/migrations/cuda128.yaml: -------------------------------------------------------------------------------- 1 | migrator_ts: 1738229377 2 | __migrator: 3 | kind: 4 | version 5 | migration_number: 6 | 1 7 | build_number: 8 | 1 9 | paused: false 10 | use_local: true 11 | override_cbc_keys: 12 | - cuda_compiler_stub 13 | check_solvable: false 14 | primary_key: cuda_compiler_version 15 | ordering: 16 | cuda_compiler_version: 17 | - 11.8 18 | - 12.4 19 | - 12.6 20 | - None 21 | - 12.8 22 | - 12.9 23 | - 13.0 24 | commit_message: | 25 | Upgrade to CUDA 12.8 26 | 27 | With CUDA 12.8, the following new architectures are added `sm_100`, `sm_101` and `sm_120`. 
28 | To build for these architectures, maintainers will need to add these to list of architectures 29 | that their package builds for. 30 | 31 | ref: https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#new-features 32 | 33 | cuda_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 34 | - 12.8 # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 35 | 36 | c_stdlib_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 37 | - 2.17 # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 38 | 39 | cudnn: # [(linux or win) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 40 | - 9 # [(linux or win) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 41 | 42 | c_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 43 | - 11 # [linux and aarch64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 44 | - 14 # [linux and x86_64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 45 | 46 | cxx_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 47 | - 11 # [linux and aarch64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 48 | - 14 # [linux and x86_64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 49 | 50 | fortran_compiler_version: # [linux and (x86_64 or aarch64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 51 | - 11 # [linux and aarch64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 52 | - 14 # [linux and x86_64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 53 | -------------------------------------------------------------------------------- /.ci_support/migrations/cudnn910.yaml: -------------------------------------------------------------------------------- 1 | # needs to be ordered before CUDA 13.0 migrator 2 | migrator_ts: 1755016000 3 | __migrator: 4 | kind: 5 | version 6 | migration_number: 7 | 1 8 | build_number: 9 | 1 10 | 
ordering: 11 | cudnn: 12 | - None 13 | - 9 14 | - 9.10 15 | commit_message: | 16 | Rebuild for cudnn 9.10 17 | 18 | Conda-forge assumed cudnn 9.x builds would stay compatible with each other. 19 | This is the case for the API/ABI of the library, but nvidia dropped support 20 | for older GPU architectures in cudnn 9.11. Since we have no package-level 21 | metadata about compatibility with specific GPU architectures, this effectively 22 | breaks all packages built atop cudnn for users on affected older GPUs. 23 | 24 | In order to remedy this situation, we need to rebuild all cudnn-dependent 25 | feedstocks against cudnn 9.10 (the last version with full architecture support), 26 | before we mark all those newer cudnn builds as broken. This only affects artefacts 27 | for CUDA 12.x; those for CUDA 13.x are not affected, since CUDA 13 never supported 28 | those older architectures in the first place. 29 | 30 | For more details see: https://github.com/conda-forge/cudnn-feedstock/issues/124 31 | 32 | cudnn: 33 | - None 34 | - 9.10 # [((linux and not ppc64le) or win64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 35 | -------------------------------------------------------------------------------- /.ci_support/osx_64_.yaml: -------------------------------------------------------------------------------- 1 | MACOSX_DEPLOYMENT_TARGET: 2 | - '11.0' 3 | MACOSX_SDK_VERSION: 4 | - '11.0' 5 | c_compiler: 6 | - clang 7 | c_compiler_version: 8 | - '19' 9 | c_stdlib: 10 | - macosx_deployment_target 11 | c_stdlib_version: 12 | - '11.0' 13 | channel_sources: 14 | - conda-forge 15 | channel_targets: 16 | - conda-forge main 17 | cuda_compiler: 18 | - cuda-nvcc 19 | cuda_compiler_version: 20 | - None 21 | cxx_compiler: 22 | - clangxx 23 | cxx_compiler_version: 24 | - '19' 25 | flatbuffers: 26 | - 25.2.10 27 | giflib: 28 | - '5.2' 29 | icu: 30 | - '75' 31 | libabseil: 32 | - '20250512' 33 | libcurl: 34 | - '8' 35 | libgrpc: 36 | - '1.73' 37 | libjpeg_turbo: 38 | - '3' 39 | 
libpng: 40 | - '1.6' 41 | libprotobuf: 42 | - 6.31.1 43 | libtensorflow_cc: 44 | - '2.16' 45 | macos_machine: 46 | - x86_64-apple-darwin13.4.0 47 | microarch_level: 48 | - '1' 49 | numpy: 50 | - '2' 51 | openssl: 52 | - '3.5' 53 | pin_run_as_build: 54 | python: 55 | min_pin: x.x 56 | max_pin: x.x 57 | python: 58 | - 3.10.* *_cpython 59 | - 3.11.* *_cpython 60 | - 3.12.* *_cpython 61 | - 3.13.* *_cp313 62 | snappy: 63 | - '1.2' 64 | sqlite: 65 | - '3' 66 | target_platform: 67 | - osx-64 68 | tensorflow: 69 | - '2.16' 70 | zip_keys: 71 | - - c_compiler_version 72 | - cxx_compiler_version 73 | zlib: 74 | - '1' 75 | -------------------------------------------------------------------------------- /.ci_support/osx_arm64_.yaml: -------------------------------------------------------------------------------- 1 | MACOSX_DEPLOYMENT_TARGET: 2 | - '11.0' 3 | MACOSX_SDK_VERSION: 4 | - '11.0' 5 | c_compiler: 6 | - clang 7 | c_compiler_version: 8 | - '19' 9 | c_stdlib: 10 | - macosx_deployment_target 11 | c_stdlib_version: 12 | - '11.0' 13 | channel_sources: 14 | - conda-forge 15 | channel_targets: 16 | - conda-forge main 17 | cuda_compiler: 18 | - cuda-nvcc 19 | cuda_compiler_version: 20 | - None 21 | cxx_compiler: 22 | - clangxx 23 | cxx_compiler_version: 24 | - '19' 25 | flatbuffers: 26 | - 25.2.10 27 | giflib: 28 | - '5.2' 29 | icu: 30 | - '75' 31 | libabseil: 32 | - '20250512' 33 | libcurl: 34 | - '8' 35 | libgrpc: 36 | - '1.73' 37 | libjpeg_turbo: 38 | - '3' 39 | libpng: 40 | - '1.6' 41 | libprotobuf: 42 | - 6.31.1 43 | libtensorflow_cc: 44 | - '2.16' 45 | macos_machine: 46 | - arm64-apple-darwin20.0.0 47 | microarch_level: 48 | - '1' 49 | numpy: 50 | - '2' 51 | openssl: 52 | - '3.5' 53 | pin_run_as_build: 54 | python: 55 | min_pin: x.x 56 | max_pin: x.x 57 | python: 58 | - 3.10.* *_cpython 59 | - 3.11.* *_cpython 60 | - 3.12.* *_cpython 61 | - 3.13.* *_cp313 62 | snappy: 63 | - '1.2' 64 | sqlite: 65 | - '3' 66 | target_platform: 67 | - osx-arm64 68 | tensorflow: 69 | - 
'2.16' 70 | zip_keys: 71 | - - c_compiler_version 72 | - cxx_compiler_version 73 | zlib: 74 | - '1' 75 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: jinja-yaml -*- 4 | 5 | version: 2 6 | 7 | jobs: 8 | build: 9 | working_directory: ~/test 10 | machine: 11 | image: ubuntu-2004:current 12 | steps: 13 | - run: 14 | # The Circle-CI build should not be active, but if this is not true for some reason, do a fast finish. 15 | command: exit 0 16 | 17 | workflows: 18 | version: 2 19 | build_and_test: 20 | jobs: 21 | - build: 22 | filters: 23 | branches: 24 | ignore: 25 | - /.*/ 26 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | 3 | *.patch binary 4 | *.diff binary 5 | meta.yaml text eol=lf 6 | build.sh text eol=lf 7 | bld.bat text eol=crlf 8 | 9 | # github helper pieces to make some files not show up in diffs automatically 10 | .azure-pipelines/* linguist-generated=true 11 | .circleci/* linguist-generated=true 12 | .ci_support/README linguist-generated=true 13 | .drone/* linguist-generated=true 14 | .drone.yml linguist-generated=true 15 | .github/* linguist-generated=true 16 | .travis/* linguist-generated=true 17 | .appveyor.yml linguist-generated=true 18 | .gitattributes linguist-generated=true 19 | .gitignore linguist-generated=true 20 | .travis.yml linguist-generated=true 21 | .scripts/* linguist-generated=true 22 | .woodpecker.yml linguist-generated=true 23 | /LICENSE.txt linguist-generated=true 24 | /README.md linguist-generated=true 25 | azure-pipelines.yml linguist-generated=true 26 | build-locally.py 
linguist-generated=true 27 | pixi.toml linguist-generated=true 28 | shippable.yml linguist-generated=true 29 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @farhantejani @ghego @h-vetinari @hajapy @hmaarrfk @isuruf @jschueller @ngam @njzjz @waitingkuo @wolfv @xhochy -------------------------------------------------------------------------------- /.github/workflows/conda-build.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: yaml -*- 4 | 5 | name: Build conda package 6 | on: 7 | push: 8 | 9 | pull_request: 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | build: 17 | name: ${{ matrix.CONFIG }} 18 | runs-on: ${{ matrix.runs_on }} 19 | timeout-minutes: 720 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | include: 24 | - CONFIG: linux_64_cuda_compiler_version12.8microarch_level1 25 | UPLOAD_PACKAGES: True 26 | os: ubuntu 27 | runs_on: ['cirun-openstack-cpu-2xlarge--${{ github.run_id }}-linux_64_cuda_compiler_version12.8microa_hc8cea6d2', 'linux', 'x64', 'self-hosted'] 28 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 29 | - CONFIG: linux_64_cuda_compiler_version12.8microarch_level3 30 | UPLOAD_PACKAGES: True 31 | os: ubuntu 32 | runs_on: ['cirun-openstack-cpu-2xlarge--${{ github.run_id }}-linux_64_cuda_compiler_version12.8microa_h82e5350b', 'linux', 'x64', 'self-hosted'] 33 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 34 | - CONFIG: linux_64_cuda_compiler_versionNonemicroarch_level1 35 | UPLOAD_PACKAGES: True 36 | os: ubuntu 37 | runs_on: ['cirun-openstack-cpu-2xlarge--${{ 
github.run_id }}-linux_64_cuda_compiler_versionNonemicroa_h0e112a19', 'linux', 'x64', 'self-hosted'] 38 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 39 | - CONFIG: linux_64_cuda_compiler_versionNonemicroarch_level3 40 | UPLOAD_PACKAGES: True 41 | os: ubuntu 42 | runs_on: ['cirun-openstack-cpu-2xlarge--${{ github.run_id }}-linux_64_cuda_compiler_versionNonemicroa_hb1134425', 'linux', 'x64', 'self-hosted'] 43 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 44 | - CONFIG: linux_aarch64_cuda_compiler_versionNone 45 | UPLOAD_PACKAGES: True 46 | os: ubuntu 47 | runs_on: ['cirun-openstack-cpu-2xlarge--${{ github.run_id }}-linux_aarch64_cuda_compiler_versionNone', 'linux', 'x64', 'self-hosted'] 48 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 49 | steps: 50 | 51 | - name: Checkout code 52 | uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 53 | 54 | - name: Build on Linux 55 | id: build-linux 56 | if: matrix.os == 'ubuntu' 57 | env: 58 | CONFIG: ${{ matrix.CONFIG }} 59 | UPLOAD_PACKAGES: ${{ matrix.UPLOAD_PACKAGES }} 60 | DOCKER_IMAGE: ${{ matrix.DOCKER_IMAGE }} 61 | CI: github_actions 62 | CONDA_FORGE_DOCKER_RUN_ARGS: "${{ matrix.CONDA_FORGE_DOCKER_RUN_ARGS }}" 63 | BINSTAR_TOKEN: ${{ secrets.BINSTAR_TOKEN }} 64 | FEEDSTOCK_TOKEN: ${{ secrets.FEEDSTOCK_TOKEN }} 65 | STAGING_BINSTAR_TOKEN: ${{ secrets.STAGING_BINSTAR_TOKEN }} 66 | shell: bash 67 | run: | 68 | if [[ "$(uname -m)" == "x86_64" ]]; then 69 | echo "::group::Configure binfmt_misc" 70 | docker run --rm --privileged multiarch/qemu-user-static:register --reset --credential yes 71 | fi 72 | export flow_run_id="github_$GITHUB_RUN_ID" 73 | export remote_url="https://github.com/$GITHUB_REPOSITORY" 74 | export sha="$GITHUB_SHA" 75 | export FEEDSTOCK_NAME="$(basename $GITHUB_REPOSITORY)" 76 | export GIT_BRANCH="$(basename $GITHUB_REF)" 77 | if [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then 78 | export IS_PR_BUILD="True" 79 | else 80 | export 
IS_PR_BUILD="False" 81 | fi 82 | echo "::endgroup::" 83 | ./.scripts/run_docker_build.sh 84 | 85 | - name: Build on macOS 86 | id: build-macos 87 | if: matrix.os == 'macos' 88 | env: 89 | CONFIG: ${{ matrix.CONFIG }} 90 | UPLOAD_PACKAGES: ${{ matrix.UPLOAD_PACKAGES }} 91 | CI: github_actions 92 | BINSTAR_TOKEN: ${{ secrets.BINSTAR_TOKEN }} 93 | FEEDSTOCK_TOKEN: ${{ secrets.FEEDSTOCK_TOKEN }} 94 | STAGING_BINSTAR_TOKEN: ${{ secrets.STAGING_BINSTAR_TOKEN }} 95 | shell: bash 96 | run: | 97 | export flow_run_id="github_$GITHUB_RUN_ID" 98 | export remote_url="https://github.com/$GITHUB_REPOSITORY" 99 | export sha="$GITHUB_SHA" 100 | export FEEDSTOCK_NAME="$(basename $GITHUB_REPOSITORY)" 101 | export GIT_BRANCH="$(basename $GITHUB_REF)" 102 | if [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then 103 | export IS_PR_BUILD="True" 104 | else 105 | export IS_PR_BUILD="False" 106 | fi 107 | ./.scripts/run_osx_build.sh 108 | 109 | - name: Build on windows 110 | id: build-windows 111 | if: matrix.os == 'windows' 112 | shell: cmd 113 | run: | 114 | set "flow_run_id=github_%GITHUB_RUN_ID%" 115 | set "remote_url=https://github.com/%GITHUB_REPOSITORY%" 116 | set "sha=%GITHUB_SHA%" 117 | call ".scripts\run_win_build.bat" 118 | env: 119 | # default value; make it explicit, as it needs to match with artefact 120 | # generation below. Not configurable for now, can be revisited later 121 | CONDA_BLD_DIR: C:\bld 122 | MINIFORGE_HOME: D:\Miniforge 123 | PYTHONUNBUFFERED: 1 124 | CONFIG: ${{ matrix.CONFIG }} 125 | CI: github_actions 126 | UPLOAD_PACKAGES: ${{ matrix.UPLOAD_PACKAGES }} 127 | BINSTAR_TOKEN: ${{ secrets.BINSTAR_TOKEN }} 128 | FEEDSTOCK_TOKEN: ${{ secrets.FEEDSTOCK_TOKEN }} 129 | STAGING_BINSTAR_TOKEN: ${{ secrets.STAGING_BINSTAR_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # User content belongs under recipe/. 
2 | # Feedstock configuration goes in `conda-forge.yml` 3 | # Everything else is managed by the conda-smithy rerender process. 4 | # Please do not modify 5 | 6 | # Ignore all files and folders in root 7 | * 8 | !/conda-forge.yml 9 | 10 | # Don't ignore any files/folders if the parent folder is 'un-ignored' 11 | # This also avoids warnings when adding an already-checked file with an ignored parent. 12 | !/**/ 13 | # Don't ignore any files/folders recursively in the following folders 14 | !/recipe/** 15 | !/.ci_support/** 16 | 17 | # Since we ignore files/folders recursively, any folders inside 18 | # build_artifacts gets ignored which trips some build systems. 19 | # To avoid that we 'un-ignore' all files/folders recursively 20 | # and only ignore the root build_artifacts folder. 21 | !/build_artifacts/** 22 | /build_artifacts 23 | 24 | *.pyc 25 | 26 | # Rattler-build's artifacts are in `output` when not specifying anything. 27 | /output 28 | # Pixi's configuration 29 | .pixi 30 | -------------------------------------------------------------------------------- /.scripts/build_steps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 4 | # will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent 5 | # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 6 | # benefit from the improvement. 
7 | 8 | # -*- mode: jinja-shell -*- 9 | 10 | set -xeuo pipefail 11 | export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}" 12 | source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh 13 | 14 | 15 | ( endgroup "Start Docker" ) 2> /dev/null 16 | 17 | ( startgroup "Configuring conda" ) 2> /dev/null 18 | 19 | export PYTHONUNBUFFERED=1 20 | export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}" 21 | export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support" 22 | export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml" 23 | 24 | cat >~/.condarc < /opt/conda/conda-meta/history 36 | micromamba install --root-prefix ~/.conda --prefix /opt/conda \ 37 | --yes --override-channels --channel conda-forge --strict-channel-priority \ 38 | pip python=3.12 conda-build conda-libmamba-solver conda-forge-ci-setup=4 "conda-build>=24.1" 39 | export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 40 | 41 | # set up the condarc 42 | setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 43 | 44 | source run_conda_forge_build_setup 45 | 46 | 47 | 48 | # make the build number clobber 49 | make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 50 | 51 | if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]] && [[ "${HOST_PLATFORM}" != linux-* ]] && [[ "${BUILD_WITH_CONDA_DEBUG:-0}" != 1 ]]; then 52 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" 53 | fi 54 | 55 | 56 | ( endgroup "Configuring conda" ) 2> /dev/null 57 | 58 | if [[ -f "${FEEDSTOCK_ROOT}/LICENSE.txt" ]]; then 59 | cp "${FEEDSTOCK_ROOT}/LICENSE.txt" "${RECIPE_ROOT}/recipe-scripts-license.txt" 60 | fi 61 | 62 | if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then 63 | if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then 64 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" 65 | fi 66 | conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ 67 | ${EXTRA_CB_OPTIONS:-} \ 68 | --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" 69 | 70 | # Drop into an interactive shell 71 | 
/bin/bash 72 | else 73 | CONDA_SOLVER=libmamba conda-build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ 74 | --suppress-variables ${EXTRA_CB_OPTIONS:-} \ 75 | --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" \ 76 | --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}" 77 | ( startgroup "Inspecting artifacts" ) 2> /dev/null 78 | 79 | # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 80 | command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir "${RECIPE_ROOT}" -m "${CONFIG_FILE}" || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" 81 | 82 | ( endgroup "Inspecting artifacts" ) 2> /dev/null 83 | ( startgroup "Validating outputs" ) 2> /dev/null 84 | 85 | validate_recipe_outputs "${FEEDSTOCK_NAME}" 86 | 87 | ( endgroup "Validating outputs" ) 2> /dev/null 88 | 89 | ( startgroup "Uploading packages" ) 2> /dev/null 90 | 91 | if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then 92 | upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 93 | fi 94 | 95 | ( endgroup "Uploading packages" ) 2> /dev/null 96 | fi 97 | 98 | ( startgroup "Final checks" ) 2> /dev/null 99 | 100 | touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}" 101 | -------------------------------------------------------------------------------- /.scripts/logging_utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Provide a unified interface for the different logging 4 | # utilities CI providers offer. If unavailable, provide 5 | # a compatible fallback (e.g. bare `echo xxxxxx`). 
6 | 7 | function startgroup { 8 | # Start a foldable group of log lines 9 | # Pass a single argument, quoted 10 | case ${CI:-} in 11 | azure ) 12 | echo "##[group]$1";; 13 | travis ) 14 | echo "$1" 15 | echo -en 'travis_fold:start:'"${1// /}"'\r';; 16 | github_actions ) 17 | echo "::group::$1";; 18 | * ) 19 | echo "$1";; 20 | esac 21 | } 2> /dev/null 22 | 23 | function endgroup { 24 | # End a foldable group of log lines 25 | # Pass a single argument, quoted 26 | 27 | case ${CI:-} in 28 | azure ) 29 | echo "##[endgroup]";; 30 | travis ) 31 | echo -en 'travis_fold:end:'"${1// /}"'\r';; 32 | github_actions ) 33 | echo "::endgroup::";; 34 | esac 35 | } 2> /dev/null 36 | -------------------------------------------------------------------------------- /.scripts/run_docker_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 4 | # will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent 5 | # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 6 | # benefit from the improvement. 7 | 8 | source .scripts/logging_utils.sh 9 | 10 | ( startgroup "Configure Docker" ) 2> /dev/null 11 | 12 | set -xeo pipefail 13 | 14 | THISDIR="$( cd "$( dirname "$0" )" >/dev/null && pwd )" 15 | PROVIDER_DIR="$(basename "$THISDIR")" 16 | 17 | FEEDSTOCK_ROOT="$( cd "$( dirname "$0" )/.." 
>/dev/null && pwd )" 18 | RECIPE_ROOT="${FEEDSTOCK_ROOT}/recipe" 19 | 20 | if [ -z ${FEEDSTOCK_NAME} ]; then 21 | export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) 22 | fi 23 | 24 | if [[ "${sha:-}" == "" ]]; then 25 | pushd "${FEEDSTOCK_ROOT}" 26 | sha=$(git rev-parse HEAD) 27 | popd 28 | fi 29 | 30 | docker info 31 | 32 | # In order for the conda-build process in the container to write to the mounted 33 | # volumes, we need to run with the same id as the host machine, which is 34 | # normally the owner of the mounted volumes, or at least has write permission 35 | export HOST_USER_ID=$(id -u) 36 | # Check if docker-machine is being used (normally on OSX) and get the uid from 37 | # the VM 38 | if hash docker-machine 2> /dev/null && docker-machine active > /dev/null; then 39 | export HOST_USER_ID=$(docker-machine ssh $(docker-machine active) id -u) 40 | fi 41 | 42 | ARTIFACTS="$FEEDSTOCK_ROOT/build_artifacts" 43 | 44 | if [ -z "$CONFIG" ]; then 45 | set +x 46 | FILES=`ls .ci_support/linux_*` 47 | CONFIGS="" 48 | for file in $FILES; do 49 | CONFIGS="${CONFIGS}'${file:12:-5}' or "; 50 | done 51 | echo "Need to set CONFIG env variable. Value can be one of ${CONFIGS:0:-4}" 52 | exit 1 53 | fi 54 | 55 | if [ -z "${DOCKER_IMAGE}" ]; then 56 | SHYAML_INSTALLED="$(shyaml -h || echo NO)" 57 | if [ "${SHYAML_INSTALLED}" == "NO" ]; then 58 | echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Trying to parse with coreutils" 59 | DOCKER_IMAGE=$(cat .ci_support/${CONFIG}.yaml | grep '^docker_image:$' -A 1 | tail -n 1 | cut -b 3-) 60 | if [ "${DOCKER_IMAGE}" = "" ]; then 61 | echo "No docker_image entry found in ${CONFIG}. 
Falling back to quay.io/condaforge/linux-anvil-comp7" 62 | DOCKER_IMAGE="quay.io/condaforge/linux-anvil-comp7" 63 | fi 64 | else 65 | DOCKER_IMAGE="$(cat "${FEEDSTOCK_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 quay.io/condaforge/linux-anvil-comp7 )" 66 | fi 67 | fi 68 | 69 | mkdir -p "$ARTIFACTS" 70 | DONE_CANARY="$ARTIFACTS/conda-forge-build-done-${CONFIG}" 71 | rm -f "$DONE_CANARY" 72 | 73 | # Allow people to specify extra default arguments to `docker run` (e.g. `--rm`) 74 | DOCKER_RUN_ARGS="${CONDA_FORGE_DOCKER_RUN_ARGS}" 75 | if [ -z "${CI}" ]; then 76 | DOCKER_RUN_ARGS="-it ${DOCKER_RUN_ARGS}" 77 | fi 78 | 79 | ( endgroup "Configure Docker" ) 2> /dev/null 80 | 81 | ( startgroup "Start Docker" ) 2> /dev/null 82 | 83 | export UPLOAD_PACKAGES="${UPLOAD_PACKAGES:-True}" 84 | export IS_PR_BUILD="${IS_PR_BUILD:-False}" 85 | docker pull "${DOCKER_IMAGE}" 86 | docker run ${DOCKER_RUN_ARGS} \ 87 | -v "${RECIPE_ROOT}":/home/conda/recipe_root:rw,z,delegated \ 88 | -v "${FEEDSTOCK_ROOT}":/home/conda/feedstock_root:rw,z,delegated \ 89 | -e CONFIG \ 90 | -e HOST_USER_ID \ 91 | -e UPLOAD_PACKAGES \ 92 | -e IS_PR_BUILD \ 93 | -e GIT_BRANCH \ 94 | -e UPLOAD_ON_BRANCH \ 95 | -e CI \ 96 | -e FEEDSTOCK_NAME \ 97 | -e CPU_COUNT \ 98 | -e BUILD_WITH_CONDA_DEBUG \ 99 | -e BUILD_OUTPUT_ID \ 100 | -e flow_run_id \ 101 | -e remote_url \ 102 | -e sha \ 103 | -e BINSTAR_TOKEN \ 104 | -e FEEDSTOCK_TOKEN \ 105 | -e STAGING_BINSTAR_TOKEN \ 106 | "${DOCKER_IMAGE}" \ 107 | bash \ 108 | "/home/conda/feedstock_root/${PROVIDER_DIR}/build_steps.sh" 109 | 110 | # verify that the end of the script was reached 111 | test -f "$DONE_CANARY" 112 | 113 | # This closes the last group opened in `build_steps.sh` 114 | ( endgroup "Final checks" ) 2> /dev/null 115 | -------------------------------------------------------------------------------- /.scripts/run_osx_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 
# -*- mode: jinja-shell -*- 4 | 5 | source .scripts/logging_utils.sh 6 | 7 | set -xe 8 | 9 | MINIFORGE_HOME="${MINIFORGE_HOME:-${HOME}/miniforge3}" 10 | MINIFORGE_HOME="${MINIFORGE_HOME%/}" # remove trailing slash 11 | export CONDA_BLD_PATH="${CONDA_BLD_PATH:-${MINIFORGE_HOME}/conda-bld}" 12 | 13 | ( startgroup "Provisioning base env with micromamba" ) 2> /dev/null 14 | MICROMAMBA_VERSION="1.5.10-0" 15 | if [[ "$(uname -m)" == "arm64" ]]; then 16 | osx_arch="osx-arm64" 17 | else 18 | osx_arch="osx-64" 19 | fi 20 | MICROMAMBA_URL="https://github.com/mamba-org/micromamba-releases/releases/download/${MICROMAMBA_VERSION}/micromamba-${osx_arch}" 21 | MAMBA_ROOT_PREFIX="${MINIFORGE_HOME}-micromamba-$(date +%s)" 22 | echo "Downloading micromamba ${MICROMAMBA_VERSION}" 23 | micromamba_exe="$(mktemp -d)/micromamba" 24 | curl -L -o "${micromamba_exe}" "${MICROMAMBA_URL}" 25 | chmod +x "${micromamba_exe}" 26 | echo "Creating environment" 27 | "${micromamba_exe}" create --yes --root-prefix "${MAMBA_ROOT_PREFIX}" --prefix "${MINIFORGE_HOME}" \ 28 | --channel conda-forge \ 29 | pip python=3.12 conda-build conda-libmamba-solver conda-forge-ci-setup=4 "conda-build>=24.1" 30 | echo "Moving pkgs cache from ${MAMBA_ROOT_PREFIX} to ${MINIFORGE_HOME}" 31 | mv "${MAMBA_ROOT_PREFIX}/pkgs" "${MINIFORGE_HOME}" 32 | echo "Cleaning up micromamba" 33 | rm -rf "${MAMBA_ROOT_PREFIX}" "${micromamba_exe}" || true 34 | ( endgroup "Provisioning base env with micromamba" ) 2> /dev/null 35 | 36 | ( startgroup "Configuring conda" ) 2> /dev/null 37 | echo "Activating environment" 38 | source "${MINIFORGE_HOME}/etc/profile.d/conda.sh" 39 | conda activate base 40 | export CONDA_SOLVER="libmamba" 41 | export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 42 | 43 | 44 | 45 | 46 | 47 | echo -e "\n\nSetting up the condarc and mangling the compiler." 
48 | setup_conda_rc ./ ./recipe ./.ci_support/${CONFIG}.yaml 49 | 50 | if [[ "${CI:-}" != "" ]]; then 51 | mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml 52 | fi 53 | 54 | if [[ "${CI:-}" != "" ]]; then 55 | echo -e "\n\nMangling homebrew in the CI to avoid conflicts." 56 | /usr/bin/sudo mangle_homebrew 57 | /usr/bin/sudo -k 58 | else 59 | echo -e "\n\nNot mangling homebrew as we are not running in CI" 60 | fi 61 | 62 | if [[ "${sha:-}" == "" ]]; then 63 | sha=$(git rev-parse HEAD) 64 | fi 65 | 66 | echo -e "\n\nRunning the build setup script." 67 | source run_conda_forge_build_setup 68 | 69 | 70 | 71 | ( endgroup "Configuring conda" ) 2> /dev/null 72 | 73 | echo -e "\n\nMaking the build clobber file" 74 | make_build_number ./ ./recipe ./.ci_support/${CONFIG}.yaml 75 | 76 | if [[ -f LICENSE.txt ]]; then 77 | cp LICENSE.txt "recipe/recipe-scripts-license.txt" 78 | fi 79 | 80 | if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then 81 | if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then 82 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" 83 | fi 84 | conda debug ./recipe -m ./.ci_support/${CONFIG}.yaml \ 85 | ${EXTRA_CB_OPTIONS:-} \ 86 | --clobber-file ./.ci_support/clobber_${CONFIG}.yaml 87 | 88 | # Drop into an interactive shell 89 | /bin/bash 90 | else 91 | 92 | if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]]; then 93 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" 94 | fi 95 | 96 | CONDA_SOLVER=libmamba conda-build ./recipe -m ./.ci_support/${CONFIG}.yaml \ 97 | --suppress-variables ${EXTRA_CB_OPTIONS:-} \ 98 | --clobber-file ./.ci_support/clobber_${CONFIG}.yaml \ 99 | --extra-meta flow_run_id="$flow_run_id" remote_url="$remote_url" sha="$sha" 100 | 101 | ( startgroup "Inspecting artifacts" ) 2> /dev/null 102 | 103 | # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 104 | command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir ./recipe -m ./.ci_support/${CONFIG}.yaml || echo "inspect_artifacts 
needs conda-forge-ci-setup >=4.9.4" 105 | 106 | ( endgroup "Inspecting artifacts" ) 2> /dev/null 107 | ( startgroup "Validating outputs" ) 2> /dev/null 108 | 109 | validate_recipe_outputs "${FEEDSTOCK_NAME}" 110 | 111 | ( endgroup "Validating outputs" ) 2> /dev/null 112 | 113 | ( startgroup "Uploading packages" ) 2> /dev/null 114 | 115 | if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then 116 | upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" ./ ./recipe ./.ci_support/${CONFIG}.yaml 117 | fi 118 | 119 | ( endgroup "Uploading packages" ) 2> /dev/null 120 | fi 121 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | BSD-3-Clause license 2 | Copyright (c) 2015-2022, conda-forge contributors 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 3. Neither the name of the copyright holder nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 20 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR 21 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 22 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 23 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 24 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 25 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 26 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 27 | DAMAGE. 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | About tensorflow-feedstock 2 | ========================== 3 | 4 | Feedstock license: [BSD-3-Clause](https://github.com/conda-forge/tensorflow-feedstock/blob/main/LICENSE.txt) 5 | 6 | Home: http://tensorflow.org/ 7 | 8 | Package license: Apache-2.0 9 | 10 | Summary: TensorFlow is an end-to-end open source platform for machine learning. 11 | 12 | Development: https://github.com/tensorflow/tensorflow 13 | 14 | Documentation: https://www.tensorflow.org/get_started/get_started 15 | 16 | TensorFlow offers multiple levels of abstraction so you can choose the 17 | right one for your needs. Build and train models by using the high-level 18 | Keras API, which makes getting started with TensorFlow and machine learning 19 | easy. 20 | 21 | 22 | Current build status 23 | ==================== 24 | 25 | 26 | 27 | 28 | 29 | 30 | 93 | 94 |
Azure 31 |
32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 46 | 47 | 48 | 53 | 54 | 55 | 60 | 61 | 62 | 67 | 68 | 69 | 74 | 75 | 76 | 81 | 82 | 83 | 88 | 89 | 90 |
VariantStatus
linux_64_cuda_compiler_version12.8microarch_level1 42 | 43 | variant 44 | 45 |
linux_64_cuda_compiler_version12.8microarch_level3 49 | 50 | variant 51 | 52 |
linux_64_cuda_compiler_versionNonemicroarch_level1 56 | 57 | variant 58 | 59 |
linux_64_cuda_compiler_versionNonemicroarch_level3 63 | 64 | variant 65 | 66 |
linux_aarch64_cuda_compiler_versionNone 70 | 71 | variant 72 | 73 |
osx_64 77 | 78 | variant 79 | 80 |
osx_arm64 84 | 85 | variant 86 | 87 |
91 |
92 |
95 | 96 | Current release info 97 | ==================== 98 | 99 | | Name | Downloads | Version | Platforms | 100 | | --- | --- | --- | --- | 101 | | [![Conda Recipe](https://img.shields.io/badge/recipe-libtensorflow-green.svg)](https://anaconda.org/conda-forge/libtensorflow) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libtensorflow.svg)](https://anaconda.org/conda-forge/libtensorflow) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libtensorflow.svg)](https://anaconda.org/conda-forge/libtensorflow) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libtensorflow.svg)](https://anaconda.org/conda-forge/libtensorflow) | 102 | | [![Conda Recipe](https://img.shields.io/badge/recipe-libtensorflow__cc-green.svg)](https://anaconda.org/conda-forge/libtensorflow_cc) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libtensorflow_cc.svg)](https://anaconda.org/conda-forge/libtensorflow_cc) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libtensorflow_cc.svg)](https://anaconda.org/conda-forge/libtensorflow_cc) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libtensorflow_cc.svg)](https://anaconda.org/conda-forge/libtensorflow_cc) | 103 | | [![Conda Recipe](https://img.shields.io/badge/recipe-libtensorflow__framework-green.svg)](https://anaconda.org/conda-forge/libtensorflow_framework) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libtensorflow_framework.svg)](https://anaconda.org/conda-forge/libtensorflow_framework) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libtensorflow_framework.svg)](https://anaconda.org/conda-forge/libtensorflow_framework) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libtensorflow_framework.svg)](https://anaconda.org/conda-forge/libtensorflow_framework) | 104 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow-green.svg)](https://anaconda.org/conda-forge/tensorflow) | [![Conda 
Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow.svg)](https://anaconda.org/conda-forge/tensorflow) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow.svg)](https://anaconda.org/conda-forge/tensorflow) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow.svg)](https://anaconda.org/conda-forge/tensorflow) | 105 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--avx2-green.svg)](https://anaconda.org/conda-forge/tensorflow-avx2) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-avx2.svg)](https://anaconda.org/conda-forge/tensorflow-avx2) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-avx2.svg)](https://anaconda.org/conda-forge/tensorflow-avx2) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-avx2.svg)](https://anaconda.org/conda-forge/tensorflow-avx2) | 106 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--base-green.svg)](https://anaconda.org/conda-forge/tensorflow-base) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-base.svg)](https://anaconda.org/conda-forge/tensorflow-base) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-base.svg)](https://anaconda.org/conda-forge/tensorflow-base) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-base.svg)](https://anaconda.org/conda-forge/tensorflow-base) | 107 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--cpu-green.svg)](https://anaconda.org/conda-forge/tensorflow-cpu) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-cpu.svg)](https://anaconda.org/conda-forge/tensorflow-cpu) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-cpu.svg)](https://anaconda.org/conda-forge/tensorflow-cpu) | [![Conda 
Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-cpu.svg)](https://anaconda.org/conda-forge/tensorflow-cpu) | 108 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--estimator-green.svg)](https://anaconda.org/conda-forge/tensorflow-estimator) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-estimator.svg)](https://anaconda.org/conda-forge/tensorflow-estimator) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-estimator.svg)](https://anaconda.org/conda-forge/tensorflow-estimator) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-estimator.svg)](https://anaconda.org/conda-forge/tensorflow-estimator) | 109 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--gpu-green.svg)](https://anaconda.org/conda-forge/tensorflow-gpu) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-gpu.svg)](https://anaconda.org/conda-forge/tensorflow-gpu) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-gpu.svg)](https://anaconda.org/conda-forge/tensorflow-gpu) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-gpu.svg)](https://anaconda.org/conda-forge/tensorflow-gpu) | 110 | | [![Conda Recipe](https://img.shields.io/badge/recipe-tensorflow--sse3-green.svg)](https://anaconda.org/conda-forge/tensorflow-sse3) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/tensorflow-sse3.svg)](https://anaconda.org/conda-forge/tensorflow-sse3) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tensorflow-sse3.svg)](https://anaconda.org/conda-forge/tensorflow-sse3) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/tensorflow-sse3.svg)](https://anaconda.org/conda-forge/tensorflow-sse3) | 111 | 112 | Installing tensorflow 113 | ===================== 114 | 115 | Installing `tensorflow` from the `conda-forge` channel can be achieved by adding `conda-forge` to your 
channels with: 116 | 117 | ``` 118 | conda config --add channels conda-forge 119 | conda config --set channel_priority strict 120 | ``` 121 | 122 | Once the `conda-forge` channel has been enabled, `libtensorflow, libtensorflow_cc, libtensorflow_framework, tensorflow, tensorflow-avx2, tensorflow-base, tensorflow-cpu, tensorflow-estimator, tensorflow-gpu, tensorflow-sse3` can be installed with `conda`: 123 | 124 | ``` 125 | conda install libtensorflow libtensorflow_cc libtensorflow_framework tensorflow tensorflow-avx2 tensorflow-base tensorflow-cpu tensorflow-estimator tensorflow-gpu tensorflow-sse3 126 | ``` 127 | 128 | or with `mamba`: 129 | 130 | ``` 131 | mamba install libtensorflow libtensorflow_cc libtensorflow_framework tensorflow tensorflow-avx2 tensorflow-base tensorflow-cpu tensorflow-estimator tensorflow-gpu tensorflow-sse3 132 | ``` 133 | 134 | It is possible to list all of the versions of `libtensorflow` available on your platform with `conda`: 135 | 136 | ``` 137 | conda search libtensorflow --channel conda-forge 138 | ``` 139 | 140 | or with `mamba`: 141 | 142 | ``` 143 | mamba search libtensorflow --channel conda-forge 144 | ``` 145 | 146 | Alternatively, `mamba repoquery` may provide more information: 147 | 148 | ``` 149 | # Search all versions available on your platform: 150 | mamba repoquery search libtensorflow --channel conda-forge 151 | 152 | # List packages depending on `libtensorflow`: 153 | mamba repoquery whoneeds libtensorflow --channel conda-forge 154 | 155 | # List dependencies of `libtensorflow`: 156 | mamba repoquery depends libtensorflow --channel conda-forge 157 | ``` 158 | 159 | 160 | About conda-forge 161 | ================= 162 | 163 | [![Powered by 164 | NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org) 165 | 166 | conda-forge is a community-led conda channel of installable packages. 
167 | In order to provide high-quality builds, the process has been automated into the 168 | conda-forge GitHub organization. The conda-forge organization contains one repository 169 | for each of the installable packages. Such a repository is known as a *feedstock*. 170 | 171 | A feedstock is made up of a conda recipe (the instructions on what and how to build 172 | the package) and the necessary configurations for automatic building using freely 173 | available continuous integration services. Thanks to the awesome service provided by 174 | [Azure](https://azure.microsoft.com/en-us/services/devops/), [GitHub](https://github.com/), 175 | [CircleCI](https://circleci.com/), [AppVeyor](https://www.appveyor.com/), 176 | [Drone](https://cloud.drone.io/welcome), and [TravisCI](https://travis-ci.com/) 177 | it is possible to build and upload installable packages to the 178 | [conda-forge](https://anaconda.org/conda-forge) [anaconda.org](https://anaconda.org/) 179 | channel for Linux, Windows and OSX respectively. 180 | 181 | To manage the continuous integration and simplify feedstock maintenance, 182 | [conda-smithy](https://github.com/conda-forge/conda-smithy) has been developed. 183 | Using the ``conda-forge.yml`` within this repository, it is possible to re-render all of 184 | this feedstock's supporting files (e.g. the CI configuration files) with ``conda smithy rerender``. 185 | 186 | For more information, please check the [conda-forge documentation](https://conda-forge.org/docs/). 187 | 188 | Terminology 189 | =========== 190 | 191 | **feedstock** - the conda recipe (raw material), supporting scripts and CI configuration. 192 | 193 | **conda-smithy** - the tool which helps orchestrate the feedstock. 194 | Its primary use is in the construction of the CI ``.yml`` files 195 | and simplify the management of *many* feedstocks. 
196 | 197 | **conda-forge** - the place where the feedstock and smithy live and work to 198 | produce the finished article (built conda distributions) 199 | 200 | 201 | Updating tensorflow-feedstock 202 | ============================= 203 | 204 | If you would like to improve the tensorflow recipe or build a new 205 | package version, please fork this repository and submit a PR. Upon submission, 206 | your changes will be run on the appropriate platforms to give the reviewer an 207 | opportunity to confirm that the changes result in a successful build. Once 208 | merged, the recipe will be re-built and uploaded automatically to the 209 | `conda-forge` channel, whereupon the built conda packages will be available for 210 | everybody to install and use from the `conda-forge` channel. 211 | Note that all branches in the conda-forge/tensorflow-feedstock are 212 | immediately built and any created packages are uploaded, so PRs should be based 213 | on branches in forks, and branches in the main repository should only be used to 214 | build distinct package versions. 215 | 216 | In order to produce a uniquely identifiable distribution: 217 | * If the version of a package **is not** being increased, please add or increase 218 | the [``build/number``](https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#build-number-and-string). 219 | * If the version of a package **is** being increased, please remember to return 220 | the [``build/number``](https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#build-number-and-string) 221 | back to 0. 
222 | 223 | Feedstock Maintainers 224 | ===================== 225 | 226 | * [@farhantejani](https://github.com/farhantejani/) 227 | * [@ghego](https://github.com/ghego/) 228 | * [@h-vetinari](https://github.com/h-vetinari/) 229 | * [@hajapy](https://github.com/hajapy/) 230 | * [@hmaarrfk](https://github.com/hmaarrfk/) 231 | * [@isuruf](https://github.com/isuruf/) 232 | * [@jschueller](https://github.com/jschueller/) 233 | * [@ngam](https://github.com/ngam/) 234 | * [@njzjz](https://github.com/njzjz/) 235 | * [@waitingkuo](https://github.com/waitingkuo/) 236 | * [@wolfv](https://github.com/wolfv/) 237 | * [@xhochy](https://github.com/xhochy/) 238 | 239 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: yaml -*- 4 | 5 | stages: 6 | - stage: Check 7 | jobs: 8 | - job: Skip 9 | pool: 10 | vmImage: 'ubuntu-22.04' 11 | variables: 12 | DECODE_PERCENTS: 'false' 13 | RET: 'true' 14 | steps: 15 | - checkout: self 16 | fetchDepth: '2' 17 | - bash: | 18 | git_log=`git log --max-count=1 --skip=1 --pretty=format:"%B" | tr "\n" " "` 19 | echo "##vso[task.setvariable variable=log]$git_log" 20 | displayName: Obtain commit message 21 | - bash: echo "##vso[task.setvariable variable=RET]false" 22 | condition: and(eq(variables['Build.Reason'], 'PullRequest'), or(contains(variables.log, '[skip azp]'), contains(variables.log, '[azp skip]'), contains(variables.log, '[skip ci]'), contains(variables.log, '[ci skip]'))) 23 | displayName: Skip build? 
24 | - bash: echo "##vso[task.setvariable variable=start_main;isOutput=true]$RET" 25 | name: result 26 | displayName: Export result 27 | - stage: Build 28 | condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true')) 29 | dependsOn: Check 30 | jobs: 31 | - template: ./.azure-pipelines/azure-pipelines-osx.yml -------------------------------------------------------------------------------- /build-locally.py: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | """exec" "python3" "$0" "$@" #""" # fmt: off # fmt: on 3 | # 4 | # This file has been generated by conda-smithy in order to build the recipe 5 | # locally. 6 | # 7 | # The line above this comment is a bash / sh / zsh guard 8 | # to stop people from running it with the wrong interpreter 9 | import glob 10 | import os 11 | import platform 12 | import subprocess 13 | import sys 14 | from argparse import ArgumentParser 15 | 16 | 17 | def setup_environment(ns): 18 | os.environ["CONFIG"] = ns.config 19 | os.environ["UPLOAD_PACKAGES"] = "False" 20 | os.environ["IS_PR_BUILD"] = "True" 21 | if ns.debug: 22 | os.environ["BUILD_WITH_CONDA_DEBUG"] = "1" 23 | if ns.output_id: 24 | os.environ["BUILD_OUTPUT_ID"] = ns.output_id 25 | if "MINIFORGE_HOME" not in os.environ: 26 | os.environ["MINIFORGE_HOME"] = os.path.join( 27 | os.path.dirname(__file__), "miniforge3" 28 | ) 29 | 30 | # The default cache location might not be writable using docker on macOS. 
31 | if ns.config.startswith("linux") and platform.system() == "Darwin": 32 | os.environ["CONDA_FORGE_DOCKER_RUN_ARGS"] = ( 33 | os.environ.get("CONDA_FORGE_DOCKER_RUN_ARGS", "") 34 | + " -e RATTLER_CACHE_DIR=/tmp/rattler_cache" 35 | ) 36 | 37 | 38 | def run_docker_build(ns): 39 | script = ".scripts/run_docker_build.sh" 40 | subprocess.check_call([script]) 41 | 42 | 43 | def run_osx_build(ns): 44 | script = ".scripts/run_osx_build.sh" 45 | subprocess.check_call([script]) 46 | 47 | 48 | def run_win_build(ns): 49 | script = ".scripts/run_win_build.bat" 50 | subprocess.check_call(["cmd", "/D", "/Q", "/C", f"CALL {script}"]) 51 | 52 | 53 | def verify_config(ns): 54 | choices_filter = ns.filter or "*" 55 | valid_configs = { 56 | os.path.basename(f)[:-5] 57 | for f in glob.glob(f".ci_support/{choices_filter}.yaml") 58 | } 59 | if choices_filter != "*": 60 | print(f"filtering for '{choices_filter}.yaml' configs") 61 | print(f"valid configs are {valid_configs}") 62 | if ns.config in valid_configs: 63 | print("Using " + ns.config + " configuration") 64 | return 65 | elif len(valid_configs) == 1: 66 | ns.config = valid_configs.pop() 67 | print("Found " + ns.config + " configuration") 68 | elif ns.config is None: 69 | print("config not selected, please choose from the following:\n") 70 | selections = list(enumerate(sorted(valid_configs), 1)) 71 | for i, c in selections: 72 | print(f"{i}. {c}") 73 | try: 74 | s = input("\n> ") 75 | except KeyboardInterrupt: 76 | print("\nno option selected, bye!", file=sys.stderr) 77 | sys.exit(1) 78 | idx = int(s) - 1 79 | ns.config = selections[idx][1] 80 | print(f"selected {ns.config}") 81 | else: 82 | raise ValueError("config " + ns.config + " is not valid") 83 | if ( 84 | ns.config.startswith("osx") 85 | and platform.system() == "Darwin" 86 | and not os.environ.get("OSX_SDK_DIR") 87 | ): 88 | raise RuntimeError( 89 | "Need OSX_SDK_DIR env variable set. 
Run 'export OSX_SDK_DIR=$PWD/SDKs' " 90 | "to download the SDK automatically to '$PWD/SDKs/MacOSX.sdk'. " 91 | "Note: OSX_SDK_DIR must be set to an absolute path. " 92 | "Setting this variable implies agreement to the licensing terms of the SDK by Apple." 93 | ) 94 | 95 | 96 | def main(args=None): 97 | p = ArgumentParser("build-locally") 98 | p.add_argument("config", default=None, nargs="?") 99 | p.add_argument( 100 | "--filter", 101 | default=None, 102 | help="Glob string to filter which build choices are presented in interactive mode.", 103 | ) 104 | p.add_argument( 105 | "--debug", 106 | action="store_true", 107 | help="Setup debug environment using `conda debug`", 108 | ) 109 | p.add_argument("--output-id", help="If running debug, specify the output to setup.") 110 | 111 | ns = p.parse_args(args=args) 112 | verify_config(ns) 113 | setup_environment(ns) 114 | 115 | try: 116 | if ns.config.startswith("linux") or ( 117 | ns.config.startswith("osx") and platform.system() == "Linux" 118 | ): 119 | run_docker_build(ns) 120 | elif ns.config.startswith("osx"): 121 | run_osx_build(ns) 122 | elif ns.config.startswith("win"): 123 | run_win_build(ns) 124 | finally: 125 | recipe_license_file = os.path.join("recipe", "recipe-scripts-license.txt") 126 | if os.path.exists(recipe_license_file): 127 | os.remove(recipe_license_file) 128 | 129 | 130 | if __name__ == "__main__": 131 | main() 132 | -------------------------------------------------------------------------------- /conda-forge.yml: -------------------------------------------------------------------------------- 1 | azure: 2 | free_disk_space: true 3 | settings_linux: 4 | swapfile_size: 10GiB 5 | build_platform: 6 | linux_aarch64: linux_64 7 | osx_arm64: osx_64 8 | conda_build: 9 | pkg_format: '2' 10 | conda_build_tool: conda-build+conda-libmamba-solver 11 | conda_forge_output_validation: true 12 | github: 13 | branch_name: main 14 | tooling_branch_name: main 15 | github_actions: 16 | self_hosted: true 17 | 
timeout_minutes: 720 18 | triggers: 19 | - push 20 | - pull_request 21 | provider: 22 | linux_64: github_actions 23 | linux_aarch64: default 24 | test: native_and_emulated 25 | -------------------------------------------------------------------------------- /recipe/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2019 The TensorFlow Authors. All rights reserved. 2 | 3 | Apache License 4 | Version 2.0, January 2004 5 | http://www.apache.org/licenses/ 6 | 7 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 8 | 9 | 1. Definitions. 10 | 11 | "License" shall mean the terms and conditions for use, reproduction, 12 | and distribution as defined by Sections 1 through 9 of this document. 13 | 14 | "Licensor" shall mean the copyright owner or entity authorized by 15 | the copyright owner that is granting the License. 16 | 17 | "Legal Entity" shall mean the union of the acting entity and all 18 | other entities that control, are controlled by, or are under common 19 | control with that entity. For the purposes of this definition, 20 | "control" means (i) the power, direct or indirect, to cause the 21 | direction or management of such entity, whether by contract or 22 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 23 | outstanding shares, or (iii) beneficial ownership of such entity. 24 | 25 | "You" (or "Your") shall mean an individual or Legal Entity 26 | exercising permissions granted by this License. 27 | 28 | "Source" form shall mean the preferred form for making modifications, 29 | including but not limited to software source code, documentation 30 | source, and configuration files. 31 | 32 | "Object" form shall mean any form resulting from mechanical 33 | transformation or translation of a Source form, including but 34 | not limited to compiled object code, generated documentation, 35 | and conversions to other media types. 
36 | 37 | "Work" shall mean the work of authorship, whether in Source or 38 | Object form, made available under the License, as indicated by a 39 | copyright notice that is included in or attached to the work 40 | (an example is provided in the Appendix below). 41 | 42 | "Derivative Works" shall mean any work, whether in Source or Object 43 | form, that is based on (or derived from) the Work and for which the 44 | editorial revisions, annotations, elaborations, or other modifications 45 | represent, as a whole, an original work of authorship. For the purposes 46 | of this License, Derivative Works shall not include works that remain 47 | separable from, or merely link (or bind by name) to the interfaces of, 48 | the Work and Derivative Works thereof. 49 | 50 | "Contribution" shall mean any work of authorship, including 51 | the original version of the Work and any modifications or additions 52 | to that Work or Derivative Works thereof, that is intentionally 53 | submitted to Licensor for inclusion in the Work by the copyright owner 54 | or by an individual or Legal Entity authorized to submit on behalf of 55 | the copyright owner. For the purposes of this definition, "submitted" 56 | means any form of electronic, verbal, or written communication sent 57 | to the Licensor or its representatives, including but not limited to 58 | communication on electronic mailing lists, source code control systems, 59 | and issue tracking systems that are managed by, or on behalf of, the 60 | Licensor for the purpose of discussing and improving the Work, but 61 | excluding communication that is conspicuously marked or otherwise 62 | designated in writing by the copyright owner as "Not a Contribution." 63 | 64 | "Contributor" shall mean Licensor and any individual or Legal Entity 65 | on behalf of whom a Contribution has been received by Licensor and 66 | subsequently incorporated within the Work. 67 | 68 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 69 | this License, each Contributor hereby grants to You a perpetual, 70 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 71 | copyright license to reproduce, prepare Derivative Works of, 72 | publicly display, publicly perform, sublicense, and distribute the 73 | Work and such Derivative Works in Source or Object form. 74 | 75 | 3. Grant of Patent License. Subject to the terms and conditions of 76 | this License, each Contributor hereby grants to You a perpetual, 77 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 78 | (except as stated in this section) patent license to make, have made, 79 | use, offer to sell, sell, import, and otherwise transfer the Work, 80 | where such license applies only to those patent claims licensable 81 | by such Contributor that are necessarily infringed by their 82 | Contribution(s) alone or by combination of their Contribution(s) 83 | with the Work to which such Contribution(s) was submitted. If You 84 | institute patent litigation against any entity (including a 85 | cross-claim or counterclaim in a lawsuit) alleging that the Work 86 | or a Contribution incorporated within the Work constitutes direct 87 | or contributory patent infringement, then any patent licenses 88 | granted to You under this License for that Work shall terminate 89 | as of the date such litigation is filed. 90 | 91 | 4. Redistribution. 
You may reproduce and distribute copies of the 92 | Work or Derivative Works thereof in any medium, with or without 93 | modifications, and in Source or Object form, provided that You 94 | meet the following conditions: 95 | 96 | (a) You must give any other recipients of the Work or 97 | Derivative Works a copy of this License; and 98 | 99 | (b) You must cause any modified files to carry prominent notices 100 | stating that You changed the files; and 101 | 102 | (c) You must retain, in the Source form of any Derivative Works 103 | that You distribute, all copyright, patent, trademark, and 104 | attribution notices from the Source form of the Work, 105 | excluding those notices that do not pertain to any part of 106 | the Derivative Works; and 107 | 108 | (d) If the Work includes a "NOTICE" text file as part of its 109 | distribution, then any Derivative Works that You distribute must 110 | include a readable copy of the attribution notices contained 111 | within such NOTICE file, excluding those notices that do not 112 | pertain to any part of the Derivative Works, in at least one 113 | of the following places: within a NOTICE text file distributed 114 | as part of the Derivative Works; within the Source form or 115 | documentation, if provided along with the Derivative Works; or, 116 | within a display generated by the Derivative Works, if and 117 | wherever such third-party notices normally appear. The contents 118 | of the NOTICE file are for informational purposes only and 119 | do not modify the License. You may add Your own attribution 120 | notices within Derivative Works that You distribute, alongside 121 | or as an addendum to the NOTICE text from the Work, provided 122 | that such additional attribution notices cannot be construed 123 | as modifying the License. 
124 | 125 | You may add Your own copyright statement to Your modifications and 126 | may provide additional or different license terms and conditions 127 | for use, reproduction, or distribution of Your modifications, or 128 | for any such Derivative Works as a whole, provided Your use, 129 | reproduction, and distribution of the Work otherwise complies with 130 | the conditions stated in this License. 131 | 132 | 5. Submission of Contributions. Unless You explicitly state otherwise, 133 | any Contribution intentionally submitted for inclusion in the Work 134 | by You to the Licensor shall be under the terms and conditions of 135 | this License, without any additional terms or conditions. 136 | Notwithstanding the above, nothing herein shall supersede or modify 137 | the terms of any separate license agreement you may have executed 138 | with Licensor regarding such Contributions. 139 | 140 | 6. Trademarks. This License does not grant permission to use the trade 141 | names, trademarks, service marks, or product names of the Licensor, 142 | except as required for reasonable and customary use in describing the 143 | origin of the Work and reproducing the content of the NOTICE file. 144 | 145 | 7. Disclaimer of Warranty. Unless required by applicable law or 146 | agreed to in writing, Licensor provides the Work (and each 147 | Contributor provides its Contributions) on an "AS IS" BASIS, 148 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 149 | implied, including, without limitation, any warranties or conditions 150 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 151 | PARTICULAR PURPOSE. You are solely responsible for determining the 152 | appropriateness of using or redistributing the Work and assume any 153 | risks associated with Your exercise of permissions under this License. 154 | 155 | 8. Limitation of Liability. 
In no event and under no legal theory, 156 | whether in tort (including negligence), contract, or otherwise, 157 | unless required by applicable law (such as deliberate and grossly 158 | negligent acts) or agreed to in writing, shall any Contributor be 159 | liable to You for damages, including any direct, indirect, special, 160 | incidental, or consequential damages of any character arising as a 161 | result of this License or out of the use or inability to use the 162 | Work (including but not limited to damages for loss of goodwill, 163 | work stoppage, computer failure or malfunction, or any and all 164 | other commercial damages or losses), even if such Contributor 165 | has been advised of the possibility of such damages. 166 | 167 | 9. Accepting Warranty or Additional Liability. While redistributing 168 | the Work or Derivative Works thereof, You may choose to offer, 169 | and charge a fee for, acceptance of support, warranty, indemnity, 170 | or other liability obligations and/or rights consistent with this 171 | License. However, in accepting such obligations, You may act only 172 | on Your own behalf and on Your sole responsibility, not on behalf 173 | of any other Contributor, and only if You agree to indemnify, 174 | defend, and hold each Contributor harmless for any liability 175 | incurred by, or claims asserted against, such Contributor by reason 176 | of your accepting any such warranty or additional liability. 177 | 178 | END OF TERMS AND CONDITIONS 179 | 180 | APPENDIX: How to apply the Apache License to your work. 181 | 182 | To apply the Apache License to your work, attach the following 183 | boilerplate notice, with the fields enclosed by brackets "[]" 184 | replaced with your own identifying information. (Don't include 185 | the brackets!) The text should be enclosed in the appropriate 186 | comment syntax for the file format. 
We also recommend that a 187 | file or class name and description of purpose be included on the 188 | same "printed page" as the copyright notice for easier 189 | identification within third-party archives. 190 | 191 | Copyright [yyyy] [name of copyright owner] 192 | 193 | Licensed under the Apache License, Version 2.0 (the "License"); 194 | you may not use this file except in compliance with the License. 195 | You may obtain a copy of the License at 196 | 197 | http://www.apache.org/licenses/LICENSE-2.0 198 | 199 | Unless required by applicable law or agreed to in writing, software 200 | distributed under the License is distributed on an "AS IS" BASIS, 201 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 202 | See the License for the specific language governing permissions and 203 | limitations under the License. 204 | -------------------------------------------------------------------------------- /recipe/README.md: -------------------------------------------------------------------------------- 1 | To build a conda TensorFlow package with or without GPU support you can use 2 | docker and the `build-locally.py` script. 3 | 4 | 1. Install docker. Ensure that the following command succeeds: 5 | 6 | ```bash 7 | docker run hello-world 8 | ``` 9 | 10 | 2. Build a specific version with the command 11 | ```bash 12 | python build-locally.py 13 | ``` 14 | 15 | 3. Generally speaking, this package takes too long to compile on any of our CI 16 | resources. One should follow CFEP-03 to package this feedstock. 17 | 18 | The following script may help build all cuda versions sequentially: 19 | ```bash 20 | #!/usr/bin/env bash 21 | 22 | set -ex 23 | 24 | docker system prune --force 25 | configs=$(find .ci_support/ -type f -name 'linux_64_*' -printf "%p ") 26 | 27 | # Assuming a powerful enough machine with many cores 28 | # 10 seems to be a good point where things don't run out of RAM too much.
29 | export CPU_COUNT=10 30 | 31 | mkdir -p build_artifacts 32 | 33 | for config_filename in $configs; do 34 | filename=$(basename ${config_filename}) 35 | config=${filename%.*} 36 | if [ -f build_artifacts/conda-forge-build-done-${config} ]; then 37 | echo skipped $config 38 | continue 39 | fi 40 | 41 | python build-locally.py $config | tee build_artifacts/${config}-log.txt 42 | 43 | if [ ! -f build_artifacts/conda-forge-build-done-${config} ]; then 44 | echo "it seems there was a build failure. I'm going to stop now." 45 | echo The failure seems to have originated from 46 | echo ${config} 47 | exit 1 48 | fi 49 | # docker images get quite big clean them up after each build to save your disk.... 50 | docker system prune --force 51 | done 52 | 53 | zip build_artifacts/log_files.zip build_artifacts/*-log.txt 54 | ``` 55 | -------------------------------------------------------------------------------- /recipe/add_py_toolchain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Create a Python toolchain in the current working directory. 4 | 5 | mkdir -p py_toolchain 6 | cp $RECIPE_DIR/py_toolchain.bzl py_toolchain/BUILD 7 | sed -i "s;@@SRC_DIR@@;$SRC_DIR;" py_toolchain/BUILD 8 | 9 | cat > python.shebang <> .bazelrc <.bzl 38 | 39 | # Exceptions and TODOs: 40 | # Needs a bazel build: 41 | # com_google_absl 42 | # Build failures in tensorflow/core/platform/s3/aws_crypto.cc 43 | # boringssl (i.e. 
system openssl) 44 | # Most importantly: Write a patch that uses system LLVM libs for sure as well as MLIR and oneDNN/mkldnn 45 | # TODO(check): 46 | # absl_py 47 | # com_github_googleapis_googleapis 48 | # com_github_googlecloudplatform_google_cloud_cpp 49 | # Needs c++17, try on linux 50 | # com_googlesource_code_re2 51 | export TF_SYSTEM_LIBS=" 52 | astor_archive 53 | astunparse_archive 54 | boringssl 55 | com_github_googlecloudplatform_google_cloud_cpp 56 | com_github_grpc_grpc 57 | com_google_absl 58 | com_google_protobuf 59 | curl 60 | cython 61 | dill_archive 62 | flatbuffers 63 | gast_archive 64 | gif 65 | icu 66 | libjpeg_turbo 67 | org_sqlite 68 | png 69 | pybind11 70 | snappy 71 | zlib 72 | " 73 | sed -i -e "s/GRPCIO_VERSION/${libgrpc}/" tensorflow/tools/pip_package/setup.py 74 | sed -i -e "s/<6.0.0dev/<7.0.0dev/g" tensorflow/tools/pip_package/setup.py 75 | 76 | # do not build with MKL support 77 | export TF_NEED_MKL=0 78 | export BAZEL_MKL_OPT="" 79 | 80 | mkdir -p ./bazel_output_base 81 | export BAZEL_OPTS="" 82 | # Set this to something as otherwise, it would include CFLAGS which itself contains a host path and this then breaks bazel's include path validation. 83 | if [[ "${target_platform}" != *-64 ]]; then 84 | export CC_OPT_FLAGS="-O2" 85 | elif [[ "${microarch_level}" == "1" ]]; then 86 | export CC_OPT_FLAGS="-O2 -march=nocona -mtune=haswell" 87 | else 88 | export CC_OPT_FLAGS="-O2 -march=x86-64-v${microarch_level}" 89 | fi 90 | 91 | # Quick debug: 92 | # cp -r ${RECIPE_DIR}/build.sh . 
&& bazel clean && bash -x build.sh --logging=6 | tee log.txt 93 | # Dependency graph: 94 | # bazel query 'deps(//tensorflow/tools/lib_package:libtensorflow)' --output graph > graph.in 95 | if [[ "${target_platform}" == osx-* ]]; then 96 | export LDFLAGS="${LDFLAGS} -lz -framework CoreFoundation -Xlinker -undefined -Xlinker dynamic_lookup" 97 | else 98 | export LDFLAGS="${LDFLAGS} -lrt" 99 | fi 100 | 101 | if [[ ${cuda_compiler_version} != "None" ]]; then 102 | if [ ${target_platform} == "linux-aarch64" ]; then 103 | NVARCH=sbsa 104 | elif [ ${target_platform} == "linux-64" ]; then 105 | NVARCH=x86_64 106 | else 107 | NVARCH=${ARCH} 108 | fi 109 | export LDFLAGS="${LDFLAGS} -lcusparse" 110 | export GCC_HOST_COMPILER_PATH="${GCC}" 111 | export GCC_HOST_COMPILER_PREFIX="$(dirname ${GCC})" 112 | 113 | export TF_NEED_CUDA=1 114 | export TF_CUDA_VERSION="${cuda_compiler_version}" 115 | export TF_CUDNN_VERSION="${cudnn}" 116 | export HERMETIC_CUDA_VERSION="${cuda_compiler_version}" 117 | export HERMETIC_CUDNN_VERSION="${cudnn}" 118 | export TF_NCCL_VERSION=$(pkg-config nccl --modversion | grep -Po '\d+\.\d+') 119 | 120 | export LDFLAGS="${LDFLAGS//-Wl,-z,now/-Wl,-z,lazy}" 121 | 122 | if [[ "${cuda_compiler_version}" == 12* ]]; then 123 | # TODO: add sm_100, sm_120 124 | export HERMETIC_CUDA_COMPUTE_CAPABILITIES=sm_60,sm_70,sm_75,sm_80,sm_86,sm_89,sm_90,compute_90 125 | export CUDNN_INSTALL_PATH=$PREFIX 126 | export NCCL_INSTALL_PATH=$PREFIX 127 | export CUDA_HOME="${BUILD_PREFIX}/targets/${NVARCH}-linux" 128 | export TF_CUDA_PATHS="${BUILD_PREFIX}/targets/${NVARCH}-linux,${PREFIX}/targets/${NVARCH}-linux" 129 | # XLA can only cope with a single cuda header include directory, merge both 130 | rsync -a ${PREFIX}/targets/${NVARCH}-linux/include/ ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/ 131 | 132 | # Although XLA supports a non-hermetic build, it still tries to find headers in the hermetic locations. 
133 | # We do this in the BUILD_PREFIX to not have any impact on the resulting jaxlib package. 134 | # Otherwise, these copied files would be included in the package. 135 | rm -rf ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party 136 | mkdir -p ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/gpus/cuda/extras/CUPTI 137 | cp -r ${PREFIX}/targets/${NVARCH}-linux/include ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/gpus/cuda/ 138 | cp -r ${PREFIX}/targets/${NVARCH}-linux/include ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/gpus/cuda/extras/CUPTI/ 139 | mkdir -p ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/gpus/cudnn 140 | cp ${PREFIX}/include/cudnn*.h ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/gpus/cudnn/ 141 | mkdir -p ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/nccl 142 | cp ${PREFIX}/include/nccl.h ${BUILD_PREFIX}/targets/${NVARCH}-linux/include/third_party/nccl/ 143 | rsync -a ${PREFIX}/targets/${NVARCH}-linux/lib/ ${BUILD_PREFIX}/targets/${NVARCH}-linux/lib/ 144 | mkdir -p ${BUILD_PREFIX}/targets/${NVARCH}-linux/bin 145 | ln -sf ${BUILD_PREFIX}/bin/fatbinary ${BUILD_PREFIX}/targets/${NVARCH}-linux/bin/fatbinary 146 | ln -sf ${BUILD_PREFIX}/bin/nvlink ${BUILD_PREFIX}/targets/${NVARCH}-linux/bin/nvlink 147 | ln -sf ${BUILD_PREFIX}/bin/ptxas ${BUILD_PREFIX}/targets/${NVARCH}-linux/bin/ptxas 148 | 149 | export LOCAL_CUDA_PATH="${BUILD_PREFIX}/targets/${NVARCH}-linux" 150 | export LOCAL_CUDNN_PATH="${PREFIX}" 151 | export LOCAL_NCCL_PATH="${PREFIX}" 152 | 153 | # hmaarrfk -- 2023/12/30 154 | # This logic should be safe to keep in even when the underlying issue is resolved 155 | # xref: https://github.com/conda-forge/cuda-nvcc-impl-feedstock/issues/9 156 | if [[ -x ${BUILD_PREFIX}/nvvm/bin/cicc ]]; then 157 | cp ${BUILD_PREFIX}/nvvm/bin/cicc ${BUILD_PREFIX}/bin/cicc 158 | fi 159 | 160 | # Needs GCC 13+ 161 | echo "build --define=xnn_enable_avxvnniint8=false" >> .bazelrc 
162 | 163 | else 164 | echo "unsupported cuda version." 165 | exit 1 166 | fi 167 | else 168 | export TF_NEED_CUDA=0 169 | fi 170 | 171 | gen-bazel-toolchain 172 | 173 | if [[ "${target_platform}" == "osx-64" ]]; then 174 | # Tensorflow doesn't cope yet with an explicit architecture (darwin_x86_64) on osx-64 yet. 175 | TARGET_CPU=darwin 176 | # See https://conda-forge.org/docs/maintainer/knowledge_base.html#newer-c-features-with-old-sdk 177 | export CXXFLAGS="${CXXFLAGS} -D_LIBCPP_DISABLE_AVAILABILITY" 178 | elif [[ "${target_platform}" == "linux-aarch64" ]]; then 179 | TARGET_CPU=aarch64 180 | elif [[ "${target_platform}" == "linux-x86_64" ]]; then 181 | TARGET_CPU=x86_64 182 | fi 183 | 184 | # Get rid of unwanted defaults 185 | sed -i -e "/PROTOBUF_INCLUDE_PATH/c\ " .bazelrc 186 | sed -i -e "/PREFIX/c\ " .bazelrc 187 | # Ensure .bazelrc ends in a newline 188 | echo "" >> .bazelrc 189 | 190 | if [[ "${target_platform}" == "osx-arm64" ]]; then 191 | echo "build --config=macos_arm64" >> .bazelrc 192 | # See https://conda-forge.org/docs/maintainer/knowledge_base.html#newer-c-features-with-old-sdk 193 | export CXXFLAGS="${CXXFLAGS} -D_LIBCPP_DISABLE_AVAILABILITY" 194 | fi 195 | export TF_ENABLE_XLA=1 196 | export BUILD_TARGET="//tensorflow/tools/pip_package:wheel //tensorflow/tools/lib_package:libtensorflow //tensorflow:libtensorflow_cc${SHLIB_EXT}" 197 | 198 | # Python settings 199 | export PYTHON_BIN_PATH=${PYTHON} 200 | export PYTHON_LIB_PATH=${SP_DIR} 201 | export USE_DEFAULT_PYTHON_LIB_PATH=1 202 | 203 | # additional settings 204 | export TF_NEED_OPENCL=0 205 | export TF_NEED_OPENCL_SYCL=0 206 | export TF_NEED_COMPUTECPP=0 207 | export TF_CUDA_CLANG=0 208 | if [[ "${target_platform}" == linux-* ]]; then 209 | export TF_NEED_CLANG=0 210 | fi 211 | export TF_NEED_TENSORRT=0 212 | export TF_NEED_ROCM=0 213 | export TF_NEED_MPI=0 214 | export TF_DOWNLOAD_CLANG=0 215 | export TF_SET_ANDROID_WORKSPACE=0 216 | export TF_CONFIGURE_IOS=0 217 | 218 | 219 | #bazel clean 
--expunge 220 | #bazel shutdown 221 | 222 | ./configure 223 | 224 | # Remove legacy flags set by configure that conflicts with CUDA 12's multi-directory approach. 225 | if [[ "${cuda_compiler_version}" == 12* ]]; then 226 | sed -i '/CUDA_TOOLKIT_PATH/d' .tf_configure.bazelrc 227 | fi 228 | 229 | if [[ "${build_platform}" == linux-* ]]; then 230 | $RECIPE_DIR/add_py_toolchain.sh 231 | fi 232 | 233 | cat >> .bazelrc < 3 | Date: Thu, 30 Dec 2021 06:57:09 +0200 4 | Subject: [PATCH 01/41] loosen requirements 5 | 6 | * Remove unused libclang requirement 7 | * Remove unecessary tensorflow-io-gcs-filesystem 8 | * unpin numpy 9 | 10 | Co-authored-by: Mark Harfouche 11 | Co-authored-by: ngam 12 | Co-authored-by: Uwe L. Korn 13 | --- 14 | tensorflow/tools/pip_package/setup.py | 4 ---- 15 | 1 file changed, 4 deletions(-) 16 | 17 | diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py 18 | index 8f2d06ec373..35290f2226c 100644 19 | --- a/tensorflow/tools/pip_package/setup.py 20 | +++ b/tensorflow/tools/pip_package/setup.py 21 | @@ -84,7 +84,6 @@ REQUIRED_PACKAGES = [ 22 | 'flatbuffers >= 24.3.25', 23 | 'gast >=0.2.1,!=0.5.0,!=0.5.1,!=0.5.2', 24 | 'google_pasta >= 0.1.1', 25 | - 'libclang >= 13.0.0', 26 | 'opt_einsum >= 2.3.2', 27 | 'packaging', 28 | # pylint:disable=line-too-long 29 | @@ -97,8 +96,6 @@ REQUIRED_PACKAGES = [ 30 | 'termcolor >= 1.1.0', 31 | 'typing_extensions >= 3.6.6', 32 | 'wrapt >= 1.11.0', 33 | - # TODO(b/305196096): Remove the <3.12 condition once the pkg is updated 34 | - 'tensorflow-io-gcs-filesystem >= 0.23.1 ; python_version < "3.12"', 35 | # grpcio does not build correctly on big-endian machines due to lack of 36 | # BoringSSL support. 37 | # See https://github.com/tensorflow/tensorflow/issues/17882. 38 | @@ -114,7 +111,6 @@ REQUIRED_PACKAGES = [ 39 | # 'keras >= 2.14.0rc0, < 2.15' on the release branch after the branch cut. 
40 | 'tensorboard ~= 2.19.0', 41 | 'keras >= 3.5.0', 42 | - 'numpy >= 1.26.0, < 2.2.0', 43 | 'h5py >= 3.11.0', 44 | 'ml_dtypes >= 0.5.1, < 1.0.0', 45 | ] 46 | -------------------------------------------------------------------------------- /recipe/patches/0002-Add-additional-absl_synchronization-linkage-to-gRPC.patch: -------------------------------------------------------------------------------- 1 | From c26ba3c6eb5a544a01dad89dbd726b0514ff2f2e Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. Korn" 3 | Date: Fri, 30 Jun 2023 08:39:32 +0000 4 | Subject: [PATCH 02/41] Add additional absl_synchronization linkage to gRPC 5 | 6 | --- 7 | third_party/systemlibs/grpc.BUILD | 3 +++ 8 | 1 file changed, 3 insertions(+) 9 | 10 | diff --git a/third_party/systemlibs/grpc.BUILD b/third_party/systemlibs/grpc.BUILD 11 | index 0e4e862ce4c..e794699772e 100644 12 | --- a/third_party/systemlibs/grpc.BUILD 13 | +++ b/third_party/systemlibs/grpc.BUILD 14 | @@ -10,6 +10,7 @@ cc_library( 15 | linkopts = [ 16 | "-lgrpc", 17 | "-lgpr", 18 | + "-labsl_synchronization", 19 | ], 20 | visibility = ["//visibility:public"], 21 | ) 22 | @@ -19,6 +20,7 @@ cc_library( 23 | linkopts = [ 24 | "-lgrpc++", 25 | "-lgpr", 26 | + "-labsl_synchronization", 27 | ], 28 | visibility = ["//visibility:public"], 29 | ) 30 | @@ -33,6 +35,7 @@ cc_library( 31 | linkopts = [ 32 | "-lgrpc_unsecure", 33 | "-lgpr", 34 | + "-labsl_synchronization", 35 | ], 36 | visibility = ["//visibility:public"], 37 | ) 38 | -------------------------------------------------------------------------------- /recipe/patches/0003-Fix-missing-abseil-linkages.patch: -------------------------------------------------------------------------------- 1 | From 08d13175a8a97ed97de56b89f836d9a7624d677c Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Mon, 3 Jul 2023 04:45:46 +0000 4 | Subject: [PATCH 03/41] Fix missing abseil linkages 5 | 6 | --- 7 | third_party/absl/system.absl.strings.BUILD | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD 11 | index fa9a7a84f67..d04776c98bf 100644 12 | --- a/third_party/absl/system.absl.strings.BUILD 13 | +++ b/third_party/absl/system.absl.strings.BUILD 14 | @@ -26,7 +26,7 @@ cc_library( 15 | 16 | cc_library( 17 | name = "cord", 18 | - linkopts = ["-labsl_cord"], 19 | + linkopts = ["-labsl_cord", "-labsl_cordz_functions", "-labsl_cordz_info"], 20 | deps = [ 21 | ":str_format", 22 | "//absl/container:compressed_tuple", 23 | -------------------------------------------------------------------------------- /recipe/patches/0004-Fix-protobuf_python-for-systemlibs.patch: -------------------------------------------------------------------------------- 1 | From 78fe26cde07460b7e2ebfe2b017398986ea8f10a Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Wed, 30 Aug 2023 16:58:41 +0200 4 | Subject: [PATCH 04/41] Fix protobuf_python for systemlibs 5 | 6 | --- 7 | tensorflow/workspace2.bzl | 1 + 8 | third_party/systemlibs/protobuf_python.bzl | 5 +++++ 9 | 2 files changed, 6 insertions(+) 10 | create mode 100644 third_party/systemlibs/protobuf_python.bzl 11 | 12 | diff --git a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl 13 | index 5d75ffd4a6d..fe87aac9a8c 100644 14 | --- a/tensorflow/workspace2.bzl 15 | +++ b/tensorflow/workspace2.bzl 16 | @@ -401,6 +401,7 @@ def _tf_repositories(): 17 | system_link_files = { 18 | "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl", 19 | "//third_party/systemlibs:protobuf_deps.bzl": "protobuf_deps.bzl", 20 | + "//third_party/systemlibs:protobuf_python.bzl": "python/BUILD", 21 | }, 22 | urls = tf_mirror_urls("https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip"), 23 | ) 24 | diff --git a/third_party/systemlibs/protobuf_python.bzl b/third_party/systemlibs/protobuf_python.bzl 25 | new file mode 100644 26 | index 00000000000..982617a765c 27 | --- /dev/null 28 | +++ b/third_party/systemlibs/protobuf_python.bzl 29 | @@ -0,0 +1,5 @@ 30 | +cc_library( 31 | + name = "proto_api", 32 | + # hdrs = ["google/protobuf/proto_api.h"], 33 | + visibility = ["//visibility:public"], 34 | +) 35 | -------------------------------------------------------------------------------- /recipe/patches/0005-Add-absl_log-systemlib.patch: -------------------------------------------------------------------------------- 1 | From 1bb1c61fa1e5a13e2db74dbc9c4aaf08d7a7e17f Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Wed, 30 Aug 2023 16:59:28 +0200 4 | Subject: [PATCH 05/41] Add absl_log systemlib 5 | 6 | --- 7 | third_party/absl/system.absl.log.BUILD | 22 ++++++++++++++++++++++ 8 | third_party/absl/workspace.bzl | 1 + 9 | 2 files changed, 23 insertions(+) 10 | create mode 100644 third_party/absl/system.absl.log.BUILD 11 | 12 | diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD 13 | new file mode 100644 14 | index 00000000000..0248bda0f52 15 | --- /dev/null 16 | +++ b/third_party/absl/system.absl.log.BUILD 17 | @@ -0,0 +1,22 @@ 18 | +load("@rules_cc//cc:defs.bzl", "cc_library") 19 | + 20 | +package(default_visibility = ["//visibility:public"]) 21 | + 22 | +cc_library( 23 | + name = "log", 24 | + linkopts = [ 25 | + "-labsl_log_internal_conditions", 26 | + "-labsl_log_internal_check_op", 27 | + "-labsl_log_internal_message", 28 | + "-labsl_log_internal_nullguard", 29 | + ], 30 | +) 31 | + 32 | +cc_library( 33 | + name = "check", 34 | + linkopts = [ 35 | + "-labsl_log_internal_check_op", 36 | + "-labsl_log_internal_message", 37 | + "-labsl_log_internal_nullguard", 38 | + ], 39 | +) 40 | diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl 41 | index d5973b13b39..b5197f9304b 100644 42 | --- a/third_party/absl/workspace.bzl 43 | +++ b/third_party/absl/workspace.bzl 44 | @@ -20,6 +20,7 @@ def repo(): 45 | "flags", 46 | "functional", 47 | "hash", 48 | + "log", 49 | "memory", 50 | "meta", 51 | "numeric", 52 | -------------------------------------------------------------------------------- /recipe/patches/0006-Omit-linking-to-layout_proto_cc-if-protobuf-linkage-.patch: -------------------------------------------------------------------------------- 1 | From 0d4bb8d059721a4969f8c0267ce92c753ba1e521 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Fri, 15 Sep 2023 11:06:27 +0200 4 | Subject: [PATCH 06/41] Omit linking to layout_proto_cc if protobuf linkage is 5 | shared 6 | 7 | --- 8 | tensorflow/python/BUILD | 1 - 9 | 1 file changed, 1 deletion(-) 10 | 11 | diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD 12 | index 8a781badc68..5edfb0a4846 100644 13 | --- a/tensorflow/python/BUILD 14 | +++ b/tensorflow/python/BUILD 15 | @@ -1382,7 +1382,6 @@ tf_python_pybind_extension( 16 | features = ["-layering_check"], 17 | deps = [ 18 | ":pywrap_densor_device_headers", 19 | - "//tensorflow/dtensor/proto:layout_proto_cc", 20 | "//tensorflow/python/lib/core:pybind11_lib", 21 | "//tensorflow/python/lib/core:pybind11_status_headers", 22 | "//third_party/python_runtime:headers", # buildcleaner: keep 23 | -------------------------------------------------------------------------------- /recipe/patches/0007-Fix-further-abseil-linkage.patch: -------------------------------------------------------------------------------- 1 | From 06977c1cf3f9af375fab69b189a3862e02774f44 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Mon, 18 Sep 2023 14:47:08 +0000 4 | Subject: [PATCH 07/41] Fix further abseil linkage 5 | 6 | --- 7 | third_party/absl/system.absl.base.BUILD | 14 ++++++++++++-- 8 | 1 file changed, 12 insertions(+), 2 deletions(-) 9 | 10 | diff --git a/third_party/absl/system.absl.base.BUILD b/third_party/absl/system.absl.base.BUILD 11 | index d6bf8748dee..b3e42f4dd8b 100644 12 | --- a/third_party/absl/system.absl.base.BUILD 13 | +++ b/third_party/absl/system.absl.base.BUILD 14 | @@ -17,12 +17,22 @@ package(default_visibility = ["//visibility:public"]) 15 | 16 | cc_library( 17 | name = "log_severity", 18 | - linkopts = ["-labsl_log_severity"], 19 | + linkopts = [ 20 | + "-labsl_log_severity", 21 | + "-labsl_log_internal_check_op", 22 | + "-labsl_log_internal_message", 23 | + "-labsl_log_internal_nullguard", 24 | + ], 25 | ) 26 | 27 | cc_library( 28 | name = "raw_logging_internal", 29 | - linkopts = ["-labsl_raw_logging_internal"], 30 | + linkopts = [ 31 | + "-labsl_raw_logging_internal", 32 | + "-labsl_log_internal_check_op", 33 | + "-labsl_log_internal_message", 34 | + "-labsl_log_internal_nullguard", 35 | + ], 36 | visibility = [ 37 | "//absl:__subpackages__", 38 | ], 39 | -------------------------------------------------------------------------------- /recipe/patches/0008-Add-constraint-to-pybind11-systemlib.patch: -------------------------------------------------------------------------------- 1 | From 2ca8630407e1761f3a1cad9e83ac62e5283cf24d Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Fri, 27 Oct 2023 11:20:12 +0200 4 | Subject: [PATCH 08/41] Add constraint to pybind11 systemlib 5 | 6 | --- 7 | third_party/systemlibs/pybind11.BUILD | 6 ++++++ 8 | 1 file changed, 6 insertions(+) 9 | 10 | diff --git a/third_party/systemlibs/pybind11.BUILD b/third_party/systemlibs/pybind11.BUILD 11 | index 44e7496fc25..21ec073b7a4 100644 12 | --- a/third_party/systemlibs/pybind11.BUILD 13 | +++ b/third_party/systemlibs/pybind11.BUILD 14 | @@ -6,3 +6,9 @@ cc_library( 15 | "@local_tsl//third_party/python_runtime:headers", 16 | ], 17 | ) 18 | + 19 | +# Needed by pybind11_bazel. 20 | +config_setting( 21 | + name = "osx", 22 | + constraint_values = ["@platforms//os:osx"], 23 | +) 24 | -------------------------------------------------------------------------------- /recipe/patches/0009-Different-file-ending-for-flatbuffers-LICENSE.patch: -------------------------------------------------------------------------------- 1 | From 98427157b56dcc5c1e785c5324c9fb0b3776fb3a Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. Korn" 3 | Date: Fri, 27 Oct 2023 11:21:11 +0200 4 | Subject: [PATCH 09/41] Different file ending for flatbuffers LICENSE 5 | 6 | --- 7 | third_party/flatbuffers/BUILD.system | 5 +++++ 8 | 1 file changed, 5 insertions(+) 9 | 10 | diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system 11 | index 8fe4d7a5907..297f8baf0c0 100644 12 | --- a/third_party/flatbuffers/BUILD.system 13 | +++ b/third_party/flatbuffers/BUILD.system 14 | @@ -5,6 +5,11 @@ filegroup( 15 | visibility = ["//visibility:public"], 16 | ) 17 | 18 | +filegroup( 19 | + name = "LICENSE", 20 | + visibility = ["//visibility:public"], 21 | +) 22 | + 23 | # Public flatc library to compile flatbuffer files at runtime. 
24 | cc_library( 25 | name = "flatbuffers", 26 | -------------------------------------------------------------------------------- /recipe/patches/0010-Use-correct-hermetic-python.patch: -------------------------------------------------------------------------------- 1 | From f8a71b341d0fa6442c7c05dd0c597b5d32bf39c0 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. Korn" 3 | Date: Fri, 27 Oct 2023 11:21:38 +0200 4 | Subject: [PATCH 10/41] Use correct, hermetic python 5 | 6 | --- 7 | .../tensorflow/gen_quantized_function_library.py | 5 +++++ 8 | tensorflow/python/tools/api/generator/create_python_api.py | 2 ++ 9 | 2 files changed, 7 insertions(+) 10 | 11 | diff --git a/tensorflow/compiler/mlir/quantization/tensorflow/gen_quantized_function_library.py b/tensorflow/compiler/mlir/quantization/tensorflow/gen_quantized_function_library.py 12 | index 8352b974996..7eed8b4e373 100644 13 | --- a/tensorflow/compiler/mlir/quantization/tensorflow/gen_quantized_function_library.py 14 | +++ b/tensorflow/compiler/mlir/quantization/tensorflow/gen_quantized_function_library.py 15 | @@ -14,6 +14,11 @@ 16 | # ============================================================================== 17 | """Generates the quantized function library contained header file.""" 18 | 19 | +import sys 20 | +import os 21 | +if 'PYTHON_LIB_PATH' in os.environ: 22 | + sys.path.append(os.environ['PYTHON_LIB_PATH']) 23 | + 24 | import ast 25 | import re 26 | import string 27 | diff --git a/tensorflow/python/tools/api/generator/create_python_api.py b/tensorflow/python/tools/api/generator/create_python_api.py 28 | index cd76ae73f77..c12533c9b57 100644 29 | --- a/tensorflow/python/tools/api/generator/create_python_api.py 30 | +++ b/tensorflow/python/tools/api/generator/create_python_api.py 31 | @@ -18,6 +18,8 @@ import collections 32 | import importlib 33 | import os 34 | import sys 35 | +if 'PYTHON_LIB_PATH' in os.environ: 36 | + sys.path.append(os.environ['PYTHON_LIB_PATH']) 37 | 38 | from 
tensorflow.python.tools.api.generator import doc_srcs 39 | from tensorflow.python.util import tf_decorator 40 | -------------------------------------------------------------------------------- /recipe/patches/0011-Add-well_known_types_py_pb2-to-protobuf-systemlib.patch: -------------------------------------------------------------------------------- 1 | From 795cb1ee4f72fd5e4cb85b54394bf0bc763a9694 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. Korn" 3 | Date: Wed, 15 Nov 2023 09:18:28 +0100 4 | Subject: [PATCH 11/41] Add well_known_types_py_pb2 to protobuf systemlib 5 | 6 | --- 7 | third_party/systemlibs/protobuf.BUILD | 7 +++++++ 8 | 1 file changed, 7 insertions(+) 9 | 10 | diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD 11 | index c7d940605f9..504bc81af0d 100644 12 | --- a/third_party/systemlibs/protobuf.BUILD 13 | +++ b/third_party/systemlibs/protobuf.BUILD 14 | @@ -111,3 +111,10 @@ py_library( 15 | visibility = ["//visibility:public"], 16 | deps = [dep + "_proto" for dep in proto[1][1]], 17 | ) for proto in WELL_KNOWN_PROTO_MAP.items()] 18 | + 19 | +py_proto_library( 20 | + name = "well_known_types_py_pb2", 21 | + include = ".", 22 | + srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()], 23 | + visibility = ["//visibility:public"], 24 | +) 25 | -------------------------------------------------------------------------------- /recipe/patches/0012-Add-protobuf-toolchain.patch: -------------------------------------------------------------------------------- 1 | From e6d327cbf0aaf7e60e92af92a68fb1962f5ad0bc Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Wed, 15 Nov 2023 10:32:41 +0000 4 | Subject: [PATCH 12/41] Add protobuf toolchain 5 | 6 | --- 7 | third_party/systemlibs/protobuf.BUILD | 14 ++++++++++++++ 8 | 1 file changed, 14 insertions(+) 9 | 10 | diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD 11 | index 504bc81af0d..4f105d33f84 100644 12 | --- a/third_party/systemlibs/protobuf.BUILD 13 | +++ b/third_party/systemlibs/protobuf.BUILD 14 | @@ -65,12 +65,26 @@ cc_library( 15 | visibility = ["//visibility:public"], 16 | ) 17 | 18 | +cc_library( 19 | + name = "protobuf_lite", 20 | + linkopts = ["-lprotobuf-lite"], 21 | + visibility = ["//visibility:public"], 22 | +) 23 | + 24 | cc_library( 25 | name = "protobuf_headers", 26 | linkopts = ["-lprotobuf"], 27 | visibility = ["//visibility:public"], 28 | ) 29 | 30 | +proto_lang_toolchain( 31 | + name = "cc_toolchain", 32 | + command_line = "--cpp_out=$(OUT)", 33 | + blacklisted_protos = [proto + "_proto" for proto in WELL_KNOWN_PROTO_MAP.keys()], 34 | + runtime = ":protobuf", 35 | + visibility = ["//visibility:public"], 36 | +) 37 | + 38 | cc_library( 39 | name = "protoc_lib", 40 | linkopts = ["-lprotoc"], 41 | -------------------------------------------------------------------------------- /recipe/patches/0013-fix-genproto.patch: -------------------------------------------------------------------------------- 1 | From 4d9acbf4fdf1387e43c7bab3ab4848552da3c995 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Thu, 16 Nov 2023 06:38:27 +0000 4 | Subject: [PATCH 13/41] fix genproto 5 | 6 | --- 7 | third_party/xla/xla/tsl/platform/default/build_config.bzl | 5 +---- 8 | 1 file changed, 1 insertion(+), 4 deletions(-) 9 | 10 | diff --git a/third_party/xla/xla/tsl/platform/default/build_config.bzl b/third_party/xla/xla/tsl/platform/default/build_config.bzl 11 | index dd79a03cd8a..ec2b163f3b0 100644 12 | --- a/third_party/xla/xla/tsl/platform/default/build_config.bzl 13 | +++ b/third_party/xla/xla/tsl/platform/default/build_config.bzl 14 | @@ -423,10 +423,7 @@ def py_proto_library( 15 | 16 | genproto_deps = [] 17 | for dep in deps: 18 | - if dep != "@com_google_protobuf//:protobuf_python": 19 | - genproto_deps.append(dep + "_genproto") 20 | - else: 21 | - genproto_deps.append("@com_google_protobuf//:well_known_types_py_pb2_genproto") 22 | + genproto_deps.append(dep + "_genproto") 23 | 24 | proto_gen( 25 | name = name + "_genproto", 26 | -------------------------------------------------------------------------------- /recipe/patches/0014-Remove-some-usage-of-absl-str_format-in-CUDA.patch: -------------------------------------------------------------------------------- 1 | From 1754621db12ded408afaf34ce1a8dc239d7eb69f Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. Korn" 3 | Date: Thu, 23 Nov 2023 09:05:37 +0000 4 | Subject: [PATCH 14/41] Remove some usage of absl::str_format in CUDA 5 | 6 | nvcc from CUDA 11 is unable to correctly handle the new C++ features 7 | used in this template. We use a different implementation that should 8 | yield the same results without the usage of absl. 
9 | --- 10 | tensorflow/core/framework/resource_base.h | 10 ++++++++-- 11 | tensorflow/core/framework/resource_var.cc | 2 ++ 12 | tensorflow/core/kernels/conv_ops_gpu.h | 2 +- 13 | 3 files changed, 11 insertions(+), 3 deletions(-) 14 | 15 | diff --git a/tensorflow/core/framework/resource_base.h b/tensorflow/core/framework/resource_base.h 16 | index c22adb559f1..8b218e6eb10 100644 17 | --- a/tensorflow/core/framework/resource_base.h 18 | +++ b/tensorflow/core/framework/resource_base.h 19 | @@ -18,8 +18,9 @@ limitations under the License. 20 | 21 | #include 22 | #include 23 | +#include 24 | 25 | -#include "absl/strings/str_format.h" 26 | +// #include "absl/strings/str_format.h" 27 | #include "tensorflow/core/lib/core/refcount.h" 28 | #include "tensorflow/core/lib/core/status.h" 29 | #include "tensorflow/core/platform/errors.h" 30 | @@ -41,7 +42,12 @@ class ResourceBase : public core::WeakRefCounted { 31 | 32 | // Returns a name for ref-counting handles. 33 | virtual std::string MakeRefCountingHandleName(int64_t resource_id) const { 34 | - return absl::StrFormat("Resource-%d-at-%p", resource_id, this); 35 | + std::ostringstream stringStream; 36 | + stringStream << "Resource-"; 37 | + stringStream << resource_id; 38 | + stringStream << "-"; 39 | + stringStream << reinterpret_cast(this); 40 | + return stringStream.str(); 41 | } 42 | 43 | // Returns memory used by this resource. 44 | diff --git a/tensorflow/core/framework/resource_var.cc b/tensorflow/core/framework/resource_var.cc 45 | index 9cd77215af9..36dff751cdf 100644 46 | --- a/tensorflow/core/framework/resource_var.cc 47 | +++ b/tensorflow/core/framework/resource_var.cc 48 | @@ -18,6 +18,8 @@ limitations under the License. 
49 | #include "tensorflow/core/framework/resource_handle.h" 50 | #include "tensorflow/core/graph/graph_def_builder.h" 51 | 52 | +#include "absl/strings/str_format.h" 53 | + 54 | namespace tensorflow { 55 | 56 | absl::Status Var::AsGraphDef(GraphDefBuilder* builder, Node** out) const { 57 | diff --git a/tensorflow/core/kernels/conv_ops_gpu.h b/tensorflow/core/kernels/conv_ops_gpu.h 58 | index 627450ef2d6..d0c97527284 100644 59 | --- a/tensorflow/core/kernels/conv_ops_gpu.h 60 | +++ b/tensorflow/core/kernels/conv_ops_gpu.h 61 | @@ -22,7 +22,7 @@ limitations under the License. 62 | #include 63 | 64 | #include "absl/strings/str_cat.h" 65 | -#include "absl/strings/str_format.h" 66 | +// #include "absl/strings/str_format.h" 67 | #include "tensorflow/core/framework/op_kernel.h" 68 | #include "tensorflow/core/kernels/gpu_utils.h" 69 | #include "tensorflow/core/lib/gtl/inlined_vector.h" 70 | -------------------------------------------------------------------------------- /recipe/patches/0015-Adjust-relative-path-for-libdevice.patch: -------------------------------------------------------------------------------- 1 | From 9fe67e66ea749fb4aa46e885c1a89db8af0e5560 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Sat, 27 Jan 2024 22:27:56 -0500 4 | Subject: [PATCH 15/41] Adjust relative path for libdevice 5 | 6 | --- 7 | third_party/xla/xla/tsl/platform/default/cuda_root_path.cc | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/third_party/xla/xla/tsl/platform/default/cuda_root_path.cc b/third_party/xla/xla/tsl/platform/default/cuda_root_path.cc 11 | index 60c7dabf3ea..8d1f29b2e90 100644 12 | --- a/third_party/xla/xla/tsl/platform/default/cuda_root_path.cc 13 | +++ b/third_party/xla/xla/tsl/platform/default/cuda_root_path.cc 14 | @@ -70,7 +70,7 @@ std::vector CandidateCudaRoots() { 15 | // TF lib binaries are located in both the package's root dir and within a 16 | // 'python' subdirectory (for pywrap libs). 
So we check two possible paths 17 | // relative to the current binary for the wheel-based nvcc package. 18 | - for (auto path : {"../nvidia/cuda_nvcc", "../../nvidia/cuda_nvcc"}) 19 | + for (auto path : {"../nvidia/cuda_nvcc", "../../nvidia/cuda_nvcc", "../../../.."}) 20 | roots.emplace_back(io::JoinPath(dir, path)); 21 | 22 | // Also add the path to the copy of libdevice.10.bc that we include within 23 | -------------------------------------------------------------------------------- /recipe/patches/0016-Link-to-absl_log_flags-instead-of-absl_flags.patch: -------------------------------------------------------------------------------- 1 | From 325185559fa8f5de25417644fc892c39fdbe2199 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Mon, 26 Feb 2024 22:30:08 -0500 4 | Subject: [PATCH 16/41] Link to absl_log_flags instead of absl_flags 5 | 6 | --- 7 | third_party/absl/system.absl.flags.BUILD | 2 +- 8 | .../third_party/tsl/third_party/absl/system.absl.flags.BUILD | 2 +- 9 | 2 files changed, 2 insertions(+), 2 deletions(-) 10 | 11 | diff --git a/third_party/absl/system.absl.flags.BUILD b/third_party/absl/system.absl.flags.BUILD 12 | index aff653c7e5b..ee2e1bfe5e3 100644 13 | --- a/third_party/absl/system.absl.flags.BUILD 14 | +++ b/third_party/absl/system.absl.flags.BUILD 15 | @@ -97,7 +97,7 @@ cc_library( 16 | 17 | cc_library( 18 | name = "flag", 19 | - linkopts = ["-labsl_flags"], 20 | + linkopts = ["-labsl_log_flags"], 21 | deps = [ 22 | ":config", 23 | ":flag_internal", 24 | diff --git a/third_party/xla/third_party/tsl/third_party/absl/system.absl.flags.BUILD b/third_party/xla/third_party/tsl/third_party/absl/system.absl.flags.BUILD 25 | index aff653c7e5b..ee2e1bfe5e3 100644 26 | --- a/third_party/xla/third_party/tsl/third_party/absl/system.absl.flags.BUILD 27 | +++ b/third_party/xla/third_party/tsl/third_party/absl/system.absl.flags.BUILD 28 | @@ -97,7 +97,7 @@ cc_library( 29 | 30 | cc_library( 31 | name = "flag", 32 | - linkopts = ["-labsl_flags"], 33 
| + linkopts = ["-labsl_log_flags"], 34 | deps = [ 35 | ":config", 36 | ":flag_internal", 37 | -------------------------------------------------------------------------------- /recipe/patches/0017-Update-ABSL-Log-Definition-for-libabsl_vlog_config_i.patch: -------------------------------------------------------------------------------- 1 | From b027d8a7e8c257e50a8db2ef875f1fdf4b5276c0 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Tue, 27 Feb 2024 18:01:19 -0500 4 | Subject: [PATCH 17/41] Update ABSL Log Definition for 5 | libabsl_vlog_config_internal.so 6 | 7 | --- 8 | third_party/absl/system.absl.log.BUILD | 2 ++ 9 | 1 file changed, 2 insertions(+) 10 | 11 | diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD 12 | index 0248bda0f52..1fe1e342594 100644 13 | --- a/third_party/absl/system.absl.log.BUILD 14 | +++ b/third_party/absl/system.absl.log.BUILD 15 | @@ -5,6 +5,7 @@ package(default_visibility = ["//visibility:public"]) 16 | cc_library( 17 | name = "log", 18 | linkopts = [ 19 | + "-labsl_vlog_config_internal", 20 | "-labsl_log_internal_conditions", 21 | "-labsl_log_internal_check_op", 22 | "-labsl_log_internal_message", 23 | @@ -15,6 +16,7 @@ cc_library( 24 | cc_library( 25 | name = "check", 26 | linkopts = [ 27 | + "-labsl_vlog_config_internal", 28 | "-labsl_log_internal_check_op", 29 | "-labsl_log_internal_message", 30 | "-labsl_log_internal_nullguard", 31 | -------------------------------------------------------------------------------- /recipe/patches/0018-add-absl_string_view-target.patch: -------------------------------------------------------------------------------- 1 | From 9222ace0a6fd07bca537ea8bbe6f0bf3e1daafc0 Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Sun, 12 May 2024 18:45:03 +1100 4 | Subject: [PATCH 18/41] add absl_string_view target 5 | 6 | --- 7 | third_party/absl/system.absl.strings.BUILD | 14 +++++++++++++- 8 | 1 file changed, 13 insertions(+), 1 deletion(-) 9 | 10 | diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD 11 | index d04776c98bf..9f537def3d7 100644 12 | --- a/third_party/absl/system.absl.strings.BUILD 13 | +++ b/third_party/absl/system.absl.strings.BUILD 14 | @@ -2,6 +2,18 @@ load("@rules_cc//cc:defs.bzl", "cc_library") 15 | 16 | package(default_visibility = ["//visibility:public"]) 17 | 18 | +cc_library( 19 | + name = "string_view", 20 | + linkopts = ["-labsl_string_view"], 21 | + deps = [ 22 | + "//absl/base", 23 | + "//absl/base:config", 24 | + "//absl/base:core_headers", 25 | + "//absl/base:nullability", 26 | + "//absl/base:throw_delegate", 27 | + ], 28 | +) 29 | + 30 | cc_library( 31 | name = "strings", 32 | linkopts = ["-labsl_strings"], 33 | @@ -26,7 +38,7 @@ cc_library( 34 | 35 | cc_library( 36 | name = "cord", 37 | - linkopts = ["-labsl_cord", "-labsl_cordz_functions", "-labsl_cordz_info"], 38 | + linkopts = ["-labsl_cord", "-labsl_cordz_functions", "-labsl_cordz_info", "-labsl_cord_internal"], 39 | deps = [ 40 | ":str_format", 41 | "//absl/container:compressed_tuple", 42 | -------------------------------------------------------------------------------- /recipe/patches/0019-add-absl_nullability-target.patch: -------------------------------------------------------------------------------- 1 | From 53c5f176c1280b7e3207a28e6f3e65a5795c15a0 Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Sun, 12 May 2024 19:30:44 +1100 4 | Subject: [PATCH 19/41] add absl_nullability target 5 | 6 | --- 7 | third_party/absl/system.absl.base.BUILD | 8 ++++++++ 8 | 1 file changed, 8 insertions(+) 9 | 10 | diff --git a/third_party/absl/system.absl.base.BUILD b/third_party/absl/system.absl.base.BUILD 11 | index b3e42f4dd8b..a423d5f70da 100644 12 | --- a/third_party/absl/system.absl.base.BUILD 13 | +++ b/third_party/absl/system.absl.base.BUILD 14 | @@ -25,6 +25,14 @@ cc_library( 15 | ], 16 | ) 17 | 18 | +cc_library( 19 | + name = "nullability", 20 | + deps = [ 21 | + ":core_headers", 22 | + "//absl/meta:type_traits", 23 | + ], 24 | +) 25 | + 26 | cc_library( 27 | name = "raw_logging_internal", 28 | linkopts = [ 29 | -------------------------------------------------------------------------------- /recipe/patches/0020-add-absl_prefetch-target.patch: -------------------------------------------------------------------------------- 1 | From b9df95b322f94fa8377f060e7ba4718e1fcaaaae Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Sun, 12 May 2024 22:29:25 +1100 4 | Subject: [PATCH 20/41] add absl_prefetch target 5 | 6 | --- 7 | third_party/absl/system.absl.base.BUILD | 8 ++++++++ 8 | 1 file changed, 8 insertions(+) 9 | 10 | diff --git a/third_party/absl/system.absl.base.BUILD b/third_party/absl/system.absl.base.BUILD 11 | index a423d5f70da..b31592d31a3 100644 12 | --- a/third_party/absl/system.absl.base.BUILD 13 | +++ b/third_party/absl/system.absl.base.BUILD 14 | @@ -116,6 +116,14 @@ cc_library( 15 | ], 16 | ) 17 | 18 | +cc_library( 19 | + name = "prefetch", 20 | + deps = [ 21 | + ":config", 22 | + ":core_headers", 23 | + ], 24 | +) 25 | + 26 | cc_library( 27 | name = "strerror", 28 | linkopts = ["-labsl_strerror"], 29 | -------------------------------------------------------------------------------- /recipe/patches/0021-add-absl_die_if_null-target.patch: -------------------------------------------------------------------------------- 1 | From 98381356151907b49a5dec905d52ea523abbc5f3 Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Sun, 12 May 2024 22:46:04 +1100 4 | Subject: [PATCH 21/41] add absl_die_if_null target 5 | 6 | --- 7 | third_party/absl/system.absl.log.BUILD | 16 ++++++++++++++++ 8 | 1 file changed, 16 insertions(+) 9 | 10 | diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD 11 | index 1fe1e342594..141bb319489 100644 12 | --- a/third_party/absl/system.absl.log.BUILD 13 | +++ b/third_party/absl/system.absl.log.BUILD 14 | @@ -2,6 +2,22 @@ load("@rules_cc//cc:defs.bzl", "cc_library") 15 | 16 | package(default_visibility = ["//visibility:public"]) 17 | 18 | +cc_library( 19 | + name = "die_if_null", 20 | + linkopts = ["-labsl_die_if_null"], 21 | + deps = [ 22 | + ":log", 23 | + "//absl/base:config", 24 | + "//absl/base:core_headers", 25 | + "//absl/strings", 26 | + ], 27 | +) 28 | + 29 | +alias( 30 | + name = "absl_log", 31 | + actual = ":log", 32 | +) 33 | + 34 | cc_library( 35 | name = "log", 36 | linkopts = [ 37 | -------------------------------------------------------------------------------- /recipe/patches/0022-add-absl_crc32c-targets.patch: -------------------------------------------------------------------------------- 1 | From d062da2c65a8aab38325f1a02b253a668f7227a7 Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Mon, 13 May 2024 09:41:09 +1100 4 | Subject: [PATCH 22/41] add absl_crc32c targets 5 | 6 | --- 7 | third_party/absl/system.absl.crc.BUILD | 70 ++++++++++++++++++++++++++ 8 | third_party/absl/workspace.bzl | 1 + 9 | 2 files changed, 71 insertions(+) 10 | create mode 100644 third_party/absl/system.absl.crc.BUILD 11 | 12 | diff --git a/third_party/absl/system.absl.crc.BUILD b/third_party/absl/system.absl.crc.BUILD 13 | new file mode 100644 14 | index 00000000000..487c36ff45b 15 | --- /dev/null 16 | +++ b/third_party/absl/system.absl.crc.BUILD 17 | @@ -0,0 +1,70 @@ 18 | +load("@rules_cc//cc:defs.bzl", "cc_library") 19 | + 20 | +package(default_visibility = ["//visibility:public"]) 21 | + 22 | +cc_library( 23 | + name = "crc32c", 24 | + linkopts = [ 25 | + "-labsl_crc32c", 26 | + "-labsl_crc_cord_state", 27 | + "-labsl_crc_cpu_detect", 28 | + "-labsl_crc_internal", 29 | + ], 30 | + deps = [ 31 | + ":cpu_detect", 32 | + ":crc_internal", 33 | + ":non_temporal_memcpy", 34 | + "//absl/base:config", 35 | + "//absl/base:core_headers", 36 | + "//absl/base:endian", 37 | + "//absl/base:prefetch", 38 | + "//absl/strings", 39 | + "//absl/strings:str_format", 40 | + ], 41 | +) 42 | + 43 | +cc_library( 44 | + name = "cpu_detect", 45 | + visibility = ["//visibility:private"], 46 | + deps = [ 47 | + "//absl/base", 48 | + "//absl/base:config", 49 | + ], 50 | +) 51 | + 52 | +cc_library( 53 | + name = "crc_internal", 54 | + visibility = ["//visibility:private"], 55 | + deps = [ 56 | + ":cpu_detect", 57 | + "//absl/base:config", 58 | + "//absl/base:core_headers", 59 | + "//absl/base:endian", 60 | + "//absl/base:prefetch", 61 | + "//absl/base:raw_logging_internal", 62 | + "//absl/memory", 63 | + "//absl/numeric:bits", 64 | + ], 65 | +) 66 | + 67 | +cc_library( 68 | + name = "non_temporal_memcpy", 69 | + visibility = [ 70 | + ":__pkg__", 71 | + ], 72 | + deps = [ 73 | + ":non_temporal_arm_intrinsics", 74 | + "//absl/base:config", 75 | + "//absl/base:core_headers", 76 | + 
], 77 | +) 78 | + 79 | +cc_library( 80 | + name = "non_temporal_arm_intrinsics", 81 | + visibility = [ 82 | + ":__pkg__", 83 | + ], 84 | + deps = [ 85 | + "//absl/base:config", 86 | + ], 87 | +) 88 | diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl 89 | index b5197f9304b..0bb3fbbf074 100644 90 | --- a/third_party/absl/workspace.bzl 91 | +++ b/third_party/absl/workspace.bzl 92 | @@ -14,6 +14,7 @@ def repo(): 93 | SYS_DIRS = [ 94 | "algorithm", 95 | "base", 96 | + "crc", 97 | "cleanup", 98 | "container", 99 | "debugging", 100 | -------------------------------------------------------------------------------- /recipe/patches/0023-add-kernel_timeout_internal-target.patch: -------------------------------------------------------------------------------- 1 | From 15e565f9a3ba59df7a36d32104e729483d552357 Mon Sep 17 00:00:00 2001 2 | From: "H. Vetinari" 3 | Date: Mon, 13 May 2024 23:36:48 +1100 4 | Subject: [PATCH 23/41] add kernel_timeout_internal target 5 | 6 | --- 7 | .../absl/system.absl.synchronization.BUILD | 15 +++++++++++++++ 8 | 1 file changed, 15 insertions(+) 9 | 10 | diff --git a/third_party/absl/system.absl.synchronization.BUILD b/third_party/absl/system.absl.synchronization.BUILD 11 | index c0fa37aacd7..b47309deb39 100644 12 | --- a/third_party/absl/system.absl.synchronization.BUILD 13 | +++ b/third_party/absl/system.absl.synchronization.BUILD 14 | @@ -16,14 +16,29 @@ cc_library( 15 | ], 16 | ) 17 | 18 | +cc_library( 19 | + name = "kernel_timeout_internal", 20 | + visibility = [ 21 | + ], 22 | + deps = [ 23 | + "//absl/base", 24 | + "//absl/base:config", 25 | + "//absl/base:core_headers", 26 | + "//absl/base:raw_logging_internal", 27 | + "//absl/time", 28 | + ], 29 | +) 30 | + 31 | cc_library( 32 | name = "synchronization", 33 | linkopts = [ 34 | "-labsl_synchronization", 35 | + "-labsl_kernel_timeout_internal", 36 | "-pthread", 37 | ], 38 | deps = [ 39 | ":graphcycles_internal", 40 | + ":kernel_timeout_internal", 41 | "//absl/base", 
42 | "//absl/base:atomic_hook", 43 | "//absl/base:dynamic_annotations", 44 | -------------------------------------------------------------------------------- /recipe/patches/0024-work-around-for-warning-that-clang-falsely-treats-as.patch: -------------------------------------------------------------------------------- 1 | From 5c601370b413359c22c2dfeca6ec76c64bd89880 Mon Sep 17 00:00:00 2001 2 | From: "H. Vetinari" 3 | Date: Sun, 19 May 2024 20:52:35 +1100 4 | Subject: [PATCH 24/41] work around for warning that clang falsely treats as 5 | error 6 | 7 | see https://github.com/llvm/llvm-project/issues/92630 8 | --- 9 | .bazelrc | 2 +- 10 | 1 file changed, 1 insertion(+), 1 deletion(-) 11 | 12 | diff --git a/.bazelrc b/.bazelrc 13 | index b9f83146895..6ba3875c4de 100644 14 | --- a/.bazelrc 15 | +++ b/.bazelrc 16 | @@ -331,7 +331,7 @@ build:ios --noenable_platform_specific_config 17 | build:android --copt=-w 18 | build:ios --copt=-w 19 | build:linux --host_copt=-w 20 | -build:macos --copt=-w 21 | +build:macos --copt=-w --copt=-Wno-c++11-narrowing 22 | build:windows --copt=/W0 23 | build:windows --host_copt=/W0 24 | 25 | -------------------------------------------------------------------------------- /recipe/patches/0025-Hardcode-BUILD_PREFIX-in-build_pip_package.patch: -------------------------------------------------------------------------------- 1 | From d9c7dd6bc48f7f894ea5179d014c176bdc1412a3 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Fri, 12 Jul 2024 12:52:38 +0000 4 | Subject: [PATCH 25/41] Hardcode BUILD_PREFIX in build_pip_package 5 | 6 | --- 7 | tensorflow/tools/pip_package/build_pip_package.py | 6 +++--- 8 | 1 file changed, 3 insertions(+), 3 deletions(-) 9 | 10 | diff --git a/tensorflow/tools/pip_package/build_pip_package.py b/tensorflow/tools/pip_package/build_pip_package.py 11 | index 4809d5ec7a7..e254358d435 100644 12 | --- a/tensorflow/tools/pip_package/build_pip_package.py 13 | +++ b/tensorflow/tools/pip_package/build_pip_package.py 14 | @@ -341,18 +341,18 @@ def patch_so(srcs_dir: str) -> None: 15 | for file, path in to_patch.items(): 16 | rpath = ( 17 | subprocess.check_output( 18 | - ["patchelf", "--print-rpath", "{}/{}".format(srcs_dir, file)] 19 | + ["BUILD_PREFIX/bin/patchelf", "--print-rpath", "{}/{}".format(srcs_dir, file)] 20 | ) 21 | .decode() 22 | .strip() 23 | ) 24 | new_rpath = rpath + ":" + path 25 | subprocess.run( 26 | - ["patchelf", "--set-rpath", new_rpath, "{}/{}".format(srcs_dir, file)], 27 | + ["BUILD_PREFIX/bin/patchelf", "--set-rpath", new_rpath, "{}/{}".format(srcs_dir, file)], 28 | check=True, 29 | ) 30 | subprocess.run( 31 | - ["patchelf", "--shrink-rpath", "{}/{}".format(srcs_dir, file)], 32 | + ["BUILD_PREFIX/bin/patchelf", "--shrink-rpath", "{}/{}".format(srcs_dir, file)], 33 | check=True, 34 | ) 35 | 36 | -------------------------------------------------------------------------------- /recipe/patches/0026-Only-link-to-sparse_core_layout_proto_cc-headers.patch: -------------------------------------------------------------------------------- 1 | From 1510b19daaecd746d6b0bcbfbac16c8ef449ec01 Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Tue, 16 Jul 2024 17:18:23 +0200 4 | Subject: [PATCH 26/41] Only link to sparse_core_layout_proto_cc headers 5 | 6 | --- 7 | tensorflow/core/tpu/kernels/BUILD | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/tensorflow/core/tpu/kernels/BUILD b/tensorflow/core/tpu/kernels/BUILD 11 | index e65eea35bd0..5f10b019634 100644 12 | --- a/tensorflow/core/tpu/kernels/BUILD 13 | +++ b/tensorflow/core/tpu/kernels/BUILD 14 | @@ -1486,7 +1486,7 @@ cc_library( 15 | hdrs = ["sparse_core_layout.h"], 16 | visibility = ["//tensorflow/python/tpu:__pkg__"], # ONLY for `_pywrap_sparse_core_layout`. 17 | deps = [ 18 | - ":sparse_core_layout_proto_cc", 19 | + ":sparse_core_layout_proto_cc_headers_only", 20 | "//tensorflow/core/platform:stringpiece", 21 | "@com_google_absl//absl/container:btree", 22 | "@com_google_absl//absl/log:check", 23 | -------------------------------------------------------------------------------- /recipe/patches/0027-Protobuf-5-compatability.patch: -------------------------------------------------------------------------------- 1 | From c3fabee4eb7c2e420a31a37ad10cffb5c6cc426b Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Thu, 10 Oct 2024 19:59:31 +0200 4 | Subject: [PATCH 27/41] Protobuf 5 compatability 5 | 6 | --- 7 | .../mlir/lite/python/jax_to_tfl_flatbuffer.cc | 2 +- 8 | .../mlir/tensorflow/utils/parse_text_proto.cc | 2 +- 9 | tensorflow/core/debug/debug_io_utils.cc | 4 ++ 10 | .../core/ir/importexport/parse_text_proto.cc | 2 +- 11 | .../profiler/convert/hlo_to_tools_data.cc | 4 ++ 12 | .../convert/xplane_to_memory_profile.cc | 4 ++ 13 | .../profiler/convert/xplane_to_tools_data.cc | 8 +++ 14 | tensorflow/core/util/proto/proto_utils.cc | 10 ++-- 15 | tensorflow/core/util/proto/proto_utils.h | 4 +- 16 | .../proto_text/gen_proto_text_functions.cc | 4 +- 17 | tensorflow/workspace2.bzl | 4 ++ 18 | ...Migrate-from-AddError-to-RecordError.patch | 33 ++++++++++++ 19 | ...y-with-protobuf-v26-migrate-from-dep.patch | 52 +++++++++++++++++++ 20 | .../platform/default/human_readable_json.cc | 4 ++ 21 | 14 files changed, 125 insertions(+), 12 deletions(-) 22 | create mode 100644 third_party/0001-Migrate-from-AddError-to-RecordError.patch 23 | create mode 100644 third_party/0002-Fix-compatibility-with-protobuf-v26-migrate-from-dep.patch 24 | 25 | diff --git a/tensorflow/compiler/mlir/lite/python/jax_to_tfl_flatbuffer.cc b/tensorflow/compiler/mlir/lite/python/jax_to_tfl_flatbuffer.cc 26 | index d4a2d02db6a..1448305ca2b 100644 27 | --- a/tensorflow/compiler/mlir/lite/python/jax_to_tfl_flatbuffer.cc 28 | +++ b/tensorflow/compiler/mlir/lite/python/jax_to_tfl_flatbuffer.cc 29 | @@ -57,7 +57,7 @@ namespace { 30 | // Error collector that simply ignores errors reported. 
31 | class NoOpErrorCollector : public tsl::protobuf::io::ErrorCollector { 32 | public: 33 | - void AddError(int line, int column, const std::string& message) override {} 34 | + void RecordError(int line, int column, const absl::string_view message) override {} 35 | }; 36 | 37 | absl::StatusOr LoadHloProto(const std::string& contents) { 38 | diff --git a/tensorflow/compiler/mlir/tensorflow/utils/parse_text_proto.cc b/tensorflow/compiler/mlir/tensorflow/utils/parse_text_proto.cc 39 | index aa2d9406e91..1d41972266f 100644 40 | --- a/tensorflow/compiler/mlir/tensorflow/utils/parse_text_proto.cc 41 | +++ b/tensorflow/compiler/mlir/tensorflow/utils/parse_text_proto.cc 42 | @@ -30,7 +30,7 @@ namespace { 43 | // Error collector that simply ignores errors reported. 44 | class NoOpErrorCollector : public protobuf::io::ErrorCollector { 45 | public: 46 | - void AddError(int line, int column, const std::string& message) override {} 47 | + void RecordError(int line, int column, absl::string_view message) override {} 48 | }; 49 | } // namespace 50 | 51 | diff --git a/tensorflow/core/debug/debug_io_utils.cc b/tensorflow/core/debug/debug_io_utils.cc 52 | index 04317455a94..700b5b2b645 100644 53 | --- a/tensorflow/core/debug/debug_io_utils.cc 54 | +++ b/tensorflow/core/debug/debug_io_utils.cc 55 | @@ -94,7 +94,11 @@ Event PrepareChunkEventProto(const DebugNodeKey& debug_node_key, 56 | // Encode the data in JSON. 
57 | string json_output; 58 | tensorflow::protobuf::util::JsonPrintOptions json_options; 59 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 60 | json_options.always_print_primitive_fields = true; 61 | +#else 62 | + json_options.always_print_fields_with_no_presence = true; 63 | +#endif 64 | auto status = tensorflow::protobuf::util::MessageToJsonString( 65 | metadata, &json_output, json_options); 66 | if (status.ok()) { 67 | diff --git a/tensorflow/core/ir/importexport/parse_text_proto.cc b/tensorflow/core/ir/importexport/parse_text_proto.cc 68 | index c631785cefc..3ab6e17c3aa 100644 69 | --- a/tensorflow/core/ir/importexport/parse_text_proto.cc 70 | +++ b/tensorflow/core/ir/importexport/parse_text_proto.cc 71 | @@ -35,7 +35,7 @@ namespace { 72 | // Error collector that simply ignores errors reported. 73 | class NoOpErrorCollector : public tensorflow::protobuf::io::ErrorCollector { 74 | public: 75 | - void AddError(int line, int column, const std::string& message) override {} 76 | + void RecordError(int line, int column, const absl::string_view message) override {} 77 | }; 78 | } // namespace 79 | 80 | diff --git a/tensorflow/core/profiler/convert/hlo_to_tools_data.cc b/tensorflow/core/profiler/convert/hlo_to_tools_data.cc 81 | index 7fbcd397906..d17f3402a3f 100644 82 | --- a/tensorflow/core/profiler/convert/hlo_to_tools_data.cc 83 | +++ b/tensorflow/core/profiler/convert/hlo_to_tools_data.cc 84 | @@ -62,7 +62,11 @@ absl::StatusOr ConvertHloProtoToMemoryViewer( 85 | 86 | std::string json_output; 87 | tensorflow::protobuf::util::JsonPrintOptions options; 88 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 89 | options.always_print_primitive_fields = true; 90 | +#else 91 | + options.always_print_fields_with_no_presence = true; 92 | +#endif 93 | auto encoded_status = tensorflow::protobuf::util::MessageToJsonString( 94 | result_or.value(), &json_output, options); 95 | if (!encoded_status.ok()) { 96 | diff --git a/tensorflow/core/profiler/convert/xplane_to_memory_profile.cc 
b/tensorflow/core/profiler/convert/xplane_to_memory_profile.cc 97 | index 612a40bc3f7..7f82387011a 100644 98 | --- a/tensorflow/core/profiler/convert/xplane_to_memory_profile.cc 99 | +++ b/tensorflow/core/profiler/convert/xplane_to_memory_profile.cc 100 | @@ -530,7 +530,11 @@ template 101 | absl::Status ConvertProtoToJson(const Proto& proto_output, 102 | std::string* json_output) { 103 | protobuf::util::JsonPrintOptions json_options; 104 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 105 | json_options.always_print_primitive_fields = true; 106 | +#else 107 | + json_options.always_print_fields_with_no_presence = true; 108 | +#endif 109 | auto status = protobuf::util::MessageToJsonString(proto_output, json_output, 110 | json_options); 111 | if (!status.ok()) { 112 | diff --git a/tensorflow/core/profiler/convert/xplane_to_tools_data.cc b/tensorflow/core/profiler/convert/xplane_to_tools_data.cc 113 | index 01168576114..09b284e34f5 100644 114 | --- a/tensorflow/core/profiler/convert/xplane_to_tools_data.cc 115 | +++ b/tensorflow/core/profiler/convert/xplane_to_tools_data.cc 116 | @@ -230,7 +230,11 @@ absl::StatusOr ConvertMultiXSpacesToPodViewer( 117 | 118 | std::string json_output; 119 | protobuf::util::JsonPrintOptions opts; 120 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 121 | opts.always_print_primitive_fields = true; 122 | +#else 123 | + opts.always_print_fields_with_no_presence = true; 124 | +#endif 125 | auto encode_status = protobuf::util::MessageToJsonString( 126 | ConvertOpStatsToPodViewer(combined_op_stats), &json_output, opts); 127 | if (!encode_status.ok()) { 128 | @@ -310,7 +314,11 @@ absl::StatusOr ConvertMultiXSpacesToOpProfileViewer( 129 | profile); 130 | std::string json_output; 131 | protobuf::util::JsonPrintOptions opts; 132 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 133 | opts.always_print_primitive_fields = true; 134 | +#else 135 | + opts.always_print_fields_with_no_presence = true; 136 | +#endif 137 | 138 | auto encode_status = 139 | 
protobuf::util::MessageToJsonString(profile, &json_output, opts); 140 | diff --git a/tensorflow/core/util/proto/proto_utils.cc b/tensorflow/core/util/proto/proto_utils.cc 141 | index be13bdd8767..23d1e14ba5a 100644 142 | --- a/tensorflow/core/util/proto/proto_utils.cc 143 | +++ b/tensorflow/core/util/proto/proto_utils.cc 144 | @@ -101,17 +101,17 @@ StringErrorCollector::StringErrorCollector(string* error_text, 145 | } 146 | } 147 | 148 | -void StringErrorCollector::AddError(int line, int column, 149 | - const string& message) { 150 | +void StringErrorCollector::RecordError(int line, int column, 151 | + absl::string_view message) { 152 | if (error_text_ != nullptr) { 153 | absl::SubstituteAndAppend(error_text_, "$0($1): $2\n", line + index_offset_, 154 | column + index_offset_, message); 155 | } 156 | } 157 | 158 | -void StringErrorCollector::AddWarning(int line, int column, 159 | - const string& message) { 160 | - AddError(line, column, message); 161 | +void StringErrorCollector::RecordWarning(int line, int column, 162 | + absl::string_view message) { 163 | + RecordError(line, column, message); 164 | } 165 | 166 | } // namespace proto_utils 167 | diff --git a/tensorflow/core/util/proto/proto_utils.h b/tensorflow/core/util/proto/proto_utils.h 168 | index 01b8ad0d747..58c4a301ee9 100644 169 | --- a/tensorflow/core/util/proto/proto_utils.h 170 | +++ b/tensorflow/core/util/proto/proto_utils.h 171 | @@ -53,10 +53,10 @@ class StringErrorCollector : public protobuf::io::ErrorCollector { 172 | StringErrorCollector& operator=(const StringErrorCollector&) = delete; 173 | 174 | // Implementation of protobuf::io::ErrorCollector::AddError. 175 | - void AddError(int line, int column, const string& message) override; 176 | + void RecordError(int line, int column, absl::string_view message) override; 177 | 178 | // Implementation of protobuf::io::ErrorCollector::AddWarning. 
179 | - void AddWarning(int line, int column, const string& message) override; 180 | + void RecordWarning(int line, int column, absl::string_view message) override; 181 | 182 | private: 183 | string* const error_text_; 184 | diff --git a/tensorflow/tools/proto_text/gen_proto_text_functions.cc b/tensorflow/tools/proto_text/gen_proto_text_functions.cc 185 | index 9fab9130d94..557b17f3d0c 100644 186 | --- a/tensorflow/tools/proto_text/gen_proto_text_functions.cc 187 | +++ b/tensorflow/tools/proto_text/gen_proto_text_functions.cc 188 | @@ -31,8 +31,8 @@ class CrashOnErrorCollector 189 | public: 190 | ~CrashOnErrorCollector() override {} 191 | 192 | - void AddError(const string& filename, int line, int column, 193 | - const string& message) override { 194 | + void RecordError(absl::string_view filename, int line, int column, 195 | + absl::string_view message) override { 196 | LOG(FATAL) << "Unexpected error at " << filename << "@" << line << ":" 197 | << column << " - " << message; 198 | } 199 | diff --git a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl 200 | index fe87aac9a8c..eaab025f55f 100644 201 | --- a/tensorflow/workspace2.bzl 202 | +++ b/tensorflow/workspace2.bzl 203 | @@ -873,6 +873,10 @@ def _tf_repositories(): 204 | name = "riegeli", 205 | sha256 = "1d216d5c97fa60632143d209a1bb48c2a83788efdb876902e7bbc06396d5ee1f", 206 | strip_prefix = "riegeli-5d75119232cd4f6db8dfa69a1503289f050e9643", 207 | + patch_file = [ 208 | + "//third_party:0001-Migrate-from-AddError-to-RecordError.patch", 209 | + "//third_party:0002-Fix-compatibility-with-protobuf-v26-migrate-from-dep.patch", 210 | + ], 211 | urls = tf_mirror_urls("https://github.com/google/riegeli/archive/5d75119232cd4f6db8dfa69a1503289f050e9643.zip"), 212 | ) 213 | 214 | diff --git a/third_party/0001-Migrate-from-AddError-to-RecordError.patch b/third_party/0001-Migrate-from-AddError-to-RecordError.patch 215 | new file mode 100644 216 | index 00000000000..0ed75ee8019 217 | --- /dev/null 218 | +++ 
b/third_party/0001-Migrate-from-AddError-to-RecordError.patch 219 | @@ -0,0 +1,33 @@ 220 | +From e75b32516c67dbcb0cd1a698d6700c3d7bd85a35 Mon Sep 17 00:00:00 2001 221 | +From: Compression Team 222 | +Date: Fri, 31 May 2024 18:20:47 +0200 223 | +Subject: [PATCH 1/2] Migrate from AddError() to RecordError() 224 | + 225 | +In Protobuf v22 RecordError() has been introduced and AddError() deprecated. 226 | +In Protobuf v26 AddError() has been removed. 227 | + 228 | +PiperOrigin-RevId: 639057502 229 | +--- 230 | + riegeli/records/record_reader.cc | 6 +++--- 231 | + 1 file changed, 3 insertions(+), 3 deletions(-) 232 | + 233 | +diff --git a/riegeli/records/record_reader.cc b/riegeli/records/record_reader.cc 234 | +index 499432ab..f4d90cea 100644 235 | +--- a/riegeli/records/record_reader.cc 236 | ++++ b/riegeli/records/record_reader.cc 237 | +@@ -62,9 +62,9 @@ namespace riegeli { 238 | + class RecordsMetadataDescriptors::ErrorCollector 239 | + : public google::protobuf::DescriptorPool::ErrorCollector { 240 | + public: 241 | +- void AddError(const std::string& filename, const std::string& element_name, 242 | +- const google::protobuf::Message* descriptor, 243 | +- ErrorLocation location, const std::string& message) override { 244 | ++ void RecordError(absl::string_view filename, absl::string_view element_name, 245 | ++ const google::protobuf::Message* descriptor, 246 | ++ ErrorLocation location, absl::string_view message) override { 247 | + descriptors_->Fail(absl::InvalidArgumentError( 248 | + absl::StrCat("Error in file ", filename, ", element ", element_name, 249 | + ": ", message))); 250 | +-- 251 | +2.34.1 252 | + 253 | diff --git a/third_party/0002-Fix-compatibility-with-protobuf-v26-migrate-from-dep.patch b/third_party/0002-Fix-compatibility-with-protobuf-v26-migrate-from-dep.patch 254 | new file mode 100644 255 | index 00000000000..b5af8d2dff5 256 | --- /dev/null 257 | +++ b/third_party/0002-Fix-compatibility-with-protobuf-v26-migrate-from-dep.patch 258 | @@ -0,0 
+1,52 @@ 259 | +From 96dccef7210d7e182446b2db7424b16f44a66dd0 Mon Sep 17 00:00:00 2001 260 | +From: Marcin Kowalczyk 261 | +Date: Tue, 4 Jun 2024 16:50:59 +0200 262 | +Subject: [PATCH 2/2] =?UTF-8?q?Fix=20compatibility=20with=20protobuf=20v26?= 263 | + =?UTF-8?q?+:=20migrate=20from=20deprecated=20`AddError()`=20to=20`RecordE?= 264 | + =?UTF-8?q?rror()`=20also=20in=20text=20parser=E2=80=99s=20`ErrorCollector?= 265 | + =?UTF-8?q?`.?= 266 | +MIME-Version: 1.0 267 | +Content-Type: text/plain; charset=UTF-8 268 | +Content-Transfer-Encoding: 8bit 269 | + 270 | +PiperOrigin-RevId: 640152285 271 | +--- 272 | + riegeli/messages/text_parse.cc | 6 +++--- 273 | + riegeli/messages/text_parse.h | 4 ++-- 274 | + 2 files changed, 5 insertions(+), 5 deletions(-) 275 | + 276 | +diff --git a/riegeli/messages/text_parse.cc b/riegeli/messages/text_parse.cc 277 | +index 94a28180..5089c728 100644 278 | +--- a/riegeli/messages/text_parse.cc 279 | ++++ b/riegeli/messages/text_parse.cc 280 | +@@ -35,9 +35,9 @@ namespace riegeli { 281 | + 282 | + namespace messages_internal { 283 | + 284 | +-void StringErrorCollector::AddError(int line, 285 | +- google::protobuf::io::ColumnNumber column, 286 | +- const std::string& message) { 287 | ++void StringErrorCollector::RecordError( 288 | ++ int line, google::protobuf::io::ColumnNumber column, 289 | ++ absl::string_view message) { 290 | + if (line >= 0) { 291 | + absl::StrAppend(&errors_, "\nAt ", line + 1, ":", column + 1, ": ", 292 | + message); 293 | +diff --git a/riegeli/messages/text_parse.h b/riegeli/messages/text_parse.h 294 | +index e6bb4db2..ee234ec8 100644 295 | +--- a/riegeli/messages/text_parse.h 296 | ++++ b/riegeli/messages/text_parse.h 297 | +@@ -39,8 +39,8 @@ namespace messages_internal { 298 | + 299 | + class StringErrorCollector : public google::protobuf::io::ErrorCollector { 300 | + public: 301 | +- void AddError(int line, google::protobuf::io::ColumnNumber column, 302 | +- const std::string& message) override; 303 | ++ void 
RecordError(int line, google::protobuf::io::ColumnNumber column, 304 | ++ absl::string_view message) override; 305 | + 306 | + absl::string_view errors() const { return errors_; } 307 | + 308 | +-- 309 | +2.34.1 310 | + 311 | diff --git a/third_party/xla/xla/tsl/platform/default/human_readable_json.cc b/third_party/xla/xla/tsl/platform/default/human_readable_json.cc 312 | index 5c3da22fddd..375ca1d701d 100644 313 | --- a/third_party/xla/xla/tsl/platform/default/human_readable_json.cc 314 | +++ b/third_party/xla/xla/tsl/platform/default/human_readable_json.cc 315 | @@ -33,7 +33,11 @@ absl::StatusOr ProtoToHumanReadableJson( 316 | 317 | protobuf::util::JsonPrintOptions json_options; 318 | json_options.preserve_proto_field_names = true; 319 | +#if GOOGLE_PROTOBUF_VERSION < 5026000 320 | json_options.always_print_primitive_fields = true; 321 | +#else 322 | + json_options.always_print_fields_with_no_presence = true; 323 | +#endif 324 | auto status = 325 | protobuf::util::MessageToJsonString(proto, &result, json_options); 326 | if (!status.ok()) { 327 | -------------------------------------------------------------------------------- /recipe/patches/0028-Avoid-linking-with-internal-nvrtc.patch: -------------------------------------------------------------------------------- 1 | From 34df27ad44fe2ede5cd85fe0bbd5d642544b8876 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Sun, 2 Feb 2025 08:32:46 -0500 4 | Subject: [PATCH 28/41] Avoid linking with internal nvrtc 5 | 6 | --- 7 | third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl | 5 ----- 8 | .../tsl/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl | 5 ----- 9 | 2 files changed, 10 deletions(-) 10 | 11 | diff --git a/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl b/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 12 | index fea4c5d7ce7..1f6f8fc3577 100644 13 | --- a/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 14 | +++ b/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 15 | @@ -10,16 +10,11 
@@ cc_import( 16 | shared_library = "lib/libnvrtc.so.%{libnvrtc_version}", 17 | ) 18 | 19 | -cc_import( 20 | - name = "nvrtc_builtins", 21 | - shared_library = "lib/libnvrtc-builtins.so.%{libnvrtc-builtins_version}", 22 | -) 23 | %{multiline_comment} 24 | cc_library( 25 | name = "nvrtc", 26 | %{comment}deps = [ 27 | %{comment}":nvrtc_main", 28 | - %{comment}":nvrtc_builtins", 29 | %{comment}], 30 | %{comment}linkopts = cuda_rpath_flags("nvidia/cuda_nvrtc/lib"), 31 | visibility = ["//visibility:public"], 32 | diff --git a/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl b/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 33 | index fea4c5d7ce7..1f6f8fc3577 100644 34 | --- a/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 35 | +++ b/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_nvrtc.BUILD.tpl 36 | @@ -10,16 +10,11 @@ cc_import( 37 | shared_library = "lib/libnvrtc.so.%{libnvrtc_version}", 38 | ) 39 | 40 | -cc_import( 41 | - name = "nvrtc_builtins", 42 | - shared_library = "lib/libnvrtc-builtins.so.%{libnvrtc-builtins_version}", 43 | -) 44 | %{multiline_comment} 45 | cc_library( 46 | name = "nvrtc", 47 | %{comment}deps = [ 48 | %{comment}":nvrtc_main", 49 | - %{comment}":nvrtc_builtins", 50 | %{comment}], 51 | %{comment}linkopts = cuda_rpath_flags("nvidia/cuda_nvrtc/lib"), 52 | visibility = ["//visibility:public"], 53 | -------------------------------------------------------------------------------- /recipe/patches/0029-remove-dependencies-to-libcuda.patch: -------------------------------------------------------------------------------- 1 | From 8b730febbfa87d441e2e17a131f81bb4412f58bf Mon Sep 17 00:00:00 2001 2 | From: Jinzhe Zeng 3 | Date: Fri, 7 Feb 2025 03:41:29 -0500 4 | Subject: [PATCH 29/41] remove dependencies to libcuda 5 | 6 | --- 7 | third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl | 2 +- 8 | 
.../tsl/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl | 2 +- 9 | 2 files changed, 2 insertions(+), 2 deletions(-) 10 | 11 | diff --git a/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl b/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 12 | index fabb310001c..8a079831b34 100644 13 | --- a/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 14 | +++ b/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 15 | @@ -28,7 +28,7 @@ cc_import( 16 | %{multiline_comment} 17 | cc_library( 18 | name = "cuda_driver", 19 | - %{comment}deps = [":cuda_stub"], 20 | + #%{comment}deps = [":cuda_stub"], 21 | visibility = ["//visibility:public"], 22 | ) 23 | 24 | diff --git a/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl b/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 25 | index fabb310001c..8a079831b34 100644 26 | --- a/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 27 | +++ b/third_party/xla/third_party/tsl/third_party/gpus/cuda/hermetic/cuda_cudart.BUILD.tpl 28 | @@ -28,7 +28,7 @@ cc_import( 29 | %{multiline_comment} 30 | cc_library( 31 | name = "cuda_driver", 32 | - %{comment}deps = [":cuda_stub"], 33 | + #%{comment}deps = [":cuda_stub"], 34 | visibility = ["//visibility:public"], 35 | ) 36 | 37 | -------------------------------------------------------------------------------- /recipe/patches/0030-Fixup-pybind11_protobuf.patch: -------------------------------------------------------------------------------- 1 | From 3e45daedd0fbd56d9ea2fca66a95b5e7561c4151 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Sun, 6 Jul 2025 16:45:03 -0400 4 | Subject: [PATCH 30/41] Fixup pybind11_protobuf 5 | 6 | --- 7 | tensorflow/workspace2.bzl | 8 +++++--- 8 | third_party/pybind11_protobuf/remove_license.patch | 10 +++++----- 9 | third_party/xla/workspace2.bzl | 11 ++++++++--- 10 | 3 files changed, 18 insertions(+), 11 deletions(-) 11 | 12 | diff --git 
a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl 13 | index eaab025f55f..f402d5eb2a4 100644 14 | --- a/tensorflow/workspace2.bzl 15 | +++ b/tensorflow/workspace2.bzl 16 | @@ -788,11 +788,13 @@ def _tf_repositories(): 17 | 18 | tf_http_archive( 19 | name = "pybind11_protobuf", 20 | - urls = tf_mirror_urls("https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip"), 21 | - sha256 = "c7ab64b1ccf9a678694a89035a8c865a693e4e872803778f91f0965c2f281d78", 22 | - strip_prefix = "pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363", 23 | + urls = tf_mirror_urls("https://github.com/pybind/pybind11_protobuf/archive/f02a2b7653bc50eb5119d125842a3870db95d251.zip"), 24 | + sha256 = "3cf7bf0f23954c5ce6c37f0a215f506efa3035ca06e3b390d67f4cbe684dce23", 25 | + strip_prefix = "pybind11_protobuf-f02a2b7653bc50eb5119d125842a3870db95d251", 26 | patch_file = [ 27 | "//third_party/pybind11_protobuf:remove_license.patch", 28 | + "//third_party/pybind11_protobuf:0001-DO-not-link-to-proto_api.patch", 29 | + "//third_party/pybind11_protobuf:0002-Add-Python-include-path.patch", 30 | ], 31 | ) 32 | 33 | diff --git a/third_party/pybind11_protobuf/remove_license.patch b/third_party/pybind11_protobuf/remove_license.patch 34 | index 2e41474ee4a..7180fa8167e 100644 35 | --- a/third_party/pybind11_protobuf/remove_license.patch 36 | +++ b/third_party/pybind11_protobuf/remove_license.patch 37 | @@ -1,13 +1,13 @@ 38 | -diff --git third_party/pybind11_protobuf/BUILD third_party/pybind11_protobuf/BUILD 39 | -index b62eb91..b7d1240 100644 40 | +diff --git a/pybind11_protobuf/BUILD b/pybind11_protobuf/BUILD 41 | +index 3393167..5f5568d 100644 42 | --- a/pybind11_protobuf/BUILD 43 | +++ b/pybind11_protobuf/BUILD 44 | -@@ -3,8 +3,6 @@ 45 | +@@ -2,8 +2,6 @@ 46 | + 47 | load("@pybind11_bazel//:build_defs.bzl", "pybind_library") 48 | - load("@bazel_skylib//rules:common_settings.bzl", "bool_flag") 49 | 50 | -licenses(["notice"]) 51 | - 52 | pybind_library( 53 | 
name = "enum_type_caster", 54 | - hdrs = ["enum_type_caster.h"], 55 | \ No newline at end of file 56 | + hdrs = ["enum_type_caster.h"], 57 | diff --git a/third_party/xla/workspace2.bzl b/third_party/xla/workspace2.bzl 58 | index a1e98c628e7..2adf2bcd80b 100644 59 | --- a/third_party/xla/workspace2.bzl 60 | +++ b/third_party/xla/workspace2.bzl 61 | @@ -153,9 +153,14 @@ def _tf_repositories(): 62 | 63 | tf_http_archive( 64 | name = "pybind11_protobuf", 65 | - urls = tf_mirror_urls("https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip"), 66 | - sha256 = "c7ab64b1ccf9a678694a89035a8c865a693e4e872803778f91f0965c2f281d78", 67 | - strip_prefix = "pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363", 68 | + urls = tf_mirror_urls("https://github.com/pybind/pybind11_protobuf/archive/f02a2b7653bc50eb5119d125842a3870db95d251.zip"), 69 | + sha256 = "3cf7bf0f23954c5ce6c37f0a215f506efa3035ca06e3b390d67f4cbe684dce23", 70 | + strip_prefix = "pybind11_protobuf-f02a2b7653bc50eb5119d125842a3870db95d251", 71 | + patch_file = [ 72 | + "//third_party/pybind11_protobuf:remove_license.patch", 73 | + "//third_party/pybind11_protobuf:0001-DO-not-link-to-proto_api.patch", 74 | + "//third_party/pybind11_protobuf:0002-Add-Python-include-path.patch", 75 | + ], 76 | ) 77 | 78 | # buildifier: disable=function-docstring 79 | -------------------------------------------------------------------------------- /recipe/patches/0031-Update-linkages-for-new-absl-organization.patch: -------------------------------------------------------------------------------- 1 | From 0e7434e753fc57ae6aad9569b914cc82b5292870 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Sun, 6 Jul 2025 17:32:42 -0400 4 | Subject: [PATCH 31/41] Update linkages for new absl organization 5 | 6 | --- 7 | third_party/absl/system.absl.random.BUILD | 1 - 8 | third_party/absl/system.absl.types.BUILD | 2 -- 9 | .../third_party/tsl/third_party/absl/system.absl.random.BUILD | 1 - 10 | 
.../third_party/tsl/third_party/absl/system.absl.types.BUILD | 2 -- 11 | 4 files changed, 6 deletions(-) 12 | 13 | diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD 14 | index ac17ce6343b..71aaa9a1cd5 100644 15 | --- a/third_party/absl/system.absl.random.BUILD 16 | +++ b/third_party/absl/system.absl.random.BUILD 17 | @@ -30,7 +30,6 @@ cc_library( 18 | name = "seed_sequences", 19 | linkopts = [ 20 | "-labsl_random_internal_platform", 21 | - "-labsl_random_internal_pool_urbg", 22 | "-labsl_random_internal_randen", 23 | "-labsl_random_internal_randen_hwaes", 24 | "-labsl_random_internal_randen_hwaes_impl", 25 | diff --git a/third_party/absl/system.absl.types.BUILD b/third_party/absl/system.absl.types.BUILD 26 | index db94fc99185..623c0e96126 100644 27 | --- a/third_party/absl/system.absl.types.BUILD 28 | +++ b/third_party/absl/system.absl.types.BUILD 29 | @@ -30,7 +30,6 @@ cc_library( 30 | 31 | cc_library( 32 | name = "bad_optional_access", 33 | - linkopts = ["-labsl_bad_optional_access"], 34 | deps = [ 35 | "//absl/base:raw_logging_internal", 36 | ], 37 | @@ -38,7 +37,6 @@ cc_library( 38 | 39 | cc_library( 40 | name = "bad_variant_access", 41 | - linkopts = ["-labsl_bad_variant_access"], 42 | deps = [ 43 | "//absl/base:raw_logging_internal", 44 | ], 45 | diff --git a/third_party/xla/third_party/tsl/third_party/absl/system.absl.random.BUILD b/third_party/xla/third_party/tsl/third_party/absl/system.absl.random.BUILD 46 | index ac17ce6343b..71aaa9a1cd5 100644 47 | --- a/third_party/xla/third_party/tsl/third_party/absl/system.absl.random.BUILD 48 | +++ b/third_party/xla/third_party/tsl/third_party/absl/system.absl.random.BUILD 49 | @@ -30,7 +30,6 @@ cc_library( 50 | name = "seed_sequences", 51 | linkopts = [ 52 | "-labsl_random_internal_platform", 53 | - "-labsl_random_internal_pool_urbg", 54 | "-labsl_random_internal_randen", 55 | "-labsl_random_internal_randen_hwaes", 56 | "-labsl_random_internal_randen_hwaes_impl", 57 | 
diff --git a/third_party/xla/third_party/tsl/third_party/absl/system.absl.types.BUILD b/third_party/xla/third_party/tsl/third_party/absl/system.absl.types.BUILD 58 | index db94fc99185..623c0e96126 100644 59 | --- a/third_party/xla/third_party/tsl/third_party/absl/system.absl.types.BUILD 60 | +++ b/third_party/xla/third_party/tsl/third_party/absl/system.absl.types.BUILD 61 | @@ -30,7 +30,6 @@ cc_library( 62 | 63 | cc_library( 64 | name = "bad_optional_access", 65 | - linkopts = ["-labsl_bad_optional_access"], 66 | deps = [ 67 | "//absl/base:raw_logging_internal", 68 | ], 69 | @@ -38,7 +37,6 @@ cc_library( 70 | 71 | cc_library( 72 | name = "bad_variant_access", 73 | - linkopts = ["-labsl_bad_variant_access"], 74 | deps = [ 75 | "//absl/base:raw_logging_internal", 76 | ], 77 | -------------------------------------------------------------------------------- /recipe/patches/0032-Remove-ambiguous-inherited-constructor-in-default_qu.patch: -------------------------------------------------------------------------------- 1 | From 58ee9b5a3fdad20f827f9f0f2e94a228888f45ba Mon Sep 17 00:00:00 2001 2 | From: "Patrick J. LoPresti" 3 | Date: Thu, 3 Apr 2025 10:24:37 -0700 4 | Subject: [PATCH 32/41] Remove ambiguous inherited constructor in 5 | default_quant_params.cc. GCC complains about this 6 | (https://stackoverflow.com/q/79553477/). Fix is trivial and harmless. 7 | 8 | Fixes #84977. 
9 | --- 10 | .../compiler/mlir/lite/transforms/default_quant_params.cc | 4 +++- 11 | 1 file changed, 3 insertions(+), 1 deletion(-) 12 | 13 | diff --git a/tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc b/tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc 14 | index f1b602a6763..7acbb7d1724 100644 15 | --- a/tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc 16 | +++ b/tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc 17 | @@ -54,7 +54,9 @@ namespace { 18 | class DefaultQuantParamsPass 19 | : public impl::DefaultQuantParamsPassBase { 20 | public: 21 | - using DefaultQuantParamsPassBase::DefaultQuantParamsPassBase; 22 | + DefaultQuantParamsPass() 23 | + { 24 | + } 25 | 26 | explicit DefaultQuantParamsPass(double default_min, double default_max, 27 | bool is_signed) { 28 | -------------------------------------------------------------------------------- /recipe/patches/0033-third_party-ducc-fix-ambiguous-failure.patch: -------------------------------------------------------------------------------- 1 | From b53fc0aebaee09319e32161c2c7e7c0cd24d1d9f Mon Sep 17 00:00:00 2001 2 | From: Hongxu Jia 3 | Date: Mon, 14 Jul 2025 15:35:54 +0800 4 | Subject: [PATCH 33/41] third_party/ducc: fix ambiguous failure 5 | 6 | ... 7 | bazel-out/k8-opt/bin/external/ducc/_virtual_includes/fft/ducc/src/ducc0/fft/fftnd_impl.h:1393:20: error: call of '(ducc0::detail_mav::vmav, 2>) (long unsigned int&, size_t&)' is ambiguous 8 | 1393 | tmp(i,j) *= conj(roots[i*j]); 9 | | ~~~^~~~~ 10 | In file included from bazel-out/k8-opt/bin/external/ducc/_virtual_includes/fft/ducc/src/ducc0/fft/fft.h:65, 11 | from external/ducc/google/fft.cc:26: 12 | external/ducc/src/ducc0/infra/mav.h:803:41: note: there are 2 candidates 13 | 803 | template class vmav: public cmav 14 | | ^~~~ 15 | external/ducc/src/ducc0/infra/mav.h:765:39: note: candidate 1: 'const T& ducc0::detail_mav::cmav::operator()(Ns ...) 
const [with Ns = {long unsigned int, long unsigned int}; T = std::complex; long unsigned int ndim = 2]' 16 | 765 | template const T &operator()(Ns... ns) const 17 | | ^~~~~~~~ 18 | external/ducc/src/ducc0/infra/mav.h:856:33: note: candidate 2: 'T& ducc0::detail_mav::vmav::operator()(Ns ...) const [with Ns = {long unsigned int, long unsigned int}; T = std::complex; long unsigned int ndim = 2]' 19 | 856 | template T &operator()(Ns... ns) const 20 | ... 21 | 22 | Upstream-Status: Pending 23 | 24 | Signed-off-by: Hongxu Jia 25 | --- 26 | .../ducc/0001-fix-ambiguous-failure.patch | 51 +++++++++++++++++++ 27 | third_party/ducc/ducc.BUILD | 1 + 28 | third_party/ducc/workspace.bzl | 3 ++ 29 | 3 files changed, 55 insertions(+) 30 | create mode 100644 third_party/ducc/0001-fix-ambiguous-failure.patch 31 | 32 | diff --git a/third_party/ducc/0001-fix-ambiguous-failure.patch b/third_party/ducc/0001-fix-ambiguous-failure.patch 33 | new file mode 100644 34 | index 00000000000..2abb2848952 35 | --- /dev/null 36 | +++ b/third_party/ducc/0001-fix-ambiguous-failure.patch 37 | @@ -0,0 +1,51 @@ 38 | +From e297a09e813aa001be02737bddd2a7a1555518a7 Mon Sep 17 00:00:00 2001 39 | +From: Hongxu Jia 40 | +Date: Mon, 14 Jul 2025 15:21:20 +0800 41 | +Subject: [PATCH] fix ambiguous failure 42 | + 43 | +... 44 | +bazel-out/k8-opt/bin/external/ducc/_virtual_includes/fft/ducc/src/ducc0/fft/fftnd_impl.h:1393:20: error: call of '(ducc0::detail_mav::vmav, 2>) (long unsigned int&, size_t&)' is ambiguous 45 | + 1393 | tmp(i,j) *= conj(roots[i*j]); 46 | + | ~~~^~~~~ 47 | +In file included from bazel-out/k8-opt/bin/external/ducc/_virtual_includes/fft/ducc/src/ducc0/fft/fft.h:65, 48 | + from external/ducc/google/fft.cc:26: 49 | +external/ducc/src/ducc0/infra/mav.h:803:41: note: there are 2 candidates 50 | + 803 | template class vmav: public cmav 51 | + | ^~~~ 52 | +external/ducc/src/ducc0/infra/mav.h:765:39: note: candidate 1: 'const T& ducc0::detail_mav::cmav::operator()(Ns ...) 
const [with Ns = {long unsigned int, long unsigned int}; T = std::complex; long unsigned int ndim = 2]' 53 | + 765 | template const T &operator()(Ns... ns) const 54 | + | ^~~~~~~~ 55 | +external/ducc/src/ducc0/infra/mav.h:856:33: note: candidate 2: 'T& ducc0::detail_mav::vmav::operator()(Ns ...) const [with Ns = {long unsigned int, long unsigned int}; T = std::complex; long unsigned int ndim = 2]' 56 | + 856 | template T &operator()(Ns... ns) const 57 | +... 58 | + 59 | +Signed-off-by: Hongxu Jia 60 | +--- 61 | + src/ducc0/infra/mav.h | 4 ++-- 62 | + 1 file changed, 2 insertions(+), 2 deletions(-) 63 | + 64 | +diff --git a/src/ducc0/infra/mav.h b/src/ducc0/infra/mav.h 65 | +index 73f0441..87a9bc9 100644 66 | +--- a/src/ducc0/infra/mav.h 67 | ++++ b/src/ducc0/infra/mav.h 68 | +@@ -640,7 +640,7 @@ template class vfmav: public cfmav 69 | + 70 | + T *data() const 71 | + { return const_cast(tbuf::d); } 72 | +- template T &raw(I i) const 73 | ++ template const T &raw(I i) const 74 | + { return data()[i]; } 75 | + 76 | + // no-op. Needed for template tricks. 77 | +@@ -853,7 +853,7 @@ template class vmav: public cmav 78 | + vfmav to_fmav() const { return operator vfmav(); } 79 | + 80 | + using parent::operator(); 81 | +- template T &operator()(Ns... ns) const 82 | ++ template const T &operator()(Ns... ns) const 83 | + { return const_cast(parent::operator()(ns...)); } 84 | + 85 | + template vmav subarray(const vector &slices) const 86 | +-- 87 | +2.34.1 88 | + 89 | diff --git a/third_party/ducc/ducc.BUILD b/third_party/ducc/ducc.BUILD 90 | index a1c4956d0a7..c82f1247629 100644 91 | --- a/third_party/ducc/ducc.BUILD 92 | +++ b/third_party/ducc/ducc.BUILD 93 | @@ -9,6 +9,7 @@ DUCC_COPTS = [ 94 | "-frtti", 95 | "-fexceptions", 96 | "-ffp-contract=fast", 97 | + "-fpermissive", 98 | ] 99 | 100 | # This library exposes the raw DUCC fft API. 
It should be used 101 | diff --git a/third_party/ducc/workspace.bzl b/third_party/ducc/workspace.bzl 102 | index 99c8b14cd9f..0168c5e9886 100644 103 | --- a/third_party/ducc/workspace.bzl 104 | +++ b/third_party/ducc/workspace.bzl 105 | @@ -11,6 +11,9 @@ def repo(): 106 | sha256 = DUCC_SHA256, 107 | urls = tf_mirror_urls("https://gitlab.mpcdf.mpg.de/mtr/ducc/-/archive/{commit}/ducc-{commit}.tar.gz".format(commit = DUCC_COMMIT)), 108 | build_file = "//third_party/ducc:ducc.BUILD", 109 | + patch_file = [ 110 | + "//third_party/ducc:0001-fix-ambiguous-failure.patch", 111 | + ], 112 | link_files = { 113 | "//third_party/ducc:ducc0_custom_lowlevel_threading.h": "google/ducc0_custom_lowlevel_threading.h", 114 | "//third_party/ducc:fft.h": "google/fft.h", 115 | -------------------------------------------------------------------------------- /recipe/patches/0034-third_party-tf_runtime-fix-compile-failure.patch: -------------------------------------------------------------------------------- 1 | From e3ccc6eab6a708cf57bca45dcb8f7afc23156cfb Mon Sep 17 00:00:00 2001 2 | From: Hongxu Jia 3 | Date: Mon, 14 Jul 2025 16:13:33 +0800 4 | Subject: [PATCH 34/41] third_party/tf_runtime: fix compile failure 5 | 6 | ... 7 | In file included from external/tf_runtime/include/tfrt/support/crc32c.h:26, 8 | from external/tf_runtime/lib/support/crc32c.cc:19: 9 | external/tf_runtime/include/tfrt/support/forward_decls.h:110:15: error: 'int64_t' does not name a type 10 | 110 | using Index = int64_t; 11 | | ^~~~~~~ 12 | external/tf_runtime/include/tfrt/support/forward_decls.h:28:1: note: 'int64_t' is defined in header ''; this is probably fixable by adding '#include ' 13 | 27 | #include "llvm/Support/ErrorHandling.h" 14 | +++ |+#include 15 | ... 
16 | 17 | Upstream-Status: Pending 18 | 19 | Signed-off-by: Hongxu Jia 20 | --- 21 | .../tf_runtime/0001-fix-compile-failure.patch | 36 +++++++++++++++++++ 22 | third_party/tf_runtime/workspace.bzl | 6 ++-- 23 | 2 files changed, 39 insertions(+), 3 deletions(-) 24 | create mode 100644 third_party/tf_runtime/0001-fix-compile-failure.patch 25 | 26 | diff --git a/third_party/tf_runtime/0001-fix-compile-failure.patch b/third_party/tf_runtime/0001-fix-compile-failure.patch 27 | new file mode 100644 28 | index 00000000000..c05ea5b41e7 29 | --- /dev/null 30 | +++ b/third_party/tf_runtime/0001-fix-compile-failure.patch 31 | @@ -0,0 +1,36 @@ 32 | +From 7723f8c54b2b6f0db9c1420d4478de1f6dcf9459 Mon Sep 17 00:00:00 2001 33 | +From: Hongxu Jia 34 | +Date: Mon, 14 Jul 2025 16:04:18 +0800 35 | +Subject: [PATCH] fix compile failure 36 | + 37 | +... 38 | +In file included from external/tf_runtime/include/tfrt/support/crc32c.h:26, 39 | + from external/tf_runtime/lib/support/crc32c.cc:19: 40 | +external/tf_runtime/include/tfrt/support/forward_decls.h:110:15: error: 'int64_t' does not name a type 41 | + 110 | using Index = int64_t; 42 | + | ^~~~~~~ 43 | +external/tf_runtime/include/tfrt/support/forward_decls.h:28:1: note: 'int64_t' is defined in header ''; this is probably fixable by adding '#include ' 44 | + 27 | #include "llvm/Support/ErrorHandling.h" 45 | + +++ |+#include 46 | +... 
47 | + 48 | +Signed-off-by: Hongxu Jia 49 | +--- 50 | + include/tfrt/support/forward_decls.h | 1 + 51 | + 1 file changed, 1 insertion(+) 52 | + 53 | +diff --git a/include/tfrt/support/forward_decls.h b/include/tfrt/support/forward_decls.h 54 | +index c3a4250..d8e28db 100644 55 | +--- a/include/tfrt/support/forward_decls.h 56 | ++++ b/include/tfrt/support/forward_decls.h 57 | +@@ -25,6 +25,7 @@ 58 | + 59 | + #include "llvm/Support/Casting.h" 60 | + #include "llvm/Support/ErrorHandling.h" 61 | ++#include 62 | + 63 | + // We don't forward declare: 64 | + // DenseMap, SmallVector, StringMap, StringSet 65 | +-- 66 | +2.34.1 67 | + 68 | diff --git a/third_party/tf_runtime/workspace.bzl b/third_party/tf_runtime/workspace.bzl 69 | index 69fc0f81f6f..460641dde06 100644 70 | --- a/third_party/tf_runtime/workspace.bzl 71 | +++ b/third_party/tf_runtime/workspace.bzl 72 | @@ -18,7 +18,7 @@ def repo(): 73 | "@tsl": "@local_tsl", 74 | "@xla": "@local_xla", 75 | }, 76 | - # A patch file can be provided for atomic commits to both TF and TFRT. 77 | - # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. 
78 | - patch_file = None, 79 | + patch_file = [ 80 | + "//third_party/tf_runtime:0001-fix-compile-failure.patch", 81 | + ], 82 | ) 83 | -------------------------------------------------------------------------------- /recipe/patches/0035-support-to-build-with-gcc-15.patch: -------------------------------------------------------------------------------- 1 | From 32ce8d77beabcc754ec5ae2b446ff7f2f3378251 Mon Sep 17 00:00:00 2001 2 | From: Hongxu Jia 3 | Date: Tue, 25 Mar 2025 22:38:37 -0700 4 | Subject: [PATCH 35/41] support to build with gcc 15 5 | 6 | Upstream-Status: Pending 7 | 8 | Suggested-by: Martin Jansa 9 | Signed-off-by: Hongxu Jia 10 | --- 11 | ...r-internal-Explicitly-include-cstdin.patch | 35 +++++++++++++++++++ 12 | third_party/absl/workspace.bzl | 1 + 13 | ...r-internal-Explicitly-include-cstdin.patch | 35 +++++++++++++++++++ 14 | .../tsl/third_party/absl/workspace.bzl | 1 + 15 | 4 files changed, 72 insertions(+) 16 | create mode 100644 third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 17 | create mode 100644 third_party/xla/third_party/tsl/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 18 | 19 | diff --git a/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch b/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 20 | new file mode 100644 21 | index 00000000000..02f6385b5c6 22 | --- /dev/null 23 | +++ b/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 24 | @@ -0,0 +1,35 @@ 25 | +From 809e5de7b92950849289236a5a09e9cb4f32c7b9 Mon Sep 17 00:00:00 2001 26 | +From: Christopher Fore 27 | +Date: Mon, 5 Aug 2024 12:09:18 -0700 28 | +Subject: [PATCH] PR #1739: container/internal: Explicitly include 29 | + 30 | +Imported from GitHub PR https://github.com/abseil/abseil-cpp/pull/1739 31 | + 32 | +GCC 15 will no longer include by default, resulting in build failures in projects that do not explicitly include it. 
33 | + 34 | +Merge faf1b03a591f06933da02976119da5743f428e4f into 9cb5e5d15c142e5cc43a2c1db87c8e4e5b6d38a5 35 | + 36 | +Merging this change closes #1739 37 | + 38 | +COPYBARA_INTEGRATE_REVIEW=https://github.com/abseil/abseil-cpp/pull/1739 from csfore:gcc-15-fix faf1b03a591f06933da02976119da5743f428e4f 39 | +PiperOrigin-RevId: 659637669 40 | +Change-Id: If14cb0e3522774cb700bd5a74abffb75feb7a0f5 41 | +--- 42 | + absl/container/internal/container_memory.h | 1 + 43 | + 1 file changed, 1 insertion(+) 44 | + 45 | +diff --git a/absl/container/internal/container_memory.h b/absl/container/internal/container_memory.h 46 | +index ba8e08a2..e7031797 100644 47 | +--- a/absl/container/internal/container_memory.h 48 | ++++ b/absl/container/internal/container_memory.h 49 | +@@ -17,6 +17,7 @@ 50 | + 51 | + #include 52 | + #include 53 | ++#include 54 | + #include 55 | + #include 56 | + #include 57 | +-- 58 | +2.25.1 59 | + 60 | diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl 61 | index 0bb3fbbf074..ae6259859a1 100644 62 | --- a/third_party/absl/workspace.bzl 63 | +++ b/third_party/absl/workspace.bzl 64 | @@ -48,5 +48,6 @@ def repo(): 65 | urls = tf_mirror_urls("https://github.com/abseil/abseil-cpp/archive/{commit}.tar.gz".format(commit = ABSL_COMMIT)), 66 | patch_file = [ 67 | "//third_party/absl:nvidia_jetson.patch", 68 | + "//third_party/absl:0001-PR-1739-container-internal-Explicitly-include-cstdin.patch", 69 | ], 70 | ) 71 | diff --git a/third_party/xla/third_party/tsl/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch b/third_party/xla/third_party/tsl/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 72 | new file mode 100644 73 | index 00000000000..02f6385b5c6 74 | --- /dev/null 75 | +++ b/third_party/xla/third_party/tsl/third_party/absl/0001-PR-1739-container-internal-Explicitly-include-cstdin.patch 76 | @@ -0,0 +1,35 @@ 77 | +From 809e5de7b92950849289236a5a09e9cb4f32c7b9 Mon Sep 17 
00:00:00 2001 78 | +From: Christopher Fore 79 | +Date: Mon, 5 Aug 2024 12:09:18 -0700 80 | +Subject: [PATCH] PR #1739: container/internal: Explicitly include 81 | + 82 | +Imported from GitHub PR https://github.com/abseil/abseil-cpp/pull/1739 83 | + 84 | +GCC 15 will no longer include by default, resulting in build failures in projects that do not explicitly include it. 85 | + 86 | +Merge faf1b03a591f06933da02976119da5743f428e4f into 9cb5e5d15c142e5cc43a2c1db87c8e4e5b6d38a5 87 | + 88 | +Merging this change closes #1739 89 | + 90 | +COPYBARA_INTEGRATE_REVIEW=https://github.com/abseil/abseil-cpp/pull/1739 from csfore:gcc-15-fix faf1b03a591f06933da02976119da5743f428e4f 91 | +PiperOrigin-RevId: 659637669 92 | +Change-Id: If14cb0e3522774cb700bd5a74abffb75feb7a0f5 93 | +--- 94 | + absl/container/internal/container_memory.h | 1 + 95 | + 1 file changed, 1 insertion(+) 96 | + 97 | +diff --git a/absl/container/internal/container_memory.h b/absl/container/internal/container_memory.h 98 | +index ba8e08a2..e7031797 100644 99 | +--- a/absl/container/internal/container_memory.h 100 | ++++ b/absl/container/internal/container_memory.h 101 | +@@ -17,6 +17,7 @@ 102 | + 103 | + #include 104 | + #include 105 | ++#include 106 | + #include 107 | + #include 108 | + #include 109 | +-- 110 | +2.25.1 111 | + 112 | diff --git a/third_party/xla/third_party/tsl/third_party/absl/workspace.bzl b/third_party/xla/third_party/tsl/third_party/absl/workspace.bzl 113 | index d5973b13b39..c9004e92df8 100644 114 | --- a/third_party/xla/third_party/tsl/third_party/absl/workspace.bzl 115 | +++ b/third_party/xla/third_party/tsl/third_party/absl/workspace.bzl 116 | @@ -46,5 +46,6 @@ def repo(): 117 | urls = tf_mirror_urls("https://github.com/abseil/abseil-cpp/archive/{commit}.tar.gz".format(commit = ABSL_COMMIT)), 118 | patch_file = [ 119 | "//third_party/absl:nvidia_jetson.patch", 120 | + "//third_party/absl:0001-PR-1739-container-internal-Explicitly-include-cstdin.patch", 121 | ], 122 | ) 123 | 
-------------------------------------------------------------------------------- /recipe/patches/0036-third_party-eigen_archive-workaround-ice-failure-whi.patch: -------------------------------------------------------------------------------- 1 | From 98d2b8a662865334a04a46675ae3a4259e08b362 Mon Sep 17 00:00:00 2001 2 | From: Hongxu Jia 3 | Date: Mon, 16 May 2022 23:15:47 -0700 4 | Subject: [PATCH 36/41] third_party/eigen_archive:workaround ice failure while 5 | building from source with gcc 7.3.1 6 | 7 | The root cause is gcc ice issue: 8 | https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89752 9 | 10 | But we could not patch on native gcc,so try to workaround, 11 | Refer: https://github.com/tensorflow/tensorflow/issues/25323 12 | 13 | Upstream-Status: Inappropriate [wr-installer specific] 14 | 15 | Signed-off-by: Hongxu Jia 16 | --- 17 | ...ailure-while-building-from-source-wi.patch | 68 +++++++++++++++++++ 18 | third_party/eigen3/workspace.bzl | 1 + 19 | 2 files changed, 69 insertions(+) 20 | create mode 100644 third_party/eigen3/0001-workaround-ice-failure-while-building-from-source-wi.patch 21 | 22 | diff --git a/third_party/eigen3/0001-workaround-ice-failure-while-building-from-source-wi.patch b/third_party/eigen3/0001-workaround-ice-failure-while-building-from-source-wi.patch 23 | new file mode 100644 24 | index 00000000000..82ef62a0d9a 25 | --- /dev/null 26 | +++ b/third_party/eigen3/0001-workaround-ice-failure-while-building-from-source-wi.patch 27 | @@ -0,0 +1,68 @@ 28 | +From 649d0b938aab1b7081c3895aa1f527d034edb735 Mon Sep 17 00:00:00 2001 29 | +From: Hongxu Jia 30 | +Date: Fri, 23 Aug 2024 10:49:54 +0800 31 | +Subject: [PATCH] workaround ice failure while building from source with gcc 32 | + 7.3.1 33 | + 34 | +The root cause is gcc ice issue: 35 | +https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89752 36 | + 37 | +But we could not patch on native gcc,so try to workaround, 38 | +Refer: https://github.com/tensorflow/tensorflow/issues/25323 39 | + 40 | 
+Signed-off-by: Hongxu Jia 41 | +--- 42 | + unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h | 4 +++- 43 | + unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h | 9 ++++++--- 44 | + 2 files changed, 9 insertions(+), 4 deletions(-) 45 | + 46 | +diff --git a/unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h b/unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h 47 | +index 291301a..94e78ba 100644 48 | +--- a/unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h 49 | ++++ b/unsupported/Eigen/CXX11/src/Tensor/TensorImagePatch.h 50 | +@@ -531,7 +531,9 @@ struct TensorEvaluator, Device> { 51 | + EIGEN_ALIGN_MAX std::remove_const_t values[PacketSize]; 52 | + EIGEN_UNROLL_LOOP 53 | + for (int i = 0; i < PacketSize; ++i) { 54 | +- values[i] = coeff(index + i); 55 | ++ Self::CoeffReturnType a = coeff(index+i); 56 | ++ values[i] = a; 57 | ++ 58 | + } 59 | + PacketReturnType rslt = internal::pload(values); 60 | + return rslt; 61 | +diff --git a/unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h b/unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h 62 | +index 2ecbb7c..ae0f745 100644 63 | +--- a/unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h 64 | ++++ b/unsupported/Eigen/CXX11/src/Tensor/TensorReduction.h 65 | +@@ -838,8 +838,9 @@ struct TensorReductionEvaluatorBase::reduce(*this, firstIndex + i * num_values_to_reduce, 70 | ++ Self::CoeffReturnType a = internal::InnerMostDimReducer::reduce(*this, firstIndex + i * num_values_to_reduce, 71 | + num_values_to_reduce, reducer); 72 | ++ values[i] = a; 73 | + } 74 | + } else if (PreservingInnerMostDims) { 75 | + const Index firstIndex = firstInput(index); 76 | +@@ -852,12 +853,14 @@ struct TensorReductionEvaluatorBase(values); 93 | +-- 94 | +2.27.0 95 | + 96 | diff --git a/third_party/eigen3/workspace.bzl b/third_party/eigen3/workspace.bzl 97 | index f06eca368c1..40bdebd6796 100644 98 | --- a/third_party/eigen3/workspace.bzl 99 | +++ b/third_party/eigen3/workspace.bzl 100 | @@ -14,6 +14,7 @@ def repo(): 101 | 
tf_http_archive( 102 | name = "eigen_archive", 103 | build_file = "//third_party/eigen3:eigen_archive.BUILD", 104 | + patch_file = ["//third_party/eigen3:0001-workaround-ice-failure-while-building-from-source-wi.patch"], 105 | sha256 = EIGEN_SHA256, 106 | strip_prefix = "eigen-{commit}".format(commit = EIGEN_COMMIT), 107 | urls = tf_mirror_urls("https://gitlab.com/libeigen/eigen/-/archive/{commit}/eigen-{commit}.tar.gz".format(commit = EIGEN_COMMIT)), 108 | -------------------------------------------------------------------------------- /recipe/patches/0037-add-absl_tracing_internal.patch: -------------------------------------------------------------------------------- 1 | From 26467a0a05242d0a46f5d3abdaaff9050bc06ad3 Mon Sep 17 00:00:00 2001 2 | From: Isuru Fernando 3 | Date: Wed, 6 Aug 2025 15:36:13 +0000 4 | Subject: [PATCH 37/41] add absl_tracing_internal 5 | 6 | --- 7 | tensorflow/core/kernels/batching_util/BUILD | 1 + 8 | third_party/absl/system.absl.base.BUILD | 7 +++++++ 9 | 2 files changed, 8 insertions(+) 10 | 11 | diff --git a/tensorflow/core/kernels/batching_util/BUILD b/tensorflow/core/kernels/batching_util/BUILD 12 | index f953c4de7e7..2b5853055b3 100644 13 | --- a/tensorflow/core/kernels/batching_util/BUILD 14 | +++ b/tensorflow/core/kernels/batching_util/BUILD 15 | @@ -466,6 +466,7 @@ cc_library( 16 | "@com_google_absl//absl/synchronization", 17 | "@com_google_absl//absl/time", 18 | "@com_google_absl//absl/types:optional", 19 | + "@com_google_absl//absl/base:tracing_internal", 20 | "@local_tsl//tsl/platform:criticality", 21 | ], 22 | ) 23 | diff --git a/third_party/absl/system.absl.base.BUILD b/third_party/absl/system.absl.base.BUILD 24 | index b31592d31a3..6534793637c 100644 25 | --- a/third_party/absl/system.absl.base.BUILD 26 | +++ b/third_party/absl/system.absl.base.BUILD 27 | @@ -25,6 +25,13 @@ cc_library( 28 | ], 29 | ) 30 | 31 | +cc_library( 32 | + name = "tracing_internal", 33 | + linkopts = [ 34 | + "-labsl_tracing_internal", 35 | + ], 36 | 
+) 37 | + 38 | cc_library( 39 | name = "nullability", 40 | deps = [ 41 | -------------------------------------------------------------------------------- /recipe/patches/0038-Fix-building-different-python-wheels-from-one-python.patch: -------------------------------------------------------------------------------- 1 | From 80841e060482db5645edd9db41aa2e5132dfe076 Mon Sep 17 00:00:00 2001 2 | From: Isuru Fernando 3 | Date: Wed, 6 Aug 2025 17:50:08 +0000 4 | Subject: [PATCH 38/41] Fix building different python wheels from one python 5 | 6 | --- 7 | tensorflow/tools/pip_package/build_pip_package.py | 11 ++++++++++- 8 | tensorflow/tools/pip_package/utils/tf_wheel.bzl | 1 + 9 | 2 files changed, 11 insertions(+), 1 deletion(-) 10 | 11 | diff --git a/tensorflow/tools/pip_package/build_pip_package.py b/tensorflow/tools/pip_package/build_pip_package.py 12 | index e254358d435..0f8426a411a 100644 13 | --- a/tensorflow/tools/pip_package/build_pip_package.py 14 | +++ b/tensorflow/tools/pip_package/build_pip_package.py 15 | @@ -60,6 +60,9 @@ def parse_args() -> argparse.Namespace: 16 | required=True, 17 | help="Platform name to be passed to setup.py", 18 | ) 19 | + parser.add_argument( 20 | + "--full-wheel-name", 21 | + ) 22 | parser.add_argument( 23 | "--headers", help="header files for the wheel", action="append" 24 | ) 25 | @@ -419,6 +422,7 @@ def build_wheel( 26 | project_name: str, 27 | platform: str, 28 | collab: str = False, 29 | + full_wheel_name: str = "", 30 | ) -> None: 31 | """Build the wheel in the target directory. 
32 | 33 | @@ -444,13 +448,17 @@ def build_wheel( 34 | sys.executable, 35 | "tensorflow/tools/pip_package/setup.py", 36 | "bdist_wheel", 37 | - f"--dist-dir={dir_path}", 38 | + f"--dist-dir={dir_path}.bak", 39 | f"--plat-name={platform}", 40 | ], 41 | check=True, 42 | cwd=cwd, 43 | env=env, 44 | ) 45 | + wheels = list(glob.glob(f"{dir_path}.bak/*.whl")) 46 | + assert len(wheels) == 1 47 | + for f in wheels: 48 | + os.rename(f, f"{dir_path}/{full_wheel_name}") 49 | 50 | 51 | if __name__ == "__main__": 52 | @@ -472,6 +480,7 @@ if __name__ == "__main__": 53 | args.project_name, 54 | args.platform, 55 | args.collab, 56 | + args.full_wheel_name, 57 | ) 58 | finally: 59 | temp_dir.cleanup() 60 | diff --git a/tensorflow/tools/pip_package/utils/tf_wheel.bzl b/tensorflow/tools/pip_package/utils/tf_wheel.bzl 61 | index 057779e946e..dd34695b5dd 100644 62 | --- a/tensorflow/tools/pip_package/utils/tf_wheel.bzl 63 | +++ b/tensorflow/tools/pip_package/utils/tf_wheel.bzl 64 | @@ -91,6 +91,7 @@ def _tf_wheel_impl(ctx): 65 | ctx.attr.platform_name, 66 | ctx.attr.platform_tag, 67 | )) 68 | + args.add("--full-wheel-name", full_wheel_name) 69 | args.add("--collab", str(WHEEL_COLLAB)) 70 | args.add("--output-name", wheel_dir) 71 | args.add("--version", VERSION) 72 | -------------------------------------------------------------------------------- /recipe/patches/0039-Fix-matmul-unused-result-error.patch: -------------------------------------------------------------------------------- 1 | From 1f37dd0417cfe833272399a629bd208729e94d92 Mon Sep 17 00:00:00 2001 2 | From: =?UTF-8?q?Jakub=20Klinkovsk=C3=BD?= 3 | Date: Wed, 6 Aug 2025 18:16:06 +0000 4 | Subject: [PATCH 39/41] Fix matmul unused result error 5 | 6 | --- 7 | .../toco/graph_transformations/resolve_tensorflow_matmul.cc | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/tensorflow/lite/toco/graph_transformations/resolve_tensorflow_matmul.cc 
b/tensorflow/lite/toco/graph_transformations/resolve_tensorflow_matmul.cc 11 | index ccca3f7a2e0..e961891dda4 100644 12 | --- a/tensorflow/lite/toco/graph_transformations/resolve_tensorflow_matmul.cc 13 | +++ b/tensorflow/lite/toco/graph_transformations/resolve_tensorflow_matmul.cc 14 | @@ -174,7 +174,7 @@ TransposeOperator* FindTransposeOpWithInput(const Model& model, 15 | fc_op->outputs = matmul_op->outputs; 16 | 17 | // Insert the newly constructed FullyConnectedOperator. 18 | - model->operators.emplace(matmul_it, fc_op) + 1; 19 | + model->operators.emplace(matmul_it, fc_op); 20 | 21 | // Find the op producing the array passed to this MatMul 22 | auto previous_op_it = model->operators.begin(); 23 | -------------------------------------------------------------------------------- /recipe/patches/0040-Support-cuda-12.8.patch: -------------------------------------------------------------------------------- 1 | From 6e4e98cd941c790f5698ff0111ed632b676bbf7c Mon Sep 17 00:00:00 2001 2 | From: oqs <2227-loqs@users.noreply.gitlab.archlinux.org> 3 | Date: Wed, 6 Aug 2025 19:34:12 +0000 4 | Subject: [PATCH 40/41] Support cuda 12.8 5 | 6 | Co-authored-by: Isuru Fernando 7 | --- 8 | tensorflow/core/kernels/gpu_prim.h | 19 +++++++------------ 9 | third_party/xla/xla/service/gpu/gpu_prim.h | 6 ++---- 10 | 2 files changed, 9 insertions(+), 16 deletions(-) 11 | 12 | diff --git a/tensorflow/core/kernels/gpu_prim.h b/tensorflow/core/kernels/gpu_prim.h 13 | index bef22b50ada..821b476cce9 100644 14 | --- a/tensorflow/core/kernels/gpu_prim.h 15 | +++ b/tensorflow/core/kernels/gpu_prim.h 16 | @@ -37,32 +37,27 @@ namespace gpuprim = ::cub; 17 | 18 | // Required for sorting Eigen::half and bfloat16. 
19 | namespace cub { 20 | -template <> 21 | -__device__ __forceinline__ void ThreadStoreVolatilePtr( 22 | +__device__ __forceinline__ void ThreadStoreVolatilePtr( 23 | Eigen::half *ptr, Eigen::half val, Int2Type /*is_primitive*/) { 24 | *reinterpret_cast(ptr) = 25 | Eigen::numext::bit_cast(val); 26 | } 27 | 28 | -template <> 29 | -__device__ __forceinline__ Eigen::half ThreadLoadVolatilePointer( 30 | - Eigen::half *ptr, Int2Type /*is_primitive*/) { 31 | - uint16_t result = *reinterpret_cast(ptr); 32 | +__device__ __forceinline__ Eigen::half ThreadLoadVolatilePointer( 33 | + const Eigen::half *ptr, Int2Type /*is_primitive*/) { 34 | + uint16_t result = *reinterpret_cast(ptr); 35 | return Eigen::numext::bit_cast(result); 36 | } 37 | 38 | -template <> 39 | -__device__ __forceinline__ void ThreadStoreVolatilePtr( 40 | +__device__ __forceinline__ void ThreadStoreVolatilePtr( 41 | Eigen::bfloat16 *ptr, Eigen::bfloat16 val, 42 | Int2Type /*is_primitive*/) { 43 | *reinterpret_cast(ptr) = 44 | Eigen::numext::bit_cast(val); 45 | } 46 | 47 | -template <> 48 | -__device__ __forceinline__ Eigen::bfloat16 49 | -ThreadLoadVolatilePointer(Eigen::bfloat16 *ptr, 50 | - Int2Type /*is_primitive*/) { 51 | +__device__ __forceinline__ Eigen::bfloat16 ThreadLoadVolatilePointer( 52 | + Eigen::bfloat16 *ptr, Int2Type /*is_primitive*/) { 53 | uint16_t result = *reinterpret_cast(ptr); 54 | return Eigen::numext::bit_cast(result); 55 | } 56 | diff --git a/third_party/xla/xla/service/gpu/gpu_prim.h b/third_party/xla/xla/service/gpu/gpu_prim.h 57 | index 83605864498..d23453f88e1 100644 58 | --- a/third_party/xla/xla/service/gpu/gpu_prim.h 59 | +++ b/third_party/xla/xla/service/gpu/gpu_prim.h 60 | @@ -37,8 +37,7 @@ namespace gpuprim = ::cub; 61 | 62 | // Required for sorting Eigen::half and bfloat16. 
63 | namespace cub { 64 | -template <> 65 | -__device__ __forceinline__ void ThreadStoreVolatilePtr( 66 | +__device__ __forceinline__ void ThreadStoreVolatilePtr( 67 | Eigen::half *ptr, Eigen::half val, Int2Type /*is_primitive*/) { 68 | *reinterpret_cast(ptr) = 69 | Eigen::numext::bit_cast(val); 70 | @@ -50,8 +49,7 @@ __device__ __forceinline__ Eigen::half ThreadLoadVolatilePointer( 71 | return Eigen::numext::bit_cast(result); 72 | } 73 | 74 | -template <> 75 | -__device__ __forceinline__ void ThreadStoreVolatilePtr( 76 | +__device__ __forceinline__ void ThreadStoreVolatilePtr( 77 | tsl::bfloat16 *ptr, tsl::bfloat16 val, Int2Type /*is_primitive*/) { 78 | *reinterpret_cast(ptr) = 79 | Eigen::numext::bit_cast(val); 80 | -------------------------------------------------------------------------------- /recipe/patches/0041-Disable-profiler.patch: -------------------------------------------------------------------------------- 1 | From 5e6f65f2fbcaef687cb91caa71ff6aea3448f584 Mon Sep 17 00:00:00 2001 2 | From: Isuru Fernando 3 | Date: Mon, 13 Oct 2025 19:52:15 +0000 4 | Subject: [PATCH 41/41] Disable profiler 5 | 6 | Enabling the profiler results in 7 | 8 | E0000 00:00:1755006819.633883 1325783 descriptor_database.cc:678] File already exists in database: tensorflow/core/example/feature.proto 9 | F0000 00:00:1755006819.633924 1325783 descriptor.cc:2519] Check failed: GeneratedDatabase()->Add(encoded_file_descriptor, size) 10 | 11 | isuruf and xhochy spent lots of time figuring out bazel and could 12 | not arrive at a solution. 
13 | 14 | See https://github.com/conda-forge/tensorflow-feedstock/pull/437#issuecomment-3391979920 15 | --- 16 | tensorflow/python/profiler/profiler_client.py | 3 ++- 17 | 1 file changed, 2 insertions(+), 1 deletion(-) 18 | 19 | diff --git a/tensorflow/python/profiler/profiler_client.py b/tensorflow/python/profiler/profiler_client.py 20 | index ad931ce6d69..3fec5ea1cd5 100644 21 | --- a/tensorflow/python/profiler/profiler_client.py 22 | +++ b/tensorflow/python/profiler/profiler_client.py 23 | @@ -15,7 +15,6 @@ 24 | """Profiler client APIs.""" 25 | 26 | from tensorflow.python.framework import errors 27 | -from tensorflow.python.profiler.internal import _pywrap_profiler_plugin 28 | from tensorflow.python.util.tf_export import tf_export 29 | 30 | _GRPC_PREFIX = 'grpc://' 31 | @@ -125,6 +124,7 @@ def trace(service_addr, 32 | 'duration_ms must be greater than zero.') 33 | 34 | opts = dict(options._asdict()) if options is not None else {} 35 | + raise RuntimeError("conda-forge builds do not support the profiler plugin yet.") 36 | _pywrap_profiler_plugin.trace( 37 | _strip_addresses(service_addr, _GRPC_PREFIX), 38 | logdir, 39 | @@ -165,6 +165,7 @@ def monitor(service_addr, duration_ms, level=1): 40 | ``` 41 | 42 | """ 43 | + raise RuntimeError("conda-forge builds do not support the profiler plugin yet.") 44 | return _pywrap_profiler_plugin.monitor( 45 | _strip_prefix(service_addr, _GRPC_PREFIX), duration_ms, level, True 46 | ) 47 | -------------------------------------------------------------------------------- /recipe/patches/0042-bump-h5py-req.patch: -------------------------------------------------------------------------------- 1 | From ee4412361565a5c91225a5508211580f0b68092e Mon Sep 17 00:00:00 2001 2 | From: dslarm 3 | Date: Tue, 16 Sep 2025 19:41:25 +0000 4 | Subject: [PATCH 42/42] bump h5py req 5 | 6 | h5py and psutil are not available as a binary for 7 | the versions of h5py in tensorflow 2.18's spec, but the 8 | system won't try to build it. 
Bump the version so we get ones that 9 | do exist. 10 | --- 11 | .../requirements_updater/requirements.in | 3 +- 12 | requirements_lock_3_10.txt | 45 +------------------ 13 | requirements_lock_3_11.txt | 45 +------------------ 14 | requirements_lock_3_12.txt | 45 +------------------ 15 | requirements_lock_3_9.txt | 44 +----------------- 16 | 5 files changed, 10 insertions(+), 172 deletions(-) 17 | 18 | diff --git a/ci/official/requirements_updater/requirements.in b/ci/official/requirements_updater/requirements.in 19 | index 68871582683..145aab77a17 100644 20 | --- a/ci/official/requirements_updater/requirements.in 21 | +++ b/ci/official/requirements_updater/requirements.in 22 | @@ -1,7 +1,7 @@ 23 | # Note that numpy 2.1.0 does not support python 3.9 24 | numpy >= 2.0.0, < 2.2.0 25 | wheel ~= 0.41.2 26 | -h5py >= 3.11.0 27 | +h5py >= 3.12.0 28 | lit ~= 17.0.2 29 | opt_einsum == 3.3.0 30 | astunparse == 1.6.3 31 | @@ -23,6 +23,7 @@ tensorboard ~= 2.19.0 32 | # Test dependencies 33 | grpcio >= 1.24.3, < 2.0 34 | portpicker == 1.6.0 35 | +psutil >=7.0.0 36 | scipy >= 1.13.0 37 | requests >= 2.31.0 38 | packaging==23.2 39 | diff --git a/requirements_lock_3_10.txt b/requirements_lock_3_10.txt 40 | index 2466c99b550..b5ac15546b9 100644 41 | --- a/requirements_lock_3_10.txt 42 | +++ b/requirements_lock_3_10.txt 43 | @@ -185,31 +185,7 @@ grpcio==1.64.1 \ 44 | # via 45 | # -r ci/official/requirements_updater/requirements.in 46 | # tensorboard 47 | -h5py==3.11.0 \ 48 | - --hash=sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e \ 49 | - --hash=sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731 \ 50 | - --hash=sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892 \ 51 | - --hash=sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3 \ 52 | - --hash=sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1 \ 53 | - 
--hash=sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea \ 54 | - --hash=sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b \ 55 | - --hash=sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62 \ 56 | - --hash=sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150 \ 57 | - --hash=sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007 \ 58 | - --hash=sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00 \ 59 | - --hash=sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9 \ 60 | - --hash=sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76 \ 61 | - --hash=sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab \ 62 | - --hash=sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb \ 63 | - --hash=sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba \ 64 | - --hash=sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5 \ 65 | - --hash=sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3 \ 66 | - --hash=sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972 \ 67 | - --hash=sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc \ 68 | - --hash=sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0 69 | - # via 70 | - # -r ci/official/requirements_updater/requirements.in 71 | - # keras 72 | +h5py>=3.12.0 73 | idna==3.7 \ 74 | --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 75 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 76 | @@ -582,24 +558,7 @@ protobuf==4.25.3 \ 77 | --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ 78 | --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 79 | # via tensorboard 80 | -psutil==5.9.8 \ 81 | - 
--hash=sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d \ 82 | - --hash=sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73 \ 83 | - --hash=sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8 \ 84 | - --hash=sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2 \ 85 | - --hash=sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e \ 86 | - --hash=sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36 \ 87 | - --hash=sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7 \ 88 | - --hash=sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c \ 89 | - --hash=sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee \ 90 | - --hash=sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421 \ 91 | - --hash=sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf \ 92 | - --hash=sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81 \ 93 | - --hash=sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0 \ 94 | - --hash=sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631 \ 95 | - --hash=sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4 \ 96 | - --hash=sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8 97 | - # via portpicker 98 | +psutil>=7.0.0 99 | pyelftools==0.31 \ 100 | --hash=sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99 \ 101 | --hash=sha256:f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607 102 | diff --git a/requirements_lock_3_11.txt b/requirements_lock_3_11.txt 103 | index bf067084003..4a9166b8fdd 100644 104 | --- a/requirements_lock_3_11.txt 105 | +++ b/requirements_lock_3_11.txt 106 | @@ -185,31 +185,7 @@ grpcio==1.64.1 \ 107 | # via 108 | # -r ci/official/requirements_updater/requirements.in 109 | # tensorboard 110 | -h5py==3.11.0 \ 111 | 
- --hash=sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e \ 112 | - --hash=sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731 \ 113 | - --hash=sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892 \ 114 | - --hash=sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3 \ 115 | - --hash=sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1 \ 116 | - --hash=sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea \ 117 | - --hash=sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b \ 118 | - --hash=sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62 \ 119 | - --hash=sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150 \ 120 | - --hash=sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007 \ 121 | - --hash=sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00 \ 122 | - --hash=sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9 \ 123 | - --hash=sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76 \ 124 | - --hash=sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab \ 125 | - --hash=sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb \ 126 | - --hash=sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba \ 127 | - --hash=sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5 \ 128 | - --hash=sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3 \ 129 | - --hash=sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972 \ 130 | - --hash=sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc \ 131 | - --hash=sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0 132 | - # via 133 | - # -r ci/official/requirements_updater/requirements.in 134 | - # keras 135 | +h5py>=3.12.0 136 | idna==3.7 \ 137 | 
--hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 138 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 139 | @@ -582,24 +558,7 @@ protobuf==4.25.3 \ 140 | --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ 141 | --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 142 | # via tensorboard 143 | -psutil==5.9.8 \ 144 | - --hash=sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d \ 145 | - --hash=sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73 \ 146 | - --hash=sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8 \ 147 | - --hash=sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2 \ 148 | - --hash=sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e \ 149 | - --hash=sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36 \ 150 | - --hash=sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7 \ 151 | - --hash=sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c \ 152 | - --hash=sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee \ 153 | - --hash=sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421 \ 154 | - --hash=sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf \ 155 | - --hash=sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81 \ 156 | - --hash=sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0 \ 157 | - --hash=sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631 \ 158 | - --hash=sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4 \ 159 | - --hash=sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8 160 | - # via portpicker 161 | +psutil>=7.0.0 162 | pyelftools==0.31 \ 163 | 
--hash=sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99 \ 164 | --hash=sha256:f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607 165 | diff --git a/requirements_lock_3_12.txt b/requirements_lock_3_12.txt 166 | index c7b9317ee2b..1a803ef414f 100644 167 | --- a/requirements_lock_3_12.txt 168 | +++ b/requirements_lock_3_12.txt 169 | @@ -185,31 +185,7 @@ grpcio==1.64.1 \ 170 | # via 171 | # -r ci/official/requirements_updater/requirements.in 172 | # tensorboard 173 | -h5py==3.11.0 \ 174 | - --hash=sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e \ 175 | - --hash=sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731 \ 176 | - --hash=sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892 \ 177 | - --hash=sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3 \ 178 | - --hash=sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1 \ 179 | - --hash=sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea \ 180 | - --hash=sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b \ 181 | - --hash=sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62 \ 182 | - --hash=sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150 \ 183 | - --hash=sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007 \ 184 | - --hash=sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00 \ 185 | - --hash=sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9 \ 186 | - --hash=sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76 \ 187 | - --hash=sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab \ 188 | - --hash=sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb \ 189 | - --hash=sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba \ 190 | - 
--hash=sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5 \ 191 | - --hash=sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3 \ 192 | - --hash=sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972 \ 193 | - --hash=sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc \ 194 | - --hash=sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0 195 | - # via 196 | - # -r ci/official/requirements_updater/requirements.in 197 | - # keras 198 | +h5py>=3.12.0 199 | idna==3.7 \ 200 | --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 201 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 202 | @@ -582,24 +558,7 @@ protobuf==4.25.3 \ 203 | --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ 204 | --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 205 | # via tensorboard 206 | -psutil==5.9.8 \ 207 | - --hash=sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d \ 208 | - --hash=sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73 \ 209 | - --hash=sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8 \ 210 | - --hash=sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2 \ 211 | - --hash=sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e \ 212 | - --hash=sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36 \ 213 | - --hash=sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7 \ 214 | - --hash=sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c \ 215 | - --hash=sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee \ 216 | - --hash=sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421 \ 217 | - --hash=sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf \ 218 | - 
--hash=sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81 \ 219 | - --hash=sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0 \ 220 | - --hash=sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631 \ 221 | - --hash=sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4 \ 222 | - --hash=sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8 223 | - # via portpicker 224 | +psutil>=7.0.0 225 | pyelftools==0.31 \ 226 | --hash=sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99 \ 227 | --hash=sha256:f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607 228 | diff --git a/requirements_lock_3_9.txt b/requirements_lock_3_9.txt 229 | index bf40d037b14..2d9911ac3c4 100644 230 | --- a/requirements_lock_3_9.txt 231 | +++ b/requirements_lock_3_9.txt 232 | @@ -185,31 +185,7 @@ grpcio==1.64.1 \ 233 | # via 234 | # -r ci/official/requirements_updater/requirements.in 235 | # tensorboard 236 | -h5py==3.11.0 \ 237 | - --hash=sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e \ 238 | - --hash=sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731 \ 239 | - --hash=sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892 \ 240 | - --hash=sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3 \ 241 | - --hash=sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1 \ 242 | - --hash=sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea \ 243 | - --hash=sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b \ 244 | - --hash=sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62 \ 245 | - --hash=sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150 \ 246 | - --hash=sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007 \ 247 | - 
--hash=sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00 \ 248 | - --hash=sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9 \ 249 | - --hash=sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76 \ 250 | - --hash=sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab \ 251 | - --hash=sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb \ 252 | - --hash=sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba \ 253 | - --hash=sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5 \ 254 | - --hash=sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3 \ 255 | - --hash=sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972 \ 256 | - --hash=sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc \ 257 | - --hash=sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0 258 | - # via 259 | - # -r ci/official/requirements_updater/requirements.in 260 | - # keras 261 | +h5py>=3.12.0 262 | idna==3.7 \ 263 | --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 264 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 265 | @@ -578,23 +554,7 @@ protobuf==4.25.3 \ 266 | --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ 267 | --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 268 | # via tensorboard 269 | -psutil==5.9.8 \ 270 | - --hash=sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d \ 271 | - --hash=sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73 \ 272 | - --hash=sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8 \ 273 | - --hash=sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2 \ 274 | - --hash=sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e \ 275 | - 
--hash=sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36 \ 276 | - --hash=sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7 \ 277 | - --hash=sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c \ 278 | - --hash=sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee \ 279 | - --hash=sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421 \ 280 | - --hash=sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf \ 281 | - --hash=sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81 \ 282 | - --hash=sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0 \ 283 | - --hash=sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631 \ 284 | - --hash=sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4 \ 285 | - --hash=sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8 286 | +psutil>=7.0.0 287 | # via portpicker 288 | pyelftools==0.31 \ 289 | --hash=sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99 \ 290 | -------------------------------------------------------------------------------- /recipe/patches/0043-cross-arch-config.patch: -------------------------------------------------------------------------------- 1 | From c0aad81257fd98c2a5dcfcba79cbde9a2a09c4b7 Mon Sep 17 00:00:00 2001 2 | From: dslarm 3 | Date: Tue, 14 Oct 2025 15:02:09 +0000 4 | Subject: [PATCH 43/43] cross arch config 5 | 6 | --- 7 | third_party/llvm/dirty.patch | 11 +++++++++++ 8 | third_party/llvm/workspace.bzl | 1 + 9 | 2 files changed, 12 insertions(+) 10 | create mode 100644 third_party/llvm/dirty.patch 11 | 12 | diff --git a/third_party/llvm/dirty.patch b/third_party/llvm/dirty.patch 13 | new file mode 100644 14 | index 00000000000..db99a4e597d 15 | --- /dev/null 16 | +++ b/third_party/llvm/dirty.patch 17 | @@ -0,0 +1,11 @@ 18 | +--- a/utils/bazel/llvm-project-overlay/llvm/config.bzl 
2025-04-17 16:11:50.417140143 +0000 19 | ++++ b/utils/bazel/llvm-project-overlay/llvm/config.bzl 2025-04-17 16:11:54.891132310 +0000 20 | +@@ -103,7 +103,7 @@ 21 | + "@bazel_tools//src/conditions:linux_aarch64": native_arch_defines("AArch64", "aarch64-unknown-linux-gnu"), 22 | + "@bazel_tools//src/conditions:linux_ppc64le": native_arch_defines("PowerPC", "powerpc64le-unknown-linux-gnu"), 23 | + "@bazel_tools//src/conditions:linux_s390x": native_arch_defines("SystemZ", "systemz-unknown-linux_gnu"), 24 | +- "//conditions:default": native_arch_defines("X86", "x86_64-unknown-linux-gnu"), 25 | ++ "//conditions:default": native_arch_defines("AArch64", "aarch64-unknown-linux-gnu"), 26 | + }) + [ 27 | + "LLVM_VERSION_MAJOR={}".format(LLVM_VERSION_MAJOR), 28 | + "LLVM_VERSION_MINOR={}".format(LLVM_VERSION_MINOR), 29 | diff --git a/third_party/llvm/workspace.bzl b/third_party/llvm/workspace.bzl 30 | index dc3ceaddaea..924293650ca 100644 31 | --- a/third_party/llvm/workspace.bzl 32 | +++ b/third_party/llvm/workspace.bzl 33 | @@ -22,6 +22,7 @@ def repo(name): 34 | "//third_party/llvm:mathextras.patch", 35 | "//third_party/llvm:toolchains.patch", 36 | "//third_party/llvm:zstd.patch", 37 | + "//third_party/llvm:dirty.patch", 38 | ], 39 | link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"}, 40 | ) 41 | -------------------------------------------------------------------------------- /recipe/py_toolchain.bzl: -------------------------------------------------------------------------------- 1 | load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") 2 | 3 | py_runtime( 4 | name = "python3", 5 | python_version = "PY3", 6 | interpreter_path = '@@SRC_DIR@@/python.shebang', 7 | stub_shebang = '#!@@SRC_DIR@@/python.shebang', 8 | ) 9 | 10 | py_runtime_pair( 11 | name = "py_runtime_pair", 12 | py3_runtime = ":python3", 13 | ) 14 | 15 | toolchain( 16 | name = "py_toolchain", 17 | toolchain = ":py_runtime_pair", 18 | toolchain_type = 
"@bazel_tools//tools/python:toolchain_type", 19 | ) 20 | -------------------------------------------------------------------------------- /recipe/pybind11_protobuf/0001-DO-not-link-to-proto_api.patch: -------------------------------------------------------------------------------- 1 | From 49d4bc6c02cd579eca4659af2904ea2ce80ec3b7 Mon Sep 17 00:00:00 2001 2 | From: Mark Harfouche 3 | Date: Sun, 6 Jul 2025 16:27:57 -0400 4 | Subject: [PATCH 1/2] DO not link to proto_api 5 | 6 | --- 7 | pybind11_protobuf/BUILD | 1 - 8 | 1 file changed, 1 deletion(-) 9 | 10 | diff --git a/pybind11_protobuf/BUILD b/pybind11_protobuf/BUILD 11 | index d9abcaa..3393167 100644 12 | --- a/pybind11_protobuf/BUILD 13 | +++ b/pybind11_protobuf/BUILD 14 | @@ -46,7 +46,6 @@ pybind_library( 15 | "@com_google_absl//absl/strings", 16 | "@com_google_absl//absl/types:optional", 17 | "@com_google_protobuf//:protobuf", 18 | - "@com_google_protobuf//python:proto_api", 19 | ], 20 | ) 21 | 22 | -- 23 | 2.43.0 24 | 25 | -------------------------------------------------------------------------------- /recipe/pybind11_protobuf/0002-Add-Python-include-path.patch: -------------------------------------------------------------------------------- 1 | From 08267b0ca1485f09513dc44772b39f3eed5b450c Mon Sep 17 00:00:00 2001 2 | From: "Uwe L. 
Korn" 3 | Date: Tue, 29 Aug 2023 21:33:08 +0200 4 | Subject: [PATCH 2/2] Add Python include path 5 | 6 | Updated by Mark Harfouche on July 6, 2025 7 | --- 8 | pybind11_protobuf/BUILD | 6 ++++++ 9 | 1 file changed, 6 insertions(+) 10 | 11 | diff --git a/pybind11_protobuf/BUILD b/pybind11_protobuf/BUILD 12 | index 3393167..cb7b5b0 100644 13 | --- a/pybind11_protobuf/BUILD 14 | +++ b/pybind11_protobuf/BUILD 15 | @@ -67,6 +67,9 @@ cc_library( 16 | name = "check_unknown_fields", 17 | srcs = ["check_unknown_fields.cc"], 18 | hdrs = ["check_unknown_fields.h"], 19 | + copts = [ 20 | + "-I@@PREFIX@@/include/python", 21 | + ], 22 | deps = [ 23 | "@com_google_absl//absl/container:flat_hash_map", 24 | "@com_google_absl//absl/container:flat_hash_set", 25 | @@ -84,6 +87,9 @@ cc_library( 26 | visibility = [ 27 | "//visibility:public", 28 | ], 29 | + copts = [ 30 | + "-I@@PREFIX@@/include/python", 31 | + ], 32 | deps = [ 33 | ":check_unknown_fields", 34 | ], 35 | -- 36 | 2.43.0 37 | 38 | -------------------------------------------------------------------------------- /recipe/test_c.c: -------------------------------------------------------------------------------- 1 | #include <stdio.h> 2 | #include <tensorflow/c/c_api.h> 3 | 4 | int main() { 5 | printf("Hello from TensorFlow C library version %s\n", TF_Version()); 6 | return 0; 7 | } 8 | -------------------------------------------------------------------------------- /recipe/test_cc.cc: -------------------------------------------------------------------------------- 1 | // test tensorflow_cc 2 | // https://www.tensorflow.org/guide/extend/cc 3 | #include "tensorflow/core/public/session.h" 4 | #include "tensorflow/core/platform/env.h" 5 | #include "tensorflow/core/framework/op.h" 6 | #include "tensorflow/core/framework/op_kernel.h" 7 | #include "tensorflow/core/framework/shape_inference.h" 8 | 9 | #include "tensorflow/cc/client/client_session.h" 10 | #include "tensorflow/cc/ops/standard_ops.h" 11 | #include "tensorflow/core/framework/tensor.h" 12 | 13 | int main() 
{ 14 | using namespace tensorflow; 15 | using namespace tensorflow::ops; 16 | Scope root = Scope::NewRootScope(); 17 | // Matrix A = [3 2; -1 0] 18 | auto A = Const(root, { {3.f, 2.f}, {-1.f, 0.f} }); 19 | // Vector b = [3 5] 20 | auto b = Const(root, { {3.f, 5.f} }); 21 | // v = Ab^T 22 | auto v = MatMul(root.WithOpName("v"), A, b, MatMul::TransposeB(true)); 23 | std::vector<Tensor> outputs; 24 | ClientSession session(root); 25 | // Run and fetch v 26 | TF_CHECK_OK(session.Run({v}, &outputs)); 27 | // Expect outputs[0] == [19; -3] 28 | LOG(INFO) << outputs[0].matrix<float>(); 29 | return 0; 30 | } 31 | -------------------------------------------------------------------------------- /recipe/test_libtensorflow.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | ${CC} ${CFLAGS} ${LDFLAGS} -o test_c test_c.c -ltensorflow -labsl_status 6 | ./test_c 7 | -------------------------------------------------------------------------------- /recipe/test_libtensorflow_cc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exuo pipefail 4 | 5 | if [[ "${target_platform}" == linux-* ]]; then 6 | export LDFLAGS="${LDFLAGS} -lrt" 7 | fi 8 | export CXXFLAGS="${CXXFLAGS} -std=c++17" 9 | export CXXFLAGS="${CXXFLAGS} -I${CONDA_PREFIX}/include/tensorflow/third_party" 10 | export CXXFLAGS="${CXXFLAGS} -I${CONDA_PREFIX}/include/tensorflow/third_party/xla" 11 | ${CXX} ${CXXFLAGS} ${LDFLAGS} -o test_cc test_cc.cc -ltensorflow_cc -ltensorflow_framework -labsl_status 12 | ./test_cc 13 | -------------------------------------------------------------------------------- /recipe/test_tensorflow.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | hello = tf.constant('Hello, TensorFlow!') 3 | a = tf.constant(10) 4 | b = tf.constant(32) 5 | a + b 6 | 7 | mnist = tf.keras.datasets.mnist 8 | 9 | (x_train, 
y_train), (x_test, y_test) = mnist.load_data() 10 | x_train, x_test = x_train / 255.0, x_test / 255.0 11 | 12 | model = tf.keras.models.Sequential([ 13 | tf.keras.layers.Flatten(input_shape=(28, 28)), 14 | tf.keras.layers.Dense(128, activation='relu'), 15 | tf.keras.layers.Dropout(0.2), 16 | tf.keras.layers.Dense(10, activation='softmax') 17 | ]) 18 | 19 | --------------------------------------------------------------------------------