├── .github
│   └── CODEOWNERS
├── .ci_support
│   ├── migrations
│   │   ├── aws_crt_cpp0354.yaml
│   │   ├── brotli12.yaml
│   │   ├── libboost188.yaml
│   │   ├── absl_grpc_proto.yaml
│   │   ├── absl_grpc_proto_25Q2.yaml
│   │   └── cuda118.yaml
│   ├── README
│   ├── win_64_cuda_compiler_version11.8.yaml
│   ├── win_64_cuda_compiler_versionNone.yaml
│   ├── osx_64_.yaml
│   ├── osx_arm64_.yaml
│   ├── linux_64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml
│   ├── linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
│   ├── linux_aarch64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml
│   ├── linux_ppc64le_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml
│   ├── linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
│   └── linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
├── recipe
│   ├── conda_build_config.yaml
│   ├── patches
│   │   ├── 0002-skip-NonExistentBucket-test-on-osx.patch
│   │   ├── 0003-Change-xsimd-inclusion-criterion.patch
│   │   ├── 0001-disable-gcsfs_test.patch
│   │   └── 0004-GH-48260-C-Python-R-Move-S3-bucket-references-to-new.patch
│   ├── bld.bat
│   ├── LLVM_LICENSE.txt
│   ├── activate.sh
│   ├── build.sh
│   ├── install-libarrow.sh
│   ├── install-libarrow.bat
│   └── meta.yaml
├── conda-forge.yml
├── .circleci
│   └── config.yml
├── .gitattributes
├── .gitignore
├── .scripts
│   ├── logging_utils.sh
│   ├── build_steps.sh
│   ├── run_docker_build.sh
│   ├── run_osx_build.sh
│   └── run_win_build.bat
├── .azure-pipelines
│   ├── azure-pipelines-osx.yml
│   ├── azure-pipelines-win.yml
│   └── azure-pipelines-linux.yml
├── azure-pipelines.yml
├── LICENSE.txt
└── README.md

/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @cpcloud @h-vetinari @jakirkham @jreback @kou @kszucs @leifwalsh @nealrichardson @pcmoritz @pearu @pitrou @raulcd @robertnishihara @siddharthteotia @wesm @xhochy
--------------------------------------------------------------------------------
/.ci_support/migrations/aws_crt_cpp0354.yaml:
--------------------------------------------------------------------------------
1 | __migrator:
2 |   build_number: 1
3 |   commit_message: Rebuild for aws_crt_cpp 0.35.4
4 |   kind: version
5 |   migration_number: 1
6 | aws_crt_cpp:
7 | - 0.35.4
8 | migrator_ts: 1765205792.4486868
9 |
--------------------------------------------------------------------------------
/.ci_support/migrations/brotli12.yaml:
--------------------------------------------------------------------------------
1 | __migrator:
2 |   build_number: 1
3 |   commit_message: Rebuild for brotli 1.2
4 |   kind: version
5 |   migration_number: 1
6 | brotli:
7 | - '1.2'
8 | libbrotlicommon:
9 | - '1.2'
10 | libbrotlidec:
11 | - '1.2'
12 | libbrotlienc:
13 | - '1.2'
14 | migrator_ts: 1761598392.2337856
15 |
--------------------------------------------------------------------------------
/.ci_support/migrations/libboost188.yaml:
--------------------------------------------------------------------------------
1 | __migrator:
2 |   build_number: 1
3 |   kind: version
4 |   commit_message: "Rebuild for libboost 1.88"
5 |   migration_number: 1
6 | libboost_devel:
7 | - "1.88"
8 | libboost_headers:
9 | - "1.88"
10 | libboost_python_devel:
11 | - "1.88"
12 | migrator_ts: 1753251695.3315456
13 |
--------------------------------------------------------------------------------
/recipe/conda_build_config.yaml:
--------------------------------------------------------------------------------
1 | # avoid wrongly picking up multiple versions
2 | libarrow:
3 | - "*"
4 | libarrow_all:
5 | - "*"
6 |
7 | # on osx, keep this in sync with llvm_version in meta.yaml.
8 | c_compiler_version:    # [osx]
9 | - 19                   # [osx]
10 | cxx_compiler_version:  # [osx]
11 | - 19                   # [osx]
12 |
--------------------------------------------------------------------------------
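Note: the pins above are conda-smithy input; they only take effect in the
rendered matrix files under .ci_support/ (shown further below). A quick check
after a re-render could look like this (a sketch, assuming a local checkout of
this feedstock with rendered configs):

    # the osx compiler pin from conda_build_config.yaml should be
    # propagated into the rendered configs, e.g. .ci_support/osx_64_.yaml
    grep -A1 "c_compiler_version" .ci_support/osx_64_.yaml

which should print the '19' pinned above (compare osx_64_.yaml below).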
| - "*" 6 | 7 | # on osx, keep this in sync with llvm_version in meta.yaml. 8 | c_compiler_version: # [osx] 9 | - 19 # [osx] 10 | cxx_compiler_version: # [osx] 11 | - 19 # [osx] 12 | -------------------------------------------------------------------------------- /.ci_support/README: -------------------------------------------------------------------------------- 1 | This file is automatically generated by conda-smithy. If any 2 | particular build configuration is expected, but it is not found, 3 | please make sure all dependencies are satisfiable. To add/modify any 4 | matrix elements, you should create/change conda-smithy's input 5 | recipe/conda_build_config.yaml and re-render the recipe, rather than 6 | editing these files directly. 7 | -------------------------------------------------------------------------------- /.ci_support/migrations/absl_grpc_proto.yaml: -------------------------------------------------------------------------------- 1 | __migrator: 2 | build_number: 1 3 | commit_message: Rebuild for libabseil 20250127, libgrpc 1.71 & libprotobuf 5.29.3 4 | kind: version 5 | migration_number: 1 6 | exclude: 7 | - abseil-cpp 8 | - grpc-cpp 9 | - libprotobuf 10 | - protobuf 11 | - re2 12 | libabseil: 13 | - 20250127 14 | libgrpc: 15 | - "1.71" 16 | libprotobuf: 17 | - 5.29.3 18 | # see https://github.com/grpc/grpc/commit/14ac94d923b80650e0df55bed17be5efa0e4becd 19 | c_stdlib_version: # [osx and x86_64] 20 | - 10.14 # [osx and x86_64] 21 | migrator_ts: 1741118046.5882597 22 | -------------------------------------------------------------------------------- /conda-forge.yml: -------------------------------------------------------------------------------- 1 | azure: 2 | free_disk_space: true 3 | max_parallel: 20 4 | settings_win: 5 | variables: 6 | CONDA_BLD_PATH: C:\\bld\\ 7 | MINIFORGE_HOME: C:\\Miniforge 8 | bot: 9 | abi_migration_branches: 10 | - 21.x 11 | - 20.x 12 | - 19.x 13 | build_platform: 14 | linux_aarch64: linux_64 15 | linux_ppc64le: linux_64 16 | osx_arm64: osx_64 17 | conda_build: 18 | pkg_format: '2' 19 | conda_forge_output_validation: true 20 | github: 21 | branch_name: main 22 | tooling_branch_name: main 23 | provider: 24 | linux_aarch64: azure 25 | linux_ppc64le: azure 26 | test: native_and_emulated 27 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: jinja-yaml -*- 4 | 5 | version: 2 6 | 7 | jobs: 8 | build: 9 | working_directory: ~/test 10 | machine: 11 | image: ubuntu-2004:current 12 | steps: 13 | - run: 14 | # The Circle-CI build should not be active, but if this is not true for some reason, do a fast finish. 
/.ci_support/migrations/absl_grpc_proto.yaml:
--------------------------------------------------------------------------------
1 | __migrator:
2 |   build_number: 1
3 |   commit_message: Rebuild for libabseil 20250127, libgrpc 1.71 & libprotobuf 5.29.3
4 |   kind: version
5 |   migration_number: 1
6 |   exclude:
7 |     - abseil-cpp
8 |     - grpc-cpp
9 |     - libprotobuf
10 |     - protobuf
11 |     - re2
12 | libabseil:
13 | - 20250127
14 | libgrpc:
15 | - "1.71"
16 | libprotobuf:
17 | - 5.29.3
18 | # see https://github.com/grpc/grpc/commit/14ac94d923b80650e0df55bed17be5efa0e4becd
19 | c_stdlib_version:  # [osx and x86_64]
20 | - 10.14            # [osx and x86_64]
21 | migrator_ts: 1741118046.5882597
22 |
--------------------------------------------------------------------------------
/conda-forge.yml:
--------------------------------------------------------------------------------
1 | azure:
2 |   free_disk_space: true
3 |   max_parallel: 20
4 |   settings_win:
5 |     variables:
6 |       CONDA_BLD_PATH: C:\\bld\\
7 |       MINIFORGE_HOME: C:\\Miniforge
8 | bot:
9 |   abi_migration_branches:
10 |     - 21.x
11 |     - 20.x
12 |     - 19.x
13 | build_platform:
14 |   linux_aarch64: linux_64
15 |   linux_ppc64le: linux_64
16 |   osx_arm64: osx_64
17 | conda_build:
18 |   pkg_format: '2'
19 | conda_forge_output_validation: true
20 | github:
21 |   branch_name: main
22 |   tooling_branch_name: main
23 | provider:
24 |   linux_aarch64: azure
25 |   linux_ppc64le: azure
26 | test: native_and_emulated
27 |
--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # This file was generated automatically from conda-smithy. To update this configuration,
2 | # update the conda-forge.yml and/or the recipe/meta.yaml.
3 | # -*- mode: jinja-yaml -*-
4 |
5 | version: 2
6 |
7 | jobs:
8 |   build:
9 |     working_directory: ~/test
10 |     machine:
11 |       image: ubuntu-2004:current
12 |     steps:
13 |       - run:
14 |           # The Circle-CI build should not be active, but if this is not true for some reason, do a fast finish.
15 |           command: exit 0
16 |
17 | workflows:
18 |   version: 2
19 |   build_and_test:
20 |     jobs:
21 |       - build:
22 |           filters:
23 |             branches:
24 |               ignore:
25 |                 - /.*/
26 |
--------------------------------------------------------------------------------
/.ci_support/migrations/absl_grpc_proto_25Q2.yaml:
--------------------------------------------------------------------------------
1 | __migrator:
2 |   build_number: 1
3 |   commit_message: Rebuild for libabseil 20250512, libgrpc 1.73 & libprotobuf 6.31.1
4 |   kind: version
5 |   migration_number: 1
6 |   exclude:
7 |     # core deps
8 |     - abseil-cpp
9 |     - grpc-cpp
10 |     - libprotobuf
11 |     # required for building/testing
12 |     - protobuf
13 |     - re2
14 |     # bazel stack
15 |     - bazel
16 |     - grpc_java_plugin
17 |     - singlejar
18 | libabseil:
19 | - 20250512
20 | libgrpc:
21 | - "1.73"
22 | libprotobuf:
23 | - 6.31.1
24 | # we need to leave this migration open until we're ready to move the global baseline, see
25 | # https://github.com/conda-forge/conda-forge.github.io/issues/2467; grpc 1.72 requires 11.0,
26 | # see https://github.com/grpc/grpc/commit/f122d248443c81592e748da1adb240cbf0a0231c
27 | c_stdlib_version:  # [osx]
28 | - 11.0             # [osx]
29 | migrator_ts: 1748506837.6039238
30 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 |
3 | *.patch binary
4 | *.diff binary
5 | meta.yaml text eol=lf
6 | build.sh text eol=lf
7 | bld.bat text eol=crlf
8 |
9 | # github helper pieces to make some files not show up in diffs automatically
10 | .azure-pipelines/* linguist-generated=true
11 | .circleci/* linguist-generated=true
12 | .ci_support/README linguist-generated=true
13 | .drone/* linguist-generated=true
14 | .drone.yml linguist-generated=true
15 | .github/* linguist-generated=true
16 | .travis/* linguist-generated=true
17 | .appveyor.yml linguist-generated=true
18 | .gitattributes linguist-generated=true
19 | .gitignore linguist-generated=true
20 | .travis.yml linguist-generated=true
21 | .scripts/* linguist-generated=true
22 | .woodpecker.yml linguist-generated=true
23 | /LICENSE.txt linguist-generated=true
24 | /README.md linguist-generated=true
25 | azure-pipelines.yml linguist-generated=true
26 | build-locally.py linguist-generated=true
27 | pixi.toml linguist-generated=true
28 | shippable.yml linguist-generated=true
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # User content belongs under recipe/.
2 | # Feedstock configuration goes in `conda-forge.yml`
3 | # Everything else is managed by the conda-smithy rerender process.
4 | # Please do not modify
5 |
6 | # Ignore all files and folders in root
7 | *
8 | !/conda-forge.yml
9 |
10 | # Don't ignore any files/folders if the parent folder is 'un-ignored'
11 | # This also avoids warnings when adding an already-checked file with an ignored parent.
12 | !/**/
13 | # Don't ignore any files/folders recursively in the following folders
14 | !/recipe/**
15 | !/.ci_support/**
16 |
17 | # Since we ignore files/folders recursively, any folders inside
18 | # build_artifacts get ignored, which trips some build systems.
19 | # To avoid that we 'un-ignore' all files/folders recursively
20 | # and only ignore the root build_artifacts folder.
21 | !/build_artifacts/** 22 | /build_artifacts 23 | 24 | *.pyc 25 | 26 | # Rattler-build's artifacts are in `output` when not specifying anything. 27 | /output 28 | # Pixi's configuration 29 | .pixi 30 | -------------------------------------------------------------------------------- /.scripts/logging_utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Provide a unified interface for the different logging 4 | # utilities CI providers offer. If unavailable, provide 5 | # a compatible fallback (e.g. bare `echo xxxxxx`). 6 | 7 | function startgroup { 8 | # Start a foldable group of log lines 9 | # Pass a single argument, quoted 10 | case ${CI:-} in 11 | azure ) 12 | echo "##[group]$1";; 13 | travis ) 14 | echo "$1" 15 | echo -en 'travis_fold:start:'"${1// /}"'\r';; 16 | github_actions ) 17 | echo "::group::$1";; 18 | * ) 19 | echo "$1";; 20 | esac 21 | } 2> /dev/null 22 | 23 | function endgroup { 24 | # End a foldable group of log lines 25 | # Pass a single argument, quoted 26 | 27 | case ${CI:-} in 28 | azure ) 29 | echo "##[endgroup]";; 30 | travis ) 31 | echo -en 'travis_fold:end:'"${1// /}"'\r';; 32 | github_actions ) 33 | echo "::endgroup::";; 34 | esac 35 | } 2> /dev/null 36 | -------------------------------------------------------------------------------- /recipe/patches/0002-skip-NonExistentBucket-test-on-osx.patch: -------------------------------------------------------------------------------- 1 | From 18258d1295a5e95426ec7b52a6412427d17b114d Mon Sep 17 00:00:00 2001 2 | From: "H. Vetinari" 3 | Date: Sun, 24 Nov 2024 20:22:35 +1100 4 | Subject: [PATCH 2/4] skip NonExistentBucket test on osx 5 | 6 | --- 7 | cpp/src/arrow/filesystem/s3fs_test.cc | 4 ++++ 8 | 1 file changed, 4 insertions(+) 9 | 10 | diff --git a/cpp/src/arrow/filesystem/s3fs_test.cc b/cpp/src/arrow/filesystem/s3fs_test.cc 11 | index f0a5d0e2e4..b5578b4f74 100644 12 | --- a/cpp/src/arrow/filesystem/s3fs_test.cc 13 | +++ b/cpp/src/arrow/filesystem/s3fs_test.cc 14 | @@ -438,6 +438,10 @@ TEST_F(S3RegionResolutionTest, RestrictedBucket) { 15 | } 16 | 17 | TEST_F(S3RegionResolutionTest, NonExistentBucket) { 18 | +#ifdef __APPLE__ 19 | + // spuriously fails, see https://github.com/apache/arrow/issues/35587#issuecomment-2495637658 20 | + GTEST_SKIP(); 21 | +#endif 22 | auto maybe_region = ResolveS3BucketRegion("ursa-labs-nonexistent-bucket"); 23 | ASSERT_RAISES(IOError, maybe_region); 24 | ASSERT_THAT(maybe_region.status().message(), 25 | -------------------------------------------------------------------------------- /recipe/patches/0003-Change-xsimd-inclusion-criterion.patch: -------------------------------------------------------------------------------- 1 | From 37f86532af653cb230ff3b6c8040d6a8577fd3a4 Mon Sep 17 00:00:00 2001 2 | From: AntoinePrv 3 | Date: Mon, 27 Oct 2025 10:25:58 +0100 4 | Subject: [PATCH 3/4] Change xsimd inclusion criterion 5 | 6 | --- 7 | cpp/src/arrow/util/byte_stream_split_internal.h | 4 +++- 8 | 1 file changed, 3 insertions(+), 1 deletion(-) 9 | 10 | diff --git a/cpp/src/arrow/util/byte_stream_split_internal.h b/cpp/src/arrow/util/byte_stream_split_internal.h 11 | index 70f9b87d6c..1ba2d5335b 100644 12 | --- a/cpp/src/arrow/util/byte_stream_split_internal.h 13 | +++ b/cpp/src/arrow/util/byte_stream_split_internal.h 14 | @@ -29,7 +29,9 @@ 15 | #include 16 | #include 17 | 18 | -#if defined(ARROW_HAVE_NEON) || defined(ARROW_HAVE_SSE4_2) 19 | +// ARROW_HAVE_RUNTIME_SSE4_2 is used on x86-64 to include ARROW_HAVE_SSE4_2 and 20 | +// 
ARROW_RUNTIME_SIMD_LEVEL != NONE. 21 | +#if defined(ARROW_HAVE_NEON) || defined(ARROW_HAVE_RUNTIME_SSE4_2) 22 | # include 23 | # define ARROW_HAVE_SIMD_SPLIT 24 | #endif 25 | -------------------------------------------------------------------------------- /.ci_support/win_64_cuda_compiler_version11.8.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | brotli: 6 | - '1.2' 7 | bzip2: 8 | - '1' 9 | c_compiler: 10 | - vs2022 11 | c_stdlib: 12 | - vs 13 | channel_sources: 14 | - conda-forge 15 | channel_targets: 16 | - conda-forge main 17 | cuda_compiler: 18 | - nvcc 19 | cuda_compiler_version: 20 | - '11.8' 21 | cuda_compiler_version_min: 22 | - '11.8' 23 | cxx_compiler: 24 | - vs2022 25 | gflags: 26 | - '2.2' 27 | glog: 28 | - '0.7' 29 | libabseil: 30 | - '20250512' 31 | libarrow: 32 | - '*' 33 | libboost_devel: 34 | - '1.88' 35 | libbrotlicommon: 36 | - '1.2' 37 | libcrc32c: 38 | - '1.1' 39 | libcurl: 40 | - '8' 41 | libgoogle_cloud_devel: 42 | - '2.39' 43 | libgoogle_cloud_storage_devel: 44 | - '2.39' 45 | libgrpc: 46 | - '1.73' 47 | libprotobuf: 48 | - 6.31.1 49 | libutf8proc: 50 | - '2.11' 51 | lz4_c: 52 | - '1.10' 53 | openssl: 54 | - '3.5' 55 | orc: 56 | - 2.2.1 57 | re2: 58 | - 2025.08.12 59 | snappy: 60 | - '1.2' 61 | target_platform: 62 | - win-64 63 | thrift_cpp: 64 | - 0.22.0 65 | zip_keys: 66 | - - cuda_compiler_version 67 | - cuda_compiler 68 | zlib: 69 | - '1' 70 | zstd: 71 | - '1.5' 72 | -------------------------------------------------------------------------------- /.ci_support/win_64_cuda_compiler_versionNone.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | brotli: 6 | - '1.2' 7 | bzip2: 8 | - '1' 9 | c_compiler: 10 | - vs2022 11 | c_stdlib: 12 | - vs 13 | channel_sources: 14 | - conda-forge 15 | channel_targets: 16 | - conda-forge main 17 | cuda_compiler: 18 | - cuda-nvcc 19 | cuda_compiler_version: 20 | - None 21 | cuda_compiler_version_min: 22 | - '11.8' 23 | cxx_compiler: 24 | - vs2022 25 | gflags: 26 | - '2.2' 27 | glog: 28 | - '0.7' 29 | libabseil: 30 | - '20250512' 31 | libarrow: 32 | - '*' 33 | libboost_devel: 34 | - '1.88' 35 | libbrotlicommon: 36 | - '1.2' 37 | libcrc32c: 38 | - '1.1' 39 | libcurl: 40 | - '8' 41 | libgoogle_cloud_devel: 42 | - '2.39' 43 | libgoogle_cloud_storage_devel: 44 | - '2.39' 45 | libgrpc: 46 | - '1.73' 47 | libprotobuf: 48 | - 6.31.1 49 | libutf8proc: 50 | - '2.11' 51 | lz4_c: 52 | - '1.10' 53 | openssl: 54 | - '3.5' 55 | orc: 56 | - 2.2.1 57 | re2: 58 | - 2025.08.12 59 | snappy: 60 | - '1.2' 61 | target_platform: 62 | - win-64 63 | thrift_cpp: 64 | - 0.22.0 65 | zip_keys: 66 | - - cuda_compiler_version 67 | - cuda_compiler 68 | zlib: 69 | - '1' 70 | zstd: 71 | - '1.5' 72 | -------------------------------------------------------------------------------- /recipe/patches/0001-disable-gcsfs_test.patch: -------------------------------------------------------------------------------- 1 | From b7b6283fd8a9905d959a0f25f97293750aa7669c Mon Sep 17 00:00:00 2001 2 | From: "H. 
Vetinari" 3 | Date: Sat, 2 Nov 2024 15:41:34 +1100 4 | Subject: [PATCH 1/4] disable gcsfs_test 5 | 6 | it cannot work unless we package https://github.com/googleapis/storage-testbench, 7 | which however has extremely tight dependencies on protobuf etc., making it very 8 | hard to fit this into our migration patterns 9 | --- 10 | cpp/src/arrow/filesystem/CMakeLists.txt | 8 -------- 11 | 1 file changed, 8 deletions(-) 12 | 13 | diff --git a/cpp/src/arrow/filesystem/CMakeLists.txt b/cpp/src/arrow/filesystem/CMakeLists.txt 14 | index 5250ed2a88..ba053bd501 100644 15 | --- a/cpp/src/arrow/filesystem/CMakeLists.txt 16 | +++ b/cpp/src/arrow/filesystem/CMakeLists.txt 17 | @@ -42,14 +42,6 @@ if(ARROW_BUILD_BENCHMARKS) 18 | ${ARROW_BENCHMARK_LINK_LIBS}) 19 | endif() 20 | 21 | -if(ARROW_GCS) 22 | - add_arrow_test(gcsfs_test 23 | - EXTRA_LABELS 24 | - filesystem 25 | - EXTRA_LINK_LIBS 26 | - google-cloud-cpp::storage) 27 | -endif() 28 | - 29 | if(ARROW_AZURE) 30 | add_arrow_test(azurefs_test 31 | EXTRA_LABELS 32 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-osx.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: yaml -*- 4 | 5 | jobs: 6 | - job: osx 7 | pool: 8 | vmImage: macOS-15 9 | strategy: 10 | matrix: 11 | osx_64_: 12 | CONFIG: osx_64_ 13 | UPLOAD_PACKAGES: 'True' 14 | osx_arm64_: 15 | CONFIG: osx_arm64_ 16 | UPLOAD_PACKAGES: 'True' 17 | timeoutInMinutes: 360 18 | variables: {} 19 | 20 | steps: 21 | # TODO: Fast finish on azure pipelines? 22 | - script: | 23 | export CI=azure 24 | export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) 25 | export remote_url=$(Build.Repository.Uri) 26 | export sha=$(Build.SourceVersion) 27 | export OSX_FORCE_SDK_DOWNLOAD="1" 28 | export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME 29 | export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) 30 | if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then 31 | export IS_PR_BUILD="True" 32 | else 33 | export IS_PR_BUILD="False" 34 | fi 35 | ./.scripts/run_osx_build.sh 36 | displayName: Run OSX build 37 | env: 38 | BINSTAR_TOKEN: $(BINSTAR_TOKEN) 39 | FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) 40 | STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) 41 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-win.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 
3 | # -*- mode: yaml -*- 4 | 5 | jobs: 6 | - job: win 7 | pool: 8 | vmImage: windows-2022 9 | strategy: 10 | matrix: 11 | win_64_cuda_compiler_version11.8: 12 | CONFIG: win_64_cuda_compiler_version11.8 13 | UPLOAD_PACKAGES: 'True' 14 | win_64_cuda_compiler_versionNone: 15 | CONFIG: win_64_cuda_compiler_versionNone 16 | UPLOAD_PACKAGES: 'True' 17 | timeoutInMinutes: 360 18 | variables: 19 | CONDA_BLD_PATH: C:\\bld\\ 20 | MINIFORGE_HOME: C:\\Miniforge 21 | UPLOAD_TEMP: D:\\tmp 22 | 23 | steps: 24 | 25 | - script: | 26 | call ".scripts\run_win_build.bat" 27 | displayName: Run Windows build 28 | env: 29 | MINIFORGE_HOME: $(MINIFORGE_HOME) 30 | CONDA_BLD_PATH: $(CONDA_BLD_PATH) 31 | PYTHONUNBUFFERED: 1 32 | CONFIG: $(CONFIG) 33 | CI: azure 34 | flow_run_id: azure_$(Build.BuildNumber).$(System.JobAttempt) 35 | remote_url: $(Build.Repository.Uri) 36 | sha: $(Build.SourceVersion) 37 | UPLOAD_PACKAGES: $(UPLOAD_PACKAGES) 38 | UPLOAD_TEMP: $(UPLOAD_TEMP) 39 | BINSTAR_TOKEN: $(BINSTAR_TOKEN) 40 | FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) 41 | STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) 42 | -------------------------------------------------------------------------------- /.ci_support/osx_64_.yaml: -------------------------------------------------------------------------------- 1 | MACOSX_DEPLOYMENT_TARGET: 2 | - '11.0' 3 | MACOSX_SDK_VERSION: 4 | - '11.0' 5 | aws_crt_cpp: 6 | - 0.35.4 7 | aws_sdk_cpp: 8 | - 1.11.606 9 | azure_core_cpp: 10 | - 1.16.1 11 | azure_identity_cpp: 12 | - 1.13.2 13 | azure_storage_blobs_cpp: 14 | - 12.15.0 15 | azure_storage_files_datalake_cpp: 16 | - 12.13.0 17 | brotli: 18 | - '1.2' 19 | bzip2: 20 | - '1' 21 | c_compiler: 22 | - clang 23 | c_compiler_version: 24 | - '19' 25 | c_stdlib: 26 | - macosx_deployment_target 27 | c_stdlib_version: 28 | - '11.0' 29 | channel_sources: 30 | - conda-forge 31 | channel_targets: 32 | - conda-forge main 33 | cuda_compiler_version: 34 | - None 35 | cxx_compiler: 36 | - clangxx 37 | cxx_compiler_version: 38 | - '19' 39 | gflags: 40 | - '2.2' 41 | glog: 42 | - '0.7' 43 | libabseil: 44 | - '20250512' 45 | libarrow: 46 | - '*' 47 | libboost_devel: 48 | - '1.88' 49 | libbrotlicommon: 50 | - '1.2' 51 | libgoogle_cloud_devel: 52 | - '2.39' 53 | libgoogle_cloud_storage_devel: 54 | - '2.39' 55 | libgrpc: 56 | - '1.73' 57 | libopentelemetry_cpp: 58 | - '1.21' 59 | libprotobuf: 60 | - 6.31.1 61 | libutf8proc: 62 | - '2.11' 63 | lz4_c: 64 | - '1.10' 65 | macos_machine: 66 | - x86_64-apple-darwin13.4.0 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - osx-64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | zlib: 83 | - '1' 84 | zstd: 85 | - '1.5' 86 | -------------------------------------------------------------------------------- /.ci_support/osx_arm64_.yaml: -------------------------------------------------------------------------------- 1 | MACOSX_DEPLOYMENT_TARGET: 2 | - '11.0' 3 | MACOSX_SDK_VERSION: 4 | - '11.0' 5 | aws_crt_cpp: 6 | - 0.35.4 7 | aws_sdk_cpp: 8 | - 1.11.606 9 | azure_core_cpp: 10 | - 1.16.1 11 | azure_identity_cpp: 12 | - 1.13.2 13 | azure_storage_blobs_cpp: 14 | - 12.15.0 15 | azure_storage_files_datalake_cpp: 16 | - 12.13.0 17 | brotli: 18 | - '1.2' 19 | bzip2: 20 | - '1' 21 | c_compiler: 22 | - clang 23 | c_compiler_version: 24 | - '19' 25 | c_stdlib: 26 | - macosx_deployment_target 27 | c_stdlib_version: 28 | - '11.0' 29 | channel_sources: 30 | - conda-forge 31 | channel_targets: 32 | - 
conda-forge main 33 | cuda_compiler_version: 34 | - None 35 | cxx_compiler: 36 | - clangxx 37 | cxx_compiler_version: 38 | - '19' 39 | gflags: 40 | - '2.2' 41 | glog: 42 | - '0.7' 43 | libabseil: 44 | - '20250512' 45 | libarrow: 46 | - '*' 47 | libboost_devel: 48 | - '1.88' 49 | libbrotlicommon: 50 | - '1.2' 51 | libgoogle_cloud_devel: 52 | - '2.39' 53 | libgoogle_cloud_storage_devel: 54 | - '2.39' 55 | libgrpc: 56 | - '1.73' 57 | libopentelemetry_cpp: 58 | - '1.21' 59 | libprotobuf: 60 | - 6.31.1 61 | libutf8proc: 62 | - '2.11' 63 | lz4_c: 64 | - '1.10' 65 | macos_machine: 66 | - arm64-apple-darwin20.0.0 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - osx-arm64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | zlib: 83 | - '1' 84 | zstd: 85 | - '1.5' 86 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: yaml -*- 4 | 5 | stages: 6 | - stage: Check 7 | jobs: 8 | - job: Skip 9 | pool: 10 | vmImage: 'ubuntu-22.04' 11 | variables: 12 | DECODE_PERCENTS: 'false' 13 | RET: 'true' 14 | steps: 15 | - checkout: self 16 | fetchDepth: '2' 17 | - bash: | 18 | git_log=`git log --max-count=1 --skip=1 --pretty=format:"%B" | tr "\n" " "` 19 | echo "##vso[task.setvariable variable=log]$git_log" 20 | displayName: Obtain commit message 21 | - bash: echo "##vso[task.setvariable variable=RET]false" 22 | condition: and(eq(variables['Build.Reason'], 'PullRequest'), or(contains(variables.log, '[skip azp]'), contains(variables.log, '[azp skip]'), contains(variables.log, '[skip ci]'), contains(variables.log, '[ci skip]'))) 23 | displayName: Skip build? 
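# (annotation) Example: on a pull request, a commit created with
#   git commit -m "update pins [skip ci]"
# matches the condition above and flips RET to 'false'; the step below exports it
# as start_main, and the Build stage at the end only runs when start_main == 'true'.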
24 | - bash: echo "##vso[task.setvariable variable=start_main;isOutput=true]$RET" 25 | name: result 26 | displayName: Export result 27 | - stage: Build 28 | condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true')) 29 | dependsOn: Check 30 | jobs: 31 | - template: ./.azure-pipelines/azure-pipelines-linux.yml 32 | - template: ./.azure-pipelines/azure-pipelines-osx.yml 33 | - template: ./.azure-pipelines/azure-pipelines-win.yml -------------------------------------------------------------------------------- /.ci_support/linux_64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '14' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - conda-forge main 29 | cuda_compiler: 30 | - cuda-nvcc 31 | cuda_compiler_version: 32 | - None 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '14' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64:alma9 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | BSD-3-Clause license 2 | Copyright (c) 2015-2022, conda-forge contributors 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 3. Neither the name of the copyright holder nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 
16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 20 | ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR 21 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 22 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 23 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 24 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 25 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 26 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 27 | DAMAGE. 28 | -------------------------------------------------------------------------------- /.ci_support/linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '11' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - conda-forge main 29 | cuda_compiler: 30 | - nvcc 31 | cuda_compiler_version: 32 | - '11.8' 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '11' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /.ci_support/linux_aarch64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '14' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - conda-forge main 29 | cuda_compiler: 30 | - cuda-nvcc 31 | cuda_compiler_version: 32 | - None 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | 
cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '14' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64:alma9 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-aarch64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /.ci_support/linux_ppc64le_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '14' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - conda-forge main 29 | cuda_compiler: 30 | - cuda-nvcc 31 | cuda_compiler_version: 32 | - None 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '14' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64:alma9 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-ppc64le 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /.ci_support/linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '11' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - 
conda-forge main 29 | cuda_compiler: 30 | - nvcc 31 | cuda_compiler_version: 32 | - '11.8' 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '11' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-aarch64 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /.ci_support/linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml: -------------------------------------------------------------------------------- 1 | aws_crt_cpp: 2 | - 0.35.4 3 | aws_sdk_cpp: 4 | - 1.11.606 5 | azure_core_cpp: 6 | - 1.16.1 7 | azure_identity_cpp: 8 | - 1.13.2 9 | azure_storage_blobs_cpp: 10 | - 12.15.0 11 | azure_storage_files_datalake_cpp: 12 | - 12.13.0 13 | brotli: 14 | - '1.2' 15 | bzip2: 16 | - '1' 17 | c_compiler: 18 | - gcc 19 | c_compiler_version: 20 | - '11' 21 | c_stdlib: 22 | - sysroot 23 | c_stdlib_version: 24 | - '2.17' 25 | channel_sources: 26 | - conda-forge 27 | channel_targets: 28 | - conda-forge main 29 | cuda_compiler: 30 | - nvcc 31 | cuda_compiler_version: 32 | - '11.8' 33 | cuda_compiler_version_min: 34 | - '11.8' 35 | cxx_compiler: 36 | - gxx 37 | cxx_compiler_version: 38 | - '11' 39 | docker_image: 40 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 41 | gflags: 42 | - '2.2' 43 | glog: 44 | - '0.7' 45 | libabseil: 46 | - '20250512' 47 | libarrow: 48 | - '*' 49 | libboost_devel: 50 | - '1.88' 51 | libbrotlicommon: 52 | - '1.2' 53 | libgoogle_cloud_devel: 54 | - '2.39' 55 | libgoogle_cloud_storage_devel: 56 | - '2.39' 57 | libgrpc: 58 | - '1.73' 59 | libopentelemetry_cpp: 60 | - '1.21' 61 | libprotobuf: 62 | - 6.31.1 63 | libutf8proc: 64 | - '2.11' 65 | lz4_c: 66 | - '1.10' 67 | openssl: 68 | - '3.5' 69 | orc: 70 | - 2.2.1 71 | re2: 72 | - 2025.08.12 73 | snappy: 74 | - '1.2' 75 | target_platform: 76 | - linux-ppc64le 77 | thrift_cpp: 78 | - 0.22.0 79 | zip_keys: 80 | - - c_compiler_version 81 | - cxx_compiler_version 82 | - c_stdlib_version 83 | - cuda_compiler_version 84 | - cuda_compiler 85 | - docker_image 86 | zlib: 87 | - '1' 88 | zstd: 89 | - '1.5' 90 | -------------------------------------------------------------------------------- /recipe/bld.bat: -------------------------------------------------------------------------------- 1 | @echo on 2 | 3 | mkdir cpp\build 4 | pushd cpp\build 5 | 6 | :: Enable CUDA support 7 | if "%cuda_compiler_version%"=="None" ( 8 | set "EXTRA_CMAKE_ARGS=-DARROW_CUDA=OFF" 9 | ) else ( 10 | set "EXTRA_CMAKE_ARGS=-DARROW_CUDA=ON" 11 | ) 12 | 13 | :: # reusable variable for dependencies we cannot yet enable 14 | set "READ_RECIPE_META_YAML_WHY_NOT=OFF" 15 | 16 | :: for available switches see 17 | :: 
https://github.com/apache/arrow/blame/apache-arrow-12.0.0/cpp/cmake_modules/DefineOptions.cmake 18 | cmake -G "Ninja" ^ 19 | -DARROW_ACERO=ON ^ 20 | -DARROW_AZURE=%READ_RECIPE_META_YAML_WHY_NOT% ^ 21 | -DARROW_BOOST_USE_SHARED:BOOL=ON ^ 22 | -DARROW_BUILD_STATIC:BOOL=OFF ^ 23 | -DARROW_BUILD_TESTS:BOOL=ON ^ 24 | -DARROW_BUILD_UTILITIES:BOOL=ON ^ 25 | -DARROW_COMPUTE:BOOL=ON ^ 26 | -DARROW_CSV:BOOL=ON ^ 27 | -DARROW_DATASET:BOOL=ON ^ 28 | -DARROW_DEPENDENCY_SOURCE=SYSTEM ^ 29 | -DARROW_ENABLE_TIMING_TESTS=OFF ^ 30 | -DARROW_FILESYSTEM:BOOL=ON ^ 31 | -DARROW_FLIGHT:BOOL=ON ^ 32 | -DARROW_FLIGHT_REQUIRE_TLSCREDENTIALSOPTIONS:BOOL=ON ^ 33 | -DARROW_FLIGHT_SQL:BOOL=ON ^ 34 | -DARROW_GANDIVA:BOOL=ON ^ 35 | -DARROW_GCS:BOOL=ON ^ 36 | -DARROW_HDFS:BOOL=ON ^ 37 | -DARROW_JSON:BOOL=ON ^ 38 | -DARROW_MIMALLOC:BOOL=ON ^ 39 | -DARROW_ORC:BOOL=ON ^ 40 | -DARROW_PACKAGE_PREFIX="%LIBRARY_PREFIX%" ^ 41 | -DARROW_PARQUET:BOOL=ON ^ 42 | -DPARQUET_BUILD_EXECUTABLES:BOOL=ON ^ 43 | -DARROW_S3:BOOL=ON ^ 44 | -DARROW_SIMD_LEVEL:STRING=DEFAULT ^ 45 | -DARROW_RUNTIME_SIMD_LEVEL=MAX ^ 46 | -DARROW_SUBSTRAIT:BOOL=ON ^ 47 | -DARROW_USE_GLOG:BOOL=ON ^ 48 | -DARROW_WITH_BROTLI:BOOL=ON ^ 49 | -DARROW_WITH_BZ2:BOOL=ON ^ 50 | -DARROW_WITH_LZ4:BOOL=ON ^ 51 | -DARROW_WITH_NLOHMANN_JSON:BOOL=ON ^ 52 | -DARROW_WITH_OPENTELEMETRY:BOOL=%READ_RECIPE_META_YAML_WHY_NOT% ^ 53 | -DARROW_WITH_SNAPPY:BOOL=ON ^ 54 | -DARROW_WITH_ZLIB:BOOL=ON ^ 55 | -DARROW_WITH_ZSTD:BOOL=ON ^ 56 | -DBUILD_SHARED_LIBS=ON ^ 57 | -DBoost_NO_BOOST_CMAKE=ON ^ 58 | -DCMAKE_BUILD_TYPE=release ^ 59 | -DCMAKE_CXX_STANDARD=17 ^ 60 | -DCMAKE_INSTALL_PREFIX="%LIBRARY_PREFIX%" ^ 61 | -DCMAKE_INTERPROCEDURAL_OPTIMIZATION=ON ^ 62 | -DCMAKE_UNITY_BUILD=OFF ^ 63 | -DLLVM_TOOLS_BINARY_DIR="%LIBRARY_BIN%" ^ 64 | -DLZ4_HOME="%LIBRARY_PREFIX%" ^ 65 | -DLZ4_INCLUDE_DIR="%LIBRARY_INC%" ^ 66 | -DLZ4_LIBRARY="%LIBRARY_LIB%\lz4.lib" ^ 67 | -DZSTD_HOME="%LIBRARY_PREFIX%" ^ 68 | -DZSTD_INCLUDE_DIR="%LIBRARY_INC%" ^ 69 | -DZSTD_LIBRARY="%LIBRARY_LIB%\libzstd.lib" ^ 70 | -DPARQUET_REQUIRE_ENCRYPTION:BOOL=ON ^ 71 | -DPython3_EXECUTABLE="%PYTHON%" ^ 72 | %EXTRA_CMAKE_ARGS% ^ 73 | .. 74 | if %ERRORLEVEL% neq 0 exit 1 75 | 76 | cmake --build . --config Release 77 | if %ERRORLEVEL% neq 0 exit 1 78 | 79 | if "%cuda_compiler_version%"=="None" ( 80 | npm install -g azurite 81 | set ARROW_TEST_DATA=%SRC_DIR%\testing\data 82 | set PARQUET_TEST_DATA=%SRC_DIR%\cpp\submodules\parquet-testing\data 83 | ctest --progress --output-on-failure 84 | if %ERRORLEVEL% neq 0 exit 1 85 | ) 86 | 87 | popd 88 | -------------------------------------------------------------------------------- /recipe/LLVM_LICENSE.txt: -------------------------------------------------------------------------------- 1 | ============================================================================== 2 | LLVM Release License 3 | ============================================================================== 4 | University of Illinois/NCSA 5 | Open Source License 6 | 7 | Copyright (c) 2003-2018 University of Illinois at Urbana-Champaign. 8 | All rights reserved. 
9 | 10 | Developed by: 11 | 12 | LLVM Team 13 | 14 | University of Illinois at Urbana-Champaign 15 | 16 | http://llvm.org 17 | 18 | Permission is hereby granted, free of charge, to any person obtaining a copy of 19 | this software and associated documentation files (the "Software"), to deal with 20 | the Software without restriction, including without limitation the rights to 21 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 22 | of the Software, and to permit persons to whom the Software is furnished to do 23 | so, subject to the following conditions: 24 | 25 | * Redistributions of source code must retain the above copyright notice, 26 | this list of conditions and the following disclaimers. 27 | 28 | * Redistributions in binary form must reproduce the above copyright notice, 29 | this list of conditions and the following disclaimers in the 30 | documentation and/or other materials provided with the distribution. 31 | 32 | * Neither the names of the LLVM Team, University of Illinois at 33 | Urbana-Champaign, nor the names of its contributors may be used to 34 | endorse or promote products derived from this Software without specific 35 | prior written permission. 36 | 37 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 38 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 39 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 40 | CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 41 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 42 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE 43 | SOFTWARE. 44 | 45 | ============================================================================== 46 | Copyrights and Licenses for Third Party Software Distributed with LLVM: 47 | ============================================================================== 48 | The LLVM software contains code written by third parties. Such software will 49 | have its own individual LICENSE.TXT file in the directory in which it appears. 50 | This file will describe the copyrights, license, and restrictions which apply 51 | to that code. 52 | 53 | The disclaimer of warranty in the University of Illinois Open Source License 54 | applies to all code in the LLVM Distribution, and nothing in any of the 55 | other licenses gives permission to use the names of the LLVM Team or the 56 | University of Illinois to endorse or promote products derived from this 57 | Software. 
58 | 59 | The following pieces of software have additional or alternate copyrights, 60 | licenses, and/or restrictions: 61 | 62 | Program Directory 63 | ------- --------- 64 | Google Test llvm/utils/unittest/googletest 65 | OpenBSD regex llvm/lib/Support/{reg*, COPYRIGHT.regex} 66 | pyyaml tests llvm/test/YAMLParser/{*.data, LICENSE.TXT} 67 | ARM contributions llvm/lib/Target/ARM/LICENSE.TXT 68 | md5 contributions llvm/lib/Support/MD5.cpp llvm/include/llvm/Support/MD5.h 69 | -------------------------------------------------------------------------------- /.ci_support/migrations/cuda118.yaml: -------------------------------------------------------------------------------- 1 | migrator_ts: 2145938400 2 | __migrator: 3 | kind: 4 | version 5 | migration_number: 6 | 1 7 | build_number: 8 | 1 9 | # This is intended as a _manual_ migrator to re-add CUDA 11.8, after we 10 | # dropped it as version that's built by default; DO NOT unpause 11 | paused: true 12 | override_cbc_keys: 13 | - cuda_compiler_stub 14 | operation: key_add 15 | check_solvable: false 16 | primary_key: cuda_compiler_version 17 | additional_zip_keys: 18 | - cuda_compiler 19 | - docker_image # [linux] 20 | ordering: 21 | cuda_compiler: 22 | - None 23 | - cuda-nvcc 24 | - nvcc 25 | cuda_compiler_version: 26 | - 12.4 27 | - 12.6 28 | - 12.8 29 | - None 30 | - 12.9 31 | - 11.8 32 | cuda_compiler_version_min: 33 | - 12.4 34 | - 12.6 35 | - 12.8 36 | - 12.9 37 | - 11.8 38 | 39 | cuda_compiler: # [(linux or win64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 40 | - nvcc # [(linux or win64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 41 | 42 | cuda_compiler_version: # [(linux or win64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 43 | - 11.8 # [(linux or win64) and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 44 | 45 | cuda_compiler_version_min: # [linux or win64] 46 | - 11.8 # [linux or win64] 47 | 48 | c_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 49 | - 11 # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 50 | 51 | cxx_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 52 | - 11 # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 53 | 54 | fortran_compiler_version: # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 55 | - 11 # [linux and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 56 | 57 | docker_image: # [os.environ.get("BUILD_PLATFORM", "").startswith("linux-") and os.environ.get("CF_CUDA_ENABLED", "False") == "True"] 58 | ### Docker images with CUDA 11.8 support 59 | 60 | # CUDA 11.8 builds (only x64 has a DEFAULT_LINUX_VERSION choice; alma9 not available) 61 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:cos7 # [linux64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-64" and os.environ.get("DEFAULT_LINUX_VERSION", "ubi8") == "cos7"] 62 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 # [linux64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-64" and os.environ.get("DEFAULT_LINUX_VERSION", "ubi8") in ("ubi8", "alma8", "alma9")] 63 | 64 | # CUDA 11.8 arch: native compilation (build == target) 65 | - quay.io/condaforge/linux-anvil-aarch64-cuda11.8:ubi8 # [aarch64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-aarch64"] 66 | - quay.io/condaforge/linux-anvil-ppc64le-cuda11.8:ubi8 
# [ppc64le and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-ppc64le"] 67 | 68 | # CUDA 11.8 arch: cross-compilation (build != target) 69 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 # [aarch64 and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-64"] 70 | - quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 # [ppc64le and os.environ.get("CF_CUDA_ENABLED", "False") == "True" and os.environ.get("BUILD_PLATFORM") == "linux-64"] 71 | -------------------------------------------------------------------------------- /.scripts/build_steps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 4 | # will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent 5 | # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 6 | # benefit from the improvement. 7 | 8 | # -*- mode: jinja-shell -*- 9 | 10 | set -xeuo pipefail 11 | export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}" 12 | source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh 13 | 14 | 15 | ( endgroup "Start Docker" ) 2> /dev/null 16 | 17 | ( startgroup "Configuring conda" ) 2> /dev/null 18 | 19 | export PYTHONUNBUFFERED=1 20 | export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}" 21 | export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support" 22 | export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml" 23 | 24 | cat >~/.condarc < /opt/conda/conda-meta/history 36 | micromamba install --root-prefix ~/.conda --prefix /opt/conda \ 37 | --yes --override-channels --channel conda-forge --strict-channel-priority \ 38 | pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" 39 | export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 40 | 41 | # set up the condarc 42 | setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 43 | 44 | source run_conda_forge_build_setup 45 | 46 | 47 | 48 | # make the build number clobber 49 | make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 50 | 51 | if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]] && [[ "${HOST_PLATFORM}" != linux-* ]] && [[ "${BUILD_WITH_CONDA_DEBUG:-0}" != 1 ]]; then 52 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" 53 | fi 54 | 55 | 56 | ( endgroup "Configuring conda" ) 2> /dev/null 57 | 58 | if [[ -f "${FEEDSTOCK_ROOT}/LICENSE.txt" ]]; then 59 | cp "${FEEDSTOCK_ROOT}/LICENSE.txt" "${RECIPE_ROOT}/recipe-scripts-license.txt" 60 | fi 61 | 62 | if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then 63 | if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then 64 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" 65 | fi 66 | conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ 67 | ${EXTRA_CB_OPTIONS:-} \ 68 | --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" 69 | 70 | # Drop into an interactive shell 71 | /bin/bash 72 | else 73 | conda-build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ 74 | --suppress-variables ${EXTRA_CB_OPTIONS:-} \ 75 | --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" \ 76 | --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}" 77 | ( startgroup "Inspecting artifacts" ) 2> /dev/null 78 | 79 | # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 80 | command -v inspect_artifacts >/dev/null 2>&1 
&& inspect_artifacts --recipe-dir "${RECIPE_ROOT}" -m "${CONFIG_FILE}" || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" 81 | 82 | ( endgroup "Inspecting artifacts" ) 2> /dev/null 83 | ( startgroup "Validating outputs" ) 2> /dev/null 84 | 85 | validate_recipe_outputs "${FEEDSTOCK_NAME}" 86 | 87 | ( endgroup "Validating outputs" ) 2> /dev/null 88 | 89 | ( startgroup "Uploading packages" ) 2> /dev/null 90 | 91 | if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then 92 | upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" 93 | fi 94 | 95 | ( endgroup "Uploading packages" ) 2> /dev/null 96 | fi 97 | 98 | ( startgroup "Final checks" ) 2> /dev/null 99 | 100 | touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}" 101 | -------------------------------------------------------------------------------- /.azure-pipelines/azure-pipelines-linux.yml: -------------------------------------------------------------------------------- 1 | # This file was generated automatically from conda-smithy. To update this configuration, 2 | # update the conda-forge.yml and/or the recipe/meta.yaml. 3 | # -*- mode: yaml -*- 4 | 5 | jobs: 6 | - job: linux 7 | pool: 8 | vmImage: ubuntu-latest 9 | strategy: 10 | matrix: 11 | linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11: 12 | CONFIG: linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 13 | UPLOAD_PACKAGES: 'True' 14 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 15 | linux_64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14: 16 | CONFIG: linux_64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14 17 | UPLOAD_PACKAGES: 'True' 18 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 19 | linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11: 20 | CONFIG: linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 21 | UPLOAD_PACKAGES: 'True' 22 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 23 | linux_aarch64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14: 24 | CONFIG: linux_aarch64_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14 25 | UPLOAD_PACKAGES: 'True' 26 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 27 | linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11: 28 | CONFIG: linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 29 | UPLOAD_PACKAGES: 'True' 30 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8 31 | linux_ppc64le_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14: 32 | CONFIG: linux_ppc64le_c_compiler_version14cuda_compiler_versionNonecxx_compiler_version14 33 | UPLOAD_PACKAGES: 'True' 34 | DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9 35 | timeoutInMinutes: 360 36 | variables: {} 37 | 38 | steps: 39 | - script: | 40 | sudo mkdir -p /opt/empty_dir || true 41 | for d in \ 42 | /opt/ghc \ 43 | /opt/hostedtoolcache \ 44 | /usr/lib/jvm \ 45 | /usr/local/.ghcup \ 46 | /usr/local/lib/android \ 47 | /usr/local/share/powershell \ 48 | /usr/share/dotnet \ 49 | /usr/share/swift \ 50 | ; do 51 | sudo rsync --stats -a --delete /opt/empty_dir/ $d || true 52 | done 53 | sudo apt-get purge -y -f firefox \ 54 | google-chrome-stable \ 55 | microsoft-edge-stable 56 | sudo apt-get autoremove -y >& /dev/null 57 
| sudo apt-get autoclean -y >& /dev/null 58 | df -h 59 | displayName: Manage disk space 60 | # configure qemu binfmt-misc running. This allows us to run docker containers 61 | # embedded qemu-static 62 | - script: | 63 | docker run --rm --privileged multiarch/qemu-user-static:register --reset --credential yes 64 | ls /proc/sys/fs/binfmt_misc/ 65 | condition: not(startsWith(variables['CONFIG'], 'linux_64')) 66 | displayName: Configure binfmt_misc 67 | 68 | - script: | 69 | export CI=azure 70 | export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) 71 | export remote_url=$(Build.Repository.Uri) 72 | export sha=$(Build.SourceVersion) 73 | export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME 74 | export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) 75 | if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then 76 | export IS_PR_BUILD="True" 77 | else 78 | export IS_PR_BUILD="False" 79 | fi 80 | .scripts/run_docker_build.sh 81 | displayName: Run docker build 82 | env: 83 | BINSTAR_TOKEN: $(BINSTAR_TOKEN) 84 | FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) 85 | STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) 86 | -------------------------------------------------------------------------------- /recipe/activate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # for the gdb-wrappers, we need to create a symlink that 4 | # contains the full path of the lib _within_ the installed 5 | # env, which we don't have until the env is created. 6 | 7 | # doesn't come with a deactivate script, because the symlink 8 | # is benign and doesn't need to be deleted. 9 | 10 | _la_log() { 11 | if [ "${CF_LIBARROW_ACTIVATE_LOGGING:-}" = "1" ]; then 12 | # The following loop is necessary to handle multi-line strings 13 | # like for the output of `ls -al`. 14 | printf '%s\n' "$*" | while IFS= read -r line 15 | do 16 | echo "$CONDA_PREFIX/etc/conda/activate.d/libarrow_activate.sh DEBUG: $line" 17 | done 18 | fi 19 | } 20 | 21 | # Skip activation if CONDA_BUILD environment variable is set. 22 | # (CONDA_BUILD is also set in the test stage, and we don't want to skip there.) 23 | # Otherwise, the symlinks will be included in packages built with libarrow as a host dependency. 24 | # see https://github.com/conda-forge/arrow-cpp-feedstock/issues/1478 25 | if [ -n "${CONDA_BUILD:-}" ] && [ "${CONDA_BUILD_STATE:-0}" != "TEST" ]; then 26 | _la_log "CONDA_BUILD is set to $CONDA_BUILD (and CONDA_BUILD_STATE != \"TEST\"), skipping libarrow activation." 27 | return 0 28 | fi 29 | 30 | _la_log "Beginning libarrow activation." 31 | 32 | # where the GDB wrappers get installed 33 | _la_gdb_prefix="$CONDA_PREFIX/share/gdb/auto-load" 34 | 35 | # If the directory is not writable, nothing can be done 36 | if [ ! -w "$_la_gdb_prefix" ]; then 37 | _la_log 'No rights to modify $_la_gdb_prefix, cannot create symlink!' 38 | _la_log 'Unless you plan to use the GDB debugger with libarrow, this warning can be safely ignored.' 39 | return 40 | fi 41 | 42 | # this needs to be in sync with ARROW_GDB_INSTALL_DIR in build.sh 43 | _la_placeholder="replace_this_section_with_absolute_slashed_path_to_CONDA_PREFIX" 44 | # the paths here are intentionally stacked, see #935, resp. 
45 | # https://github.com/apache/arrow/blob/master/docs/source/cpp/gdb.rst#manual-loading 46 | _la_symlink_dir="$_la_gdb_prefix/$CONDA_PREFIX/lib" 47 | _la_orig_install_dir="$_la_gdb_prefix/$_la_placeholder/lib" 48 | 49 | _la_log " _la_gdb_prefix: $_la_gdb_prefix" 50 | _la_log " _la_placeholder: $_la_placeholder" 51 | _la_log " _la_symlink_dir: $_la_symlink_dir" 52 | _la_log " _la_orig_install_dir: $_la_orig_install_dir" 53 | _la_log " content of that folder:" 54 | _la_log "$(ls -al "$_la_orig_install_dir" | sed 's/^/ /')" 55 | 56 | # there's only one lib in the _la_orig_install_dir folder, but the libname changes 57 | # based on the version so use a loop instead of hardcoding it. 58 | for _la_target in "$_la_orig_install_dir/"*.py; do 59 | if [ ! -e "$_la_target" ]; then 60 | # If the file doesn't exist, skip this iteration of the loop. 61 | # (This happens when no files are found, in which case the 62 | # loop runs with target equal to the pattern itself.) 63 | _la_log 'Folder $_la_orig_install_dir seems to not contain .py files, skipping.' 64 | continue 65 | fi 66 | _la_symlink="$_la_symlink_dir/$(basename "$_la_target")" 67 | _la_log " _la_target: $_la_target" 68 | _la_log " _la_symlink: $_la_symlink" 69 | if [ -L "$_la_symlink" ] && [ "$(readlink "$_la_symlink")" = "$_la_target" ]; then 70 | _la_log 'symlink $_la_symlink already exists and points to $_la_target, skipping.' 71 | continue 72 | fi 73 | _la_log 'Creating symlink $_la_symlink pointing to $_la_target.' 74 | mkdir -p "$_la_symlink_dir" || true 75 | # this check also creates the symlink; if it fails, we enter the if-branch. 76 | if ! ln -sf "$_la_target" "$_la_symlink"; then 77 | echo -n "${BASH_SOURCE[0]} WARNING: Failed to create symlink from " 78 | echo "'$_la_target' to '$_la_symlink'!" 79 | echo "Unless you plan to use the GDB debugger with libarrow, this warning can be safely ignored." 80 | continue 81 | fi 82 | done 83 | 84 | _la_log "Libarrow activation complete." 85 | 86 | unset _la_gdb_prefix 87 | unset _la_log 88 | unset _la_orig_install_dir 89 | unset _la_placeholder 90 | unset _la_symlink 91 | unset _la_symlink_dir 92 | unset _la_target 93 | -------------------------------------------------------------------------------- /.scripts/run_docker_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here 4 | # will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent 5 | # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 6 | # benefit from the improvement. 7 | 8 | source .scripts/logging_utils.sh 9 | 10 | ( startgroup "Configure Docker" ) 2> /dev/null 11 | 12 | set -xeo pipefail 13 | 14 | THISDIR="$( cd "$( dirname "$0" )" >/dev/null && pwd )" 15 | PROVIDER_DIR="$(basename "$THISDIR")" 16 | 17 | FEEDSTOCK_ROOT="$( cd "$( dirname "$0" )/.." 
>/dev/null && pwd )" 18 | RECIPE_ROOT="${FEEDSTOCK_ROOT}/recipe" 19 | 20 | if [ -z ${FEEDSTOCK_NAME} ]; then 21 | export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT}) 22 | fi 23 | 24 | if [[ "${sha:-}" == "" ]]; then 25 | pushd "${FEEDSTOCK_ROOT}" 26 | sha=$(git rev-parse HEAD) 27 | popd 28 | fi 29 | 30 | docker info 31 | 32 | # In order for the conda-build process in the container to write to the mounted 33 | # volumes, we need to run with the same id as the host machine, which is 34 | # normally the owner of the mounted volumes, or at least has write permission 35 | export HOST_USER_ID=$(id -u) 36 | # Check if docker-machine is being used (normally on OSX) and get the uid from 37 | # the VM 38 | if hash docker-machine 2> /dev/null && docker-machine active > /dev/null; then 39 | export HOST_USER_ID=$(docker-machine ssh $(docker-machine active) id -u) 40 | fi 41 | 42 | ARTIFACTS="$FEEDSTOCK_ROOT/build_artifacts" 43 | 44 | if [ -z "$CONFIG" ]; then 45 | set +x 46 | FILES=`ls .ci_support/linux_*` 47 | CONFIGS="" 48 | for file in $FILES; do 49 | CONFIGS="${CONFIGS}'${file:12:-5}' or "; 50 | done 51 | echo "Need to set CONFIG env variable. Value can be one of ${CONFIGS:0:-4}" 52 | exit 1 53 | fi 54 | 55 | if [ -z "${DOCKER_IMAGE}" ]; then 56 | SHYAML_INSTALLED="$(shyaml -h || echo NO)" 57 | if [ "${SHYAML_INSTALLED}" == "NO" ]; then 58 | echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Trying to parse with coreutils" 59 | DOCKER_IMAGE=$(cat .ci_support/${CONFIG}.yaml | grep '^docker_image:$' -A 1 | tail -n 1 | cut -b 3-) 60 | if [ "${DOCKER_IMAGE}" = "" ]; then 61 | echo "No docker_image entry found in ${CONFIG}. Falling back to quay.io/condaforge/linux-anvil-comp7" 62 | DOCKER_IMAGE="quay.io/condaforge/linux-anvil-comp7" 63 | fi 64 | else 65 | DOCKER_IMAGE="$(cat "${FEEDSTOCK_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 quay.io/condaforge/linux-anvil-comp7 )" 66 | fi 67 | fi 68 | 69 | mkdir -p "$ARTIFACTS" 70 | DONE_CANARY="$ARTIFACTS/conda-forge-build-done-${CONFIG}" 71 | rm -f "$DONE_CANARY" 72 | 73 | # Allow people to specify extra default arguments to `docker run` (e.g. 
`--rm`) 74 | DOCKER_RUN_ARGS="${CONDA_FORGE_DOCKER_RUN_ARGS}" 75 | if [ -z "${CI}" ]; then 76 | DOCKER_RUN_ARGS="-it ${DOCKER_RUN_ARGS}" 77 | fi 78 | 79 | ( endgroup "Configure Docker" ) 2> /dev/null 80 | 81 | ( startgroup "Start Docker" ) 2> /dev/null 82 | 83 | export UPLOAD_PACKAGES="${UPLOAD_PACKAGES:-True}" 84 | export IS_PR_BUILD="${IS_PR_BUILD:-False}" 85 | docker pull "${DOCKER_IMAGE}" 86 | docker run ${DOCKER_RUN_ARGS} \ 87 | -v "${RECIPE_ROOT}":/home/conda/recipe_root:rw,z,delegated \ 88 | -v "${FEEDSTOCK_ROOT}":/home/conda/feedstock_root:rw,z,delegated \ 89 | -e CONFIG \ 90 | -e HOST_USER_ID \ 91 | -e UPLOAD_PACKAGES \ 92 | -e IS_PR_BUILD \ 93 | -e GIT_BRANCH \ 94 | -e UPLOAD_ON_BRANCH \ 95 | -e CI \ 96 | -e FEEDSTOCK_NAME \ 97 | -e CPU_COUNT \ 98 | -e BUILD_WITH_CONDA_DEBUG \ 99 | -e BUILD_OUTPUT_ID \ 100 | -e flow_run_id \ 101 | -e remote_url \ 102 | -e sha \ 103 | -e BINSTAR_TOKEN \ 104 | -e FEEDSTOCK_TOKEN \ 105 | -e STAGING_BINSTAR_TOKEN \ 106 | "${DOCKER_IMAGE}" \ 107 | bash \ 108 | "/home/conda/feedstock_root/${PROVIDER_DIR}/build_steps.sh" 109 | 110 | # verify that the end of the script was reached 111 | test -f "$DONE_CANARY" 112 | 113 | # This closes the last group opened in `build_steps.sh` 114 | ( endgroup "Final checks" ) 2> /dev/null 115 | -------------------------------------------------------------------------------- /.scripts/run_osx_build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # -*- mode: jinja-shell -*- 4 | 5 | source .scripts/logging_utils.sh 6 | 7 | set -xe 8 | 9 | MINIFORGE_HOME="${MINIFORGE_HOME:-${HOME}/miniforge3}" 10 | MINIFORGE_HOME="${MINIFORGE_HOME%/}" # remove trailing slash 11 | export CONDA_BLD_PATH="${CONDA_BLD_PATH:-${MINIFORGE_HOME}/conda-bld}" 12 | 13 | ( startgroup "Provisioning base env with micromamba" ) 2> /dev/null 14 | MICROMAMBA_VERSION="1.5.10-0" 15 | if [[ "$(uname -m)" == "arm64" ]]; then 16 | osx_arch="osx-arm64" 17 | else 18 | osx_arch="osx-64" 19 | fi 20 | MICROMAMBA_URL="https://github.com/mamba-org/micromamba-releases/releases/download/${MICROMAMBA_VERSION}/micromamba-${osx_arch}" 21 | MAMBA_ROOT_PREFIX="${MINIFORGE_HOME}-micromamba-$(date +%s)" 22 | echo "Downloading micromamba ${MICROMAMBA_VERSION}" 23 | micromamba_exe="$(mktemp -d)/micromamba" 24 | curl -L -o "${micromamba_exe}" "${MICROMAMBA_URL}" 25 | chmod +x "${micromamba_exe}" 26 | echo "Creating environment" 27 | "${micromamba_exe}" create --yes --root-prefix "${MAMBA_ROOT_PREFIX}" --prefix "${MINIFORGE_HOME}" \ 28 | --channel conda-forge \ 29 | pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" 30 | echo "Moving pkgs cache from ${MAMBA_ROOT_PREFIX} to ${MINIFORGE_HOME}" 31 | mv "${MAMBA_ROOT_PREFIX}/pkgs" "${MINIFORGE_HOME}" 32 | echo "Cleaning up micromamba" 33 | rm -rf "${MAMBA_ROOT_PREFIX}" "${micromamba_exe}" || true 34 | ( endgroup "Provisioning base env with micromamba" ) 2> /dev/null 35 | 36 | ( startgroup "Configuring conda" ) 2> /dev/null 37 | echo "Activating environment" 38 | source "${MINIFORGE_HOME}/etc/profile.d/conda.sh" 39 | conda activate base 40 | export CONDA_SOLVER="libmamba" 41 | export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 42 | 43 | 44 | 45 | 46 | 47 | echo -e "\n\nSetting up the condarc and mangling the compiler." 
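# Editorial note, not part of the conda-smithy template: setup_conda_rc and
# mangle_compiler below are helpers from the conda-forge-ci-setup package
# installed into the base env above. A minimal sketch for reproducing this
# stage locally from a feedstock checkout on macOS, assuming CONFIG names one
# of the .ci_support/*.yaml variants (the value below is hypothetical):
#
#   export CONFIG=osx_64_    # hypothetical; use a real variant name
#   bash .scripts/run_osx_build.sh
#
# Leaving CI unset skips the CI-only compiler/homebrew mangling checked below;
# the validation/upload steps at the end expect FEEDSTOCK_NAME and tokens that
# are only provided in CI.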
48 | setup_conda_rc ./ ./recipe ./.ci_support/${CONFIG}.yaml 49 | 50 | if [[ "${CI:-}" != "" ]]; then 51 | mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml 52 | fi 53 | 54 | if [[ "${CI:-}" != "" ]]; then 55 | echo -e "\n\nMangling homebrew in the CI to avoid conflicts." 56 | /usr/bin/sudo mangle_homebrew 57 | /usr/bin/sudo -k 58 | else 59 | echo -e "\n\nNot mangling homebrew as we are not running in CI" 60 | fi 61 | 62 | if [[ "${sha:-}" == "" ]]; then 63 | sha=$(git rev-parse HEAD) 64 | fi 65 | 66 | echo -e "\n\nRunning the build setup script." 67 | source run_conda_forge_build_setup 68 | 69 | 70 | 71 | ( endgroup "Configuring conda" ) 2> /dev/null 72 | 73 | echo -e "\n\nMaking the build clobber file" 74 | make_build_number ./ ./recipe ./.ci_support/${CONFIG}.yaml 75 | 76 | if [[ -f LICENSE.txt ]]; then 77 | cp LICENSE.txt "recipe/recipe-scripts-license.txt" 78 | fi 79 | 80 | if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then 81 | if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then 82 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" 83 | fi 84 | conda debug ./recipe -m ./.ci_support/${CONFIG}.yaml \ 85 | ${EXTRA_CB_OPTIONS:-} \ 86 | --clobber-file ./.ci_support/clobber_${CONFIG}.yaml 87 | 88 | # Drop into an interactive shell 89 | /bin/bash 90 | else 91 | 92 | if [[ "${HOST_PLATFORM}" != "${BUILD_PLATFORM}" ]]; then 93 | EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test" 94 | fi 95 | 96 | conda-build ./recipe -m ./.ci_support/${CONFIG}.yaml \ 97 | --suppress-variables ${EXTRA_CB_OPTIONS:-} \ 98 | --clobber-file ./.ci_support/clobber_${CONFIG}.yaml \ 99 | --extra-meta flow_run_id="$flow_run_id" remote_url="$remote_url" sha="$sha" 100 | 101 | ( startgroup "Inspecting artifacts" ) 2> /dev/null 102 | 103 | # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 104 | command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir ./recipe -m ./.ci_support/${CONFIG}.yaml || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" 105 | 106 | ( endgroup "Inspecting artifacts" ) 2> /dev/null 107 | ( startgroup "Validating outputs" ) 2> /dev/null 108 | 109 | validate_recipe_outputs "${FEEDSTOCK_NAME}" 110 | 111 | ( endgroup "Validating outputs" ) 2> /dev/null 112 | 113 | ( startgroup "Uploading packages" ) 2> /dev/null 114 | 115 | if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then 116 | upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" ./ ./recipe ./.ci_support/${CONFIG}.yaml 117 | fi 118 | 119 | ( endgroup "Uploading packages" ) 2> /dev/null 120 | fi 121 | -------------------------------------------------------------------------------- /recipe/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | mkdir -p cpp/build 5 | pushd cpp/build 6 | 7 | # Include g++'s system headers 8 | if [ "$(uname)" == "Linux" ]; then 9 | SYSTEM_INCLUDES=$(echo | ${CXX} -E -Wp,-v -xc++ - 2>&1 | grep '^ ' | awk '{print "-isystem;" substr($1, 1)}' | tr '\n' ';') 10 | ARROW_GANDIVA_PC_CXX_FLAGS="${SYSTEM_INCLUDES}" 11 | else 12 | # See https://conda-forge.org/docs/maintainer/knowledge_base.html#newer-c-features-with-old-sdk 13 | CXXFLAGS="${CXXFLAGS} -D_LIBCPP_DISABLE_AVAILABILITY" 14 | ARROW_GANDIVA_PC_CXX_FLAGS="-D_LIBCPP_DISABLE_AVAILABILITY" 15 | fi 16 | 17 | # Enable CUDA support 18 | if [[ ! 
-z "${cuda_compiler_version+x}" && "${cuda_compiler_version}" != "None" ]] 19 | then 20 | CMAKE_ARGS="${CMAKE_ARGS} -DARROW_CUDA=ON -DCUDAToolkit_ROOT=${CUDA_HOME} -DCMAKE_LIBRARY_PATH=${CONDA_BUILD_SYSROOT}/lib" 21 | else 22 | CMAKE_ARGS="${CMAKE_ARGS} -DARROW_CUDA=OFF" 23 | fi 24 | 25 | if [[ "${build_platform}" != "${target_platform}" ]]; then 26 | # point to a usable protoc/grpc_cpp_plugin if we're cross-compiling 27 | CMAKE_ARGS="${CMAKE_ARGS} -DProtobuf_PROTOC_EXECUTABLE=$BUILD_PREFIX/bin/protoc" 28 | if [[ ! -f ${BUILD_PREFIX}/bin/${CONDA_TOOLCHAIN_HOST}-clang ]]; then 29 | ln -sf ${BUILD_PREFIX}/bin/clang ${BUILD_PREFIX}/bin/${CONDA_TOOLCHAIN_HOST}-clang 30 | fi 31 | CMAKE_ARGS="${CMAKE_ARGS} -DCLANG_EXECUTABLE=${BUILD_PREFIX}/bin/${CONDA_TOOLCHAIN_HOST}-clang" 32 | CMAKE_ARGS="${CMAKE_ARGS} -DLLVM_LINK_EXECUTABLE=${BUILD_PREFIX}/bin/llvm-link" 33 | CMAKE_ARGS="${CMAKE_ARGS} -DARROW_JEMALLOC_LG_PAGE=16" 34 | CMAKE_ARGS="${CMAKE_ARGS} -DARROW_GRPC_CPP_PLUGIN=${BUILD_PREFIX}/bin/grpc_cpp_plugin" 35 | fi 36 | 37 | # disable -fno-plt, which causes problems with GCC on PPC 38 | if [[ "$target_platform" == "linux-ppc64le" ]]; then 39 | CFLAGS="$(echo $CFLAGS | sed 's/-fno-plt //g')" 40 | CXXFLAGS="$(echo $CXXFLAGS | sed 's/-fno-plt //g')" 41 | fi 42 | 43 | if [[ "${target_platform}" == "linux-aarch64" ]] || [[ "${target_platform}" == "linux-ppc64le" ]]; then 44 | # Limit number of threads used to avoid hardware oversubscription 45 | export CMAKE_BUILD_PARALLEL_LEVEL=3 46 | fi 47 | 48 | # IPO produces segfaults in the test suite on macOS x86-64 49 | if [[ "$target_platform" == "osx-64" ]]; then 50 | CMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF 51 | else 52 | CMAKE_INTERPROCEDURAL_OPTIMIZATION=ON 53 | fi 54 | 55 | # reusable variable for dependencies we cannot yet unvendor 56 | export READ_RECIPE_META_YAML_WHY_NOT=OFF 57 | 58 | # for available switches see 59 | # https://github.com/apache/arrow/blame/apache-arrow-12.0.0/cpp/cmake_modules/DefineOptions.cmake 60 | # placeholder in ARROW_GDB_INSTALL_DIR must match _la_placeholder in activate.sh 61 | cmake -GNinja \ 62 | -DARROW_ACERO=ON \ 63 | -DARROW_AZURE=ON \ 64 | -DARROW_BOOST_USE_SHARED=ON \ 65 | -DARROW_BUILD_BENCHMARKS=OFF \ 66 | -DARROW_BUILD_STATIC=OFF \ 67 | -DARROW_BUILD_TESTS=ON \ 68 | -DARROW_BUILD_UTILITIES=ON \ 69 | -DARROW_COMPUTE=ON \ 70 | -DARROW_CSV=ON \ 71 | -DARROW_CXXFLAGS="${CXXFLAGS}" \ 72 | -DARROW_DATASET=ON \ 73 | -DARROW_DEPENDENCY_SOURCE=SYSTEM \ 74 | -DARROW_ENABLE_TIMING_TESTS=OFF \ 75 | -DARROW_FILESYSTEM=ON \ 76 | -DARROW_FLIGHT=ON \ 77 | -DARROW_FLIGHT_REQUIRE_TLSCREDENTIALSOPTIONS=ON \ 78 | -DARROW_FLIGHT_SQL=ON \ 79 | -DARROW_GANDIVA=ON \ 80 | -DARROW_GANDIVA_PC_CXX_FLAGS="${ARROW_GANDIVA_PC_CXX_FLAGS}" \ 81 | -DARROW_GCS=ON \ 82 | -DARROW_GDB_INSTALL_DIR=replace_this_section_with_absolute_slashed_path_to_CONDA_PREFIX/lib \ 83 | -DARROW_HDFS=ON \ 84 | -DARROW_JEMALLOC=ON \ 85 | -DARROW_JSON=ON \ 86 | -DARROW_MIMALLOC=ON \ 87 | -DARROW_ORC=ON \ 88 | -DARROW_PACKAGE_PREFIX=$PREFIX \ 89 | -DARROW_PARQUET=ON \ 90 | -DPARQUET_BUILD_EXECUTABLES=ON \ 91 | -DPARQUET_REQUIRE_ENCRYPTION=ON \ 92 | -DARROW_S3=ON \ 93 | -DARROW_SIMD_LEVEL=DEFAULT \ 94 | -DARROW_RUNTIME_SIMD_LEVEL=MAX \ 95 | -DARROW_SUBSTRAIT=ON \ 96 | -DARROW_USE_GLOG=ON \ 97 | -DARROW_USE_LD_GOLD=ON \ 98 | -DARROW_WITH_BROTLI=ON \ 99 | -DARROW_WITH_BZ2=ON \ 100 | -DARROW_WITH_LZ4=ON \ 101 | -DARROW_WITH_NLOHMANN_JSON=ON \ 102 | -DARROW_WITH_OPENTELEMETRY=ON \ 103 | -DARROW_WITH_SNAPPY=ON \ 104 | -DARROW_WITH_UCX=OFF \ 105 | -DARROW_WITH_ZLIB=ON \ 106 | 
-DARROW_WITH_ZSTD=ON \ 107 | -DBUILD_SHARED_LIBS=ON \ 108 | -DCMAKE_BUILD_TYPE=release \ 109 | -DCMAKE_CXX_STANDARD=17 \ 110 | -DCMAKE_INSTALL_LIBDIR=lib \ 111 | -DCMAKE_INSTALL_PREFIX=$PREFIX \ 112 | -DCMAKE_INTERPROCEDURAL_OPTIMIZATION:BOOL=${CMAKE_INTERPROCEDURAL_OPTIMIZATION} \ 113 | -DLLVM_TOOLS_BINARY_DIR=$PREFIX/bin \ 114 | -DZSTD_HOME=${PREFIX} \ 115 | -DZSTD_INCLUDE_DIR=${PREFIX}/include \ 116 | -DZSTD_LIBRARY=${PREFIX}/lib/libzstd${SHLIB_EXT} \ 117 | -DMAKE=$BUILD_PREFIX/bin/make \ 118 | -DPython3_EXECUTABLE=${PYTHON} \ 119 | ${CMAKE_ARGS} \ 120 | .. 121 | 122 | # Do not install arrow, only build. 123 | cmake --build . --config Release 124 | 125 | if [[ "$CONDA_BUILD_CROSS_COMPILATION" != 1 && "$cuda_compiler_version" == "None" ]]; then 126 | npm install -g azurite 127 | export ARROW_TEST_DATA=$SRC_DIR/testing/data 128 | export PARQUET_TEST_DATA=$SRC_DIR/cpp/submodules/parquet-testing/data 129 | ctest --progress --output-on-failure 130 | fi 131 | 132 | popd 133 | -------------------------------------------------------------------------------- /recipe/install-libarrow.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | # temporary prefix to be able to install files more granularly 5 | mkdir temp_prefix 6 | 7 | cmake --install ./cpp/build --prefix=./temp_prefix 8 | 9 | if [[ "${PKG_NAME}" == "libarrow" ]]; then 10 | # only libarrow (+ activation scripts) 11 | cp -a ./temp_prefix/lib/libarrow.* $PREFIX/lib 12 | cp -a ./temp_prefix/lib/libarrow_cuda.* $PREFIX/lib || true 13 | cp ./temp_prefix/lib/pkgconfig/arrow.pc $PREFIX/lib/pkgconfig 14 | cp ./temp_prefix/lib/pkgconfig/arrow-csv.pc $PREFIX/lib/pkgconfig 15 | cp ./temp_prefix/lib/pkgconfig/arrow-cuda.pc $PREFIX/lib/pkgconfig || true 16 | cp ./temp_prefix/lib/pkgconfig/arrow-filesystem.pc $PREFIX/lib/pkgconfig 17 | cp ./temp_prefix/lib/pkgconfig/arrow-json.pc $PREFIX/lib/pkgconfig 18 | cp ./temp_prefix/lib/pkgconfig/arrow-orc.pc $PREFIX/lib/pkgconfig 19 | cp -R ./temp_prefix/lib/cmake/Arrow/. $PREFIX/lib/cmake/Arrow 20 | cp -R ./temp_prefix/lib/cmake/ArrowCUDA/. $PREFIX/lib/cmake/ArrowCUDA || true 21 | cp -R ./temp_prefix/share/arrow/. $PREFIX/share/arrow 22 | cp -R ./temp_prefix/share/doc/. $PREFIX/share/doc 23 | cp -R ./temp_prefix/share/gdb/. $PREFIX/share/gdb 24 | cp -R ./temp_prefix/include/arrow/. $PREFIX/include/arrow 25 | 26 | # Copy the [de]activate scripts to $PREFIX/etc/conda/[de]activate.d, see 27 | # https://conda-forge.org/docs/maintainer/adding_pkgs.html#activate-scripts 28 | for CHANGE in "activate" 29 | do 30 | mkdir -p "${PREFIX}/etc/conda/${CHANGE}.d" 31 | cp "${RECIPE_DIR}/${CHANGE}.sh" "${PREFIX}/etc/conda/${CHANGE}.d/${PKG_NAME}_${CHANGE}.sh" 32 | done 33 | elif [[ "${PKG_NAME}" == "libarrow-acero" ]]; then 34 | # only libarrow-acero 35 | cp -a ./temp_prefix/lib/libarrow_acero.* $PREFIX/lib 36 | cp ./temp_prefix/lib/pkgconfig/arrow-acero.pc $PREFIX/lib/pkgconfig 37 | cp -R ./temp_prefix/lib/cmake/ArrowAcero/. $PREFIX/lib/cmake/ArrowAcero 38 | elif [[ "${PKG_NAME}" == "libarrow-compute" ]]; then 39 | # only libarrow-compute 40 | cp -a ./temp_prefix/lib/libarrow_compute.* $PREFIX/lib 41 | cp ./temp_prefix/lib/pkgconfig/arrow-compute.pc $PREFIX/lib/pkgconfig 42 | cp -R ./temp_prefix/lib/cmake/ArrowCompute/. 
$PREFIX/lib/cmake/ArrowCompute
43 | elif [[ "${PKG_NAME}" == "libarrow-dataset" ]]; then
44 | # only libarrow-dataset
45 | cp -a ./temp_prefix/lib/libarrow_dataset.* $PREFIX/lib
46 | cp ./temp_prefix/lib/pkgconfig/arrow-dataset.pc $PREFIX/lib/pkgconfig
47 | cp -R ./temp_prefix/lib/cmake/ArrowDataset/. $PREFIX/lib/cmake/ArrowDataset
48 | elif [[ "${PKG_NAME}" == "libarrow-flight" ]]; then
49 | # only libarrow-flight
50 | cp -a ./temp_prefix/lib/libarrow_flight.* $PREFIX/lib
51 | cp ./temp_prefix/lib/libarrow_flight_transport_ucx.* $PREFIX/lib || true
52 | cp ./temp_prefix/lib/pkgconfig/arrow-flight.pc $PREFIX/lib/pkgconfig
53 | cp -R ./temp_prefix/lib/cmake/ArrowFlight/. $PREFIX/lib/cmake/ArrowFlight
54 | elif [[ "${PKG_NAME}" == "libarrow-flight-sql" ]]; then
55 | # only libarrow-flight-sql
56 | cp -a ./temp_prefix/lib/libarrow_flight_sql.* $PREFIX/lib
57 | cp ./temp_prefix/lib/pkgconfig/arrow-flight-sql.pc $PREFIX/lib/pkgconfig
58 | cp -R ./temp_prefix/lib/cmake/ArrowFlightSql/. $PREFIX/lib/cmake/ArrowFlightSql
59 | elif [[ "${PKG_NAME}" == "libarrow-gandiva" ]]; then
60 | # only libarrow-gandiva
61 | cp -a ./temp_prefix/lib/libgandiva.* $PREFIX/lib
62 | cp ./temp_prefix/lib/pkgconfig/gandiva.pc $PREFIX/lib/pkgconfig
63 | cp -R ./temp_prefix/lib/cmake/Gandiva/. $PREFIX/lib/cmake/Gandiva
64 | cp -R ./temp_prefix/include/gandiva/. $PREFIX/include/gandiva
65 | elif [[ "${PKG_NAME}" == "libarrow-substrait" ]]; then
66 | # only libarrow-substrait
67 | cp -a ./temp_prefix/lib/libarrow_substrait.* $PREFIX/lib
68 | cp ./temp_prefix/lib/pkgconfig/arrow-substrait.pc $PREFIX/lib/pkgconfig
69 | cp -R ./temp_prefix/lib/cmake/ArrowSubstrait/. $PREFIX/lib/cmake/ArrowSubstrait
70 | elif [[ "${PKG_NAME}" == "libparquet" ]]; then
71 | # only parquet
72 | cp -a ./temp_prefix/lib/libparquet.* $PREFIX/lib
73 | cp ./temp_prefix/lib/pkgconfig/parquet.pc $PREFIX/lib/pkgconfig
74 | cp -R ./temp_prefix/lib/cmake/Parquet/. $PREFIX/lib/cmake/Parquet
75 | cp -R ./temp_prefix/include/parquet/. $PREFIX/include/parquet
76 | elif [[ "${PKG_NAME}" == "parquet-utils" ]]; then
77 | cp ./temp_prefix/bin/parquet-* $PREFIX/bin
78 | elif [[ "${PKG_NAME}" == "arrow-utils" ]]; then
79 | cp ./temp_prefix/bin/arrow-* $PREFIX/bin
80 | elif [[ "${PKG_NAME}" == "libarrow-all" ]]; then
81 | # libarrow-all: install everything else (whatever ends up in this output
82 | # should generally be installed into the appropriate libarrow-* output).
83 | cmake --install ./cpp/build --prefix=$PREFIX
84 | # remove testing bits, c.f. https://github.com/apache/arrow/issues/44993
85 | rm -f $PREFIX/lib/{libarrow_testing,libarrow_flight_testing}.*
86 | rm -f $PREFIX/lib/pkgconfig/{arrow-testing,arrow-flight-testing}.pc
87 | rm -rf $PREFIX/lib/cmake/{ArrowTesting,ArrowFlightTesting}
88 | else
89 | # shouldn't happen
90 | exit 1
91 | fi
92 |
93 | if [[ "${PKG_NAME}" != "libarrow" ]]; then
94 | # delete the symlink created by libarrow's activation script,
95 | # to avoid it being wrongly detected as content of libarrow-*.
96 | rm $PREFIX/share/gdb/auto-load/$PREFIX/lib/libarrow.*-gdb.py || true
97 | fi
98 |
99 | # Clean up temp_prefix
100 | rm -rf temp_prefix
101 |
--------------------------------------------------------------------------------
/.scripts/run_win_build.bat:
--------------------------------------------------------------------------------
1 | :: PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
2 | :: will be lost next time ``conda smithy rerender`` is run.
If you would like to make permanent 3 | :: changes to this script, consider a proposal to conda-smithy so that other feedstocks can also 4 | :: benefit from the improvement. 5 | 6 | :: INPUTS (required environment variables) 7 | :: CONFIG: name of the .ci_support/*.yaml file for this job 8 | :: CI: azure, github_actions, or unset 9 | :: MINIFORGE_HOME: where to install the base conda environment 10 | :: UPLOAD_PACKAGES: true or false 11 | :: UPLOAD_ON_BRANCH: true or false 12 | 13 | setlocal enableextensions enabledelayedexpansion 14 | 15 | FOR %%A IN ("%~dp0.") DO SET "REPO_ROOT=%%~dpA" 16 | if "%MINIFORGE_HOME%"=="" set "MINIFORGE_HOME=%USERPROFILE%\Miniforge3" 17 | :: Remove trailing backslash, if present 18 | if "%MINIFORGE_HOME:~-1%"=="\" set "MINIFORGE_HOME=%MINIFORGE_HOME:~0,-1%" 19 | call :start_group "Provisioning base env with micromamba" 20 | set "MAMBA_ROOT_PREFIX=%MINIFORGE_HOME%-micromamba-%RANDOM%" 21 | set "MICROMAMBA_VERSION=1.5.10-0" 22 | set "MICROMAMBA_URL=https://github.com/mamba-org/micromamba-releases/releases/download/%MICROMAMBA_VERSION%/micromamba-win-64" 23 | set "MICROMAMBA_TMPDIR=%TMP%\micromamba-%RANDOM%" 24 | set "MICROMAMBA_EXE=%MICROMAMBA_TMPDIR%\micromamba.exe" 25 | 26 | echo Downloading micromamba %MICROMAMBA_VERSION% 27 | if not exist "%MICROMAMBA_TMPDIR%" mkdir "%MICROMAMBA_TMPDIR%" 28 | powershell -ExecutionPolicy Bypass -Command "(New-Object Net.WebClient).DownloadFile('%MICROMAMBA_URL%', '%MICROMAMBA_EXE%')" 29 | if !errorlevel! neq 0 exit /b !errorlevel! 30 | 31 | echo Creating environment 32 | call "%MICROMAMBA_EXE%" create --yes --root-prefix "%MAMBA_ROOT_PREFIX%" --prefix "%MINIFORGE_HOME%" ^ 33 | --channel conda-forge ^ 34 | pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" 35 | if !errorlevel! neq 0 exit /b !errorlevel! 36 | echo Removing %MAMBA_ROOT_PREFIX% 37 | del /S /Q "%MAMBA_ROOT_PREFIX%" >nul 38 | del /S /Q "%MICROMAMBA_TMPDIR%" >nul 39 | call :end_group 40 | 41 | call :start_group "Configuring conda" 42 | 43 | :: Activate the base conda environment 44 | echo Activating environment 45 | call "%MINIFORGE_HOME%\Scripts\activate.bat" 46 | :: Configure the solver 47 | set "CONDA_SOLVER=libmamba" 48 | if !errorlevel! neq 0 exit /b !errorlevel! 49 | set "CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1" 50 | 51 | :: Set basic configuration 52 | echo Setting up configuration 53 | setup_conda_rc .\ ".\recipe" .\.ci_support\%CONFIG%.yaml 54 | if !errorlevel! neq 0 exit /b !errorlevel! 55 | echo Running build setup 56 | CALL run_conda_forge_build_setup 57 | 58 | 59 | if !errorlevel! neq 0 exit /b !errorlevel! 60 | 61 | if EXIST LICENSE.txt ( 62 | echo Copying feedstock license 63 | copy LICENSE.txt "recipe\\recipe-scripts-license.txt" 64 | ) 65 | if NOT [%HOST_PLATFORM%] == [%BUILD_PLATFORM%] ( 66 | if [%CROSSCOMPILING_EMULATOR%] == [] ( 67 | set "EXTRA_CB_OPTIONS=%EXTRA_CB_OPTIONS% --no-test" 68 | ) 69 | ) 70 | 71 | if NOT [%flow_run_id%] == [] ( 72 | set "EXTRA_CB_OPTIONS=%EXTRA_CB_OPTIONS% --extra-meta flow_run_id=%flow_run_id% remote_url=%remote_url% sha=%sha%" 73 | ) 74 | 75 | call :end_group 76 | 77 | :: Build the recipe 78 | echo Building recipe 79 | conda-build.exe "recipe" -m .ci_support\%CONFIG%.yaml --suppress-variables %EXTRA_CB_OPTIONS% 80 | if !errorlevel! neq 0 exit /b !errorlevel! 
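:: Editorial note, not part of the conda-smithy template: the conda-build.exe
:: call above is the actual package build; the steps below only inspect,
:: validate and (outside PR builds) upload what was just built. A minimal
:: local-reproduction sketch, assuming CONFIG names one of the
:: .ci_support\*.yaml variants (the value below is hypothetical):
::
::   set "CONFIG=win_64_cuda_compiler_versionNone"
::   set "MINIFORGE_HOME=%USERPROFILE%\Miniforge3"
::   call .scripts\run_win_build.bat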
81 | 82 | call :start_group "Inspecting artifacts" 83 | :: inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 84 | WHERE inspect_artifacts >nul 2>nul && inspect_artifacts --recipe-dir ".\recipe" -m .ci_support\%CONFIG%.yaml || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" 85 | call :end_group 86 | 87 | :: Prepare some environment variables for the upload step 88 | if /i "%CI%" == "github_actions" ( 89 | set "FEEDSTOCK_NAME=%GITHUB_REPOSITORY:*/=%" 90 | set "GIT_BRANCH=%GITHUB_REF:refs/heads/=%" 91 | if /i "%GITHUB_EVENT_NAME%" == "pull_request" ( 92 | set "IS_PR_BUILD=True" 93 | ) else ( 94 | set "IS_PR_BUILD=False" 95 | ) 96 | set "TEMP=%RUNNER_TEMP%" 97 | ) 98 | if /i "%CI%" == "azure" ( 99 | set "FEEDSTOCK_NAME=%BUILD_REPOSITORY_NAME:*/=%" 100 | set "GIT_BRANCH=%BUILD_SOURCEBRANCHNAME%" 101 | if /i "%BUILD_REASON%" == "PullRequest" ( 102 | set "IS_PR_BUILD=True" 103 | ) else ( 104 | set "IS_PR_BUILD=False" 105 | ) 106 | set "TEMP=%UPLOAD_TEMP%" 107 | ) 108 | 109 | :: Validate 110 | call :start_group "Validating outputs" 111 | validate_recipe_outputs "%FEEDSTOCK_NAME%" 112 | if !errorlevel! neq 0 exit /b !errorlevel! 113 | call :end_group 114 | 115 | if /i "%UPLOAD_PACKAGES%" == "true" ( 116 | if /i "%IS_PR_BUILD%" == "false" ( 117 | call :start_group "Uploading packages" 118 | if not exist "%TEMP%\" md "%TEMP%" 119 | set "TMP=%TEMP%" 120 | upload_package --validate --feedstock-name="%FEEDSTOCK_NAME%" .\ ".\recipe" .ci_support\%CONFIG%.yaml 121 | if !errorlevel! neq 0 exit /b !errorlevel! 122 | call :end_group 123 | ) 124 | ) 125 | 126 | exit 127 | 128 | :: Logging subroutines 129 | 130 | :start_group 131 | if /i "%CI%" == "github_actions" ( 132 | echo ::group::%~1 133 | exit /b 134 | ) 135 | if /i "%CI%" == "azure" ( 136 | echo ##[group]%~1 137 | exit /b 138 | ) 139 | echo %~1 140 | exit /b 141 | 142 | :end_group 143 | if /i "%CI%" == "github_actions" ( 144 | echo ::endgroup:: 145 | exit /b 146 | ) 147 | if /i "%CI%" == "azure" ( 148 | echo ##[endgroup] 149 | exit /b 150 | ) 151 | exit /b 152 | -------------------------------------------------------------------------------- /recipe/install-libarrow.bat: -------------------------------------------------------------------------------- 1 | @echo on 2 | 3 | :: Create temporary prefix to be able to install files more granularly 4 | mkdir temp_prefix 5 | 6 | cmake --install .\cpp\build --prefix=.\temp_prefix 7 | 8 | if [%PKG_NAME%] == [libarrow] ( 9 | move .\temp_prefix\lib\arrow.lib %LIBRARY_LIB% 10 | move .\temp_prefix\bin\arrow.dll %LIBRARY_BIN% 11 | move .\temp_prefix\lib\arrow_cuda.lib %LIBRARY_LIB% || true 12 | move .\temp_prefix\bin\arrow_cuda.dll %LIBRARY_BIN% || true 13 | copy .\temp_prefix\lib\pkgconfig\arrow.pc %LIBRARY_LIB%\pkgconfig 14 | copy .\temp_prefix\lib\pkgconfig\arrow-csv.pc %LIBRARY_LIB%\pkgconfig 15 | copy .\temp_prefix\lib\pkgconfig\arrow-cuda.pc %LIBRARY_LIB%\pkgconfig || true 16 | copy .\temp_prefix\lib\pkgconfig\arrow-filesystem.pc %LIBRARY_LIB%\pkgconfig 17 | copy .\temp_prefix\lib\pkgconfig\arrow-json.pc %LIBRARY_LIB%\pkgconfig 18 | copy .\temp_prefix\lib\pkgconfig\arrow-orc.pc %LIBRARY_LIB%\pkgconfig 19 | mkdir %LIBRARY_LIB%\cmake\Arrow 20 | move .\temp_prefix\lib\cmake\Arrow\* %LIBRARY_LIB%\cmake\Arrow 21 | mkdir %LIBRARY_LIB%\cmake\ArrowCUDA 22 | move .\temp_prefix\lib\cmake\ArrowCUDA\* %LIBRARY_LIB%\cmake\ArrowCUDA || true 23 | mkdir %LIBRARY_PREFIX%\share\doc\arrow 24 | move .\temp_prefix\share\doc\arrow\* %LIBRARY_PREFIX%\share\doc\arrow 25 | mkdir %LIBRARY_PREFIX%\share\arrow 26 | 
xcopy /s /y .\temp_prefix\share\arrow %LIBRARY_PREFIX%\share\arrow 27 | mkdir %LIBRARY_PREFIX%\include\arrow 28 | xcopy /s /y .\temp_prefix\include\arrow %LIBRARY_PREFIX%\include\arrow 29 | ) else if [%PKG_NAME%] == [libarrow-acero] ( 30 | move .\temp_prefix\lib\arrow_acero.lib %LIBRARY_LIB% 31 | move .\temp_prefix\bin\arrow_acero.dll %LIBRARY_BIN% 32 | copy .\temp_prefix\lib\pkgconfig\arrow-acero.pc %LIBRARY_LIB%\pkgconfig 33 | mkdir %LIBRARY_LIB%\cmake\ArrowAcero 34 | move .\temp_prefix\lib\cmake\ArrowAcero\* %LIBRARY_LIB%\cmake\ArrowAcero 35 | ) else if [%PKG_NAME%] == [libarrow-compute] ( 36 | move .\temp_prefix\lib\arrow_compute.lib %LIBRARY_LIB% 37 | move .\temp_prefix\bin\arrow_compute.dll %LIBRARY_BIN% 38 | copy .\temp_prefix\lib\pkgconfig\arrow-compute.pc %LIBRARY_LIB%\pkgconfig 39 | mkdir %LIBRARY_LIB%\cmake\ArrowCompute 40 | move .\temp_prefix\lib\cmake\ArrowCompute\* %LIBRARY_LIB%\cmake\ArrowCompute 41 | ) else if [%PKG_NAME%] == [libarrow-dataset] ( 42 | move .\temp_prefix\lib\arrow_dataset.lib %LIBRARY_LIB% 43 | move .\temp_prefix\bin\arrow_dataset.dll %LIBRARY_BIN% 44 | copy .\temp_prefix\lib\pkgconfig\arrow-dataset.pc %LIBRARY_LIB%\pkgconfig 45 | mkdir %LIBRARY_LIB%\cmake\ArrowDataset 46 | move .\temp_prefix\lib\cmake\ArrowDataset\* %LIBRARY_LIB%\cmake\ArrowDataset 47 | ) else if [%PKG_NAME%] == [libarrow-flight] ( 48 | move .\temp_prefix\lib\arrow_flight.lib %LIBRARY_LIB% 49 | move .\temp_prefix\bin\arrow_flight.dll %LIBRARY_BIN% 50 | copy .\temp_prefix\lib\pkgconfig\arrow-flight.pc %LIBRARY_LIB%\pkgconfig 51 | mkdir %LIBRARY_LIB%\cmake\ArrowFlight 52 | move .\temp_prefix\lib\cmake\ArrowFlight\* %LIBRARY_LIB%\cmake\ArrowFlight 53 | ) else if [%PKG_NAME%] == [libarrow-flight-sql] ( 54 | move .\temp_prefix\lib\arrow_flight_sql.lib %LIBRARY_LIB% 55 | move .\temp_prefix\bin\arrow_flight_sql.dll %LIBRARY_BIN% 56 | copy .\temp_prefix\lib\pkgconfig\arrow-flight-sql.pc %LIBRARY_LIB%\pkgconfig 57 | mkdir %LIBRARY_LIB%\cmake\ArrowFlightSql 58 | move .\temp_prefix\lib\cmake\ArrowFlightSql\* %LIBRARY_LIB%\cmake\ArrowFlightSql 59 | ) else if [%PKG_NAME%] == [libarrow-gandiva] ( 60 | move .\temp_prefix\lib\gandiva.lib %LIBRARY_LIB% 61 | move .\temp_prefix\bin\gandiva.dll %LIBRARY_BIN% 62 | copy .\temp_prefix\lib\pkgconfig\gandiva.pc %LIBRARY_LIB%\pkgconfig 63 | mkdir %LIBRARY_LIB%\cmake\Gandiva 64 | move .\temp_prefix\lib\cmake\Gandiva\* %LIBRARY_LIB%\cmake\Gandiva 65 | mkdir %LIBRARY_PREFIX%\include\gandiva 66 | xcopy /s /y .\temp_prefix\include\gandiva %LIBRARY_PREFIX%\include\gandiva 67 | ) else if [%PKG_NAME%] == [libarrow-substrait] ( 68 | move .\temp_prefix\lib\arrow_substrait.lib %LIBRARY_LIB% 69 | move .\temp_prefix\bin\arrow_substrait.dll %LIBRARY_BIN% 70 | copy .\temp_prefix\lib\pkgconfig\arrow-substrait.pc %LIBRARY_LIB%\pkgconfig 71 | mkdir %LIBRARY_LIB%\cmake\ArrowSubstrait 72 | move .\temp_prefix\lib\cmake\ArrowSubstrait\* %LIBRARY_LIB%\cmake\ArrowSubstrait 73 | ) else if [%PKG_NAME%] == [libparquet] ( 74 | move .\temp_prefix\lib\parquet.lib %LIBRARY_LIB% 75 | move .\temp_prefix\bin\parquet.dll %LIBRARY_BIN% 76 | copy .\temp_prefix\lib\pkgconfig\parquet.pc %LIBRARY_LIB%\pkgconfig 77 | mkdir %LIBRARY_LIB%\cmake\Parquet 78 | move .\temp_prefix\lib\cmake\Parquet\* %LIBRARY_LIB%\cmake\Parquet 79 | mkdir %LIBRARY_PREFIX%\include\parquet 80 | xcopy /s /y .\temp_prefix\include\parquet %LIBRARY_PREFIX%\include\parquet 81 | ) else if [%PKG_NAME%] == [parquet-utils] ( 82 | copy .\temp_prefix\bin\parquet-*.exe %LIBRARY_BIN% 83 | ) else if [%PKG_NAME%] == [arrow-utils] ( 84 | copy 
.\temp_prefix\bin\arrow-*.exe %LIBRARY_BIN%
85 | ) else if [%PKG_NAME%] == [libarrow-all] (
86 | REM libarrow-all: install everything else (whatever ends up in this output
87 | REM should generally be installed into the appropriate libarrow-* output).
88 | cmake --install .\cpp\build --prefix=%LIBRARY_PREFIX%
89 | REM remove testing bits, c.f. https://github.com/apache/arrow/issues/44993
90 | for %%F in (arrow_testing arrow_flight_testing) do (
91 | del /s /q "%LIBRARY_LIB%\%%F.lib"
92 | del /s /q "%LIBRARY_BIN%\%%F.dll"
93 | )
94 | for %%F in (ArrowTesting ArrowFlightTesting) do (
95 | rmdir /s /q "%LIBRARY_LIB%\cmake\%%F"
96 | )
97 | for %%F in (arrow-testing arrow-flight-testing) do (
98 | del /s /q "%LIBRARY_LIB%\pkgconfig\%%F.pc"
99 | )
100 | ) else (
101 | REM shouldn't happen
102 | exit 1
103 | )
104 |
105 | :: clean up temp_prefix between builds
106 | rmdir /s /q temp_prefix
107 | --------------------------------------------------------------------------------
/recipe/patches/0004-GH-48260-C-Python-R-Move-S3-bucket-references-to-new.patch:
--------------------------------------------------------------------------------
1 | From c4f7a9859aaebc9e7a94fea125e10c7d54d56a30 Mon Sep 17 00:00:00 2001
2 | From: Nic Crane
3 | Date: Fri, 28 Nov 2025 10:00:28 +0000
4 | Subject: [PATCH 4/4] GH-48260: [C++][Python][R] Move S3 bucket references to
5 | new bucket as Voltron Data ones will be removed soon (#48261)
6 | MIME-Version: 1.0
7 | Content-Type: text/plain; charset=UTF-8
8 | Content-Transfer-Encoding: 8bit
9 |
10 | No more VD, no more VD S3 bucket!
11 |
12 | Move references to S3 bucket to the new Arrow one, update a few references to regions and things.
13 |
14 | Yeah, for the most part.
15 |
16 | No
17 | * GitHub Issue: #48260
18 |
19 | Authored-by: Nic Crane
20 | Signed-off-by: Raúl Cumplido
21 | ---
22 | cpp/src/arrow/filesystem/s3fs_test.cc | 2 +-
23 | docs/source/python/dataset.rst | 8 ++++----
24 | python/pyarrow/_s3fs.pyx | 4 ++--
25 | python/pyarrow/tests/test_fs.py | 22 +++++++++++-----------
26 | r/R/filesystem.R | 6 +++---
27 | r/man/gs_bucket.Rd | 2 +-
28 | r/man/s3_bucket.Rd | 4 ++--
29 | r/tests/testthat/test-filesystem.R | 14 +++++++-------
30 | r/vignettes/arrow.Rmd | 2 +-
31 | r/vignettes/dataset.Rmd | 4 ++--
32 | r/vignettes/fs.Rmd | 24 ++++++++++++------------
33 | r/vignettes/python.Rmd | 2 +-
34 | 12 files changed, 47 insertions(+), 47 deletions(-)
35 |
36 | diff --git a/cpp/src/arrow/filesystem/s3fs_test.cc b/cpp/src/arrow/filesystem/s3fs_test.cc
37 | index b5578b4f74..3eb898ea71 100644
38 | --- a/cpp/src/arrow/filesystem/s3fs_test.cc
39 | +++ b/cpp/src/arrow/filesystem/s3fs_test.cc
40 | @@ -420,7 +420,7 @@ TEST_F(S3OptionsTest, FromAssumeRole) {
41 | class S3RegionResolutionTest : public AwsTestMixin {};
42 |
43 | TEST_F(S3RegionResolutionTest, PublicBucket) {
44 | - ASSERT_OK_AND_EQ("us-east-2", ResolveS3BucketRegion("voltrondata-labs-datasets"));
45 | + ASSERT_OK_AND_EQ("us-east-1", ResolveS3BucketRegion("arrow-datasets"));
46 |
47 | // Taken from a registry of open S3-hosted datasets
48 | // at https://github.com/awslabs/open-data-registry
49 | diff --git a/docs/source/python/dataset.rst b/docs/source/python/dataset.rst
50 | index 00469fd57b..de4ff7be4c 100644
51 | --- a/docs/source/python/dataset.rst
52 | +++ b/docs/source/python/dataset.rst
53 | @@ -350,7 +350,7 @@ specifying a S3 path:
54 |
55 | ..
code-block:: python 56 | 57 | - dataset = ds.dataset("s3://voltrondata-labs-datasets/nyc-taxi/") 58 | + dataset = ds.dataset("s3://arrow-datasets/nyc-taxi/") 59 | 60 | Typically, you will want to customize the connection parameters, and then 61 | a file system object can be created and passed to the ``filesystem`` keyword: 62 | @@ -359,8 +359,8 @@ a file system object can be created and passed to the ``filesystem`` keyword: 63 | 64 | from pyarrow import fs 65 | 66 | - s3 = fs.S3FileSystem(region="us-east-2") 67 | - dataset = ds.dataset("voltrondata-labs-datasets/nyc-taxi/", filesystem=s3) 68 | + s3 = fs.S3FileSystem(region="us-east-1") 69 | + dataset = ds.dataset("arrow-datasets/nyc-taxi/", filesystem=s3) 70 | 71 | The currently available classes are :class:`~pyarrow.fs.S3FileSystem` and 72 | :class:`~pyarrow.fs.HadoopFileSystem`. See the :ref:`filesystem` docs for more 73 | @@ -381,7 +381,7 @@ useful for testing or benchmarking. 74 | 75 | # By default, MinIO will listen for unencrypted HTTP traffic. 76 | minio = fs.S3FileSystem(scheme="http", endpoint_override="localhost:9000") 77 | - dataset = ds.dataset("voltrondata-labs-datasets/nyc-taxi/", filesystem=minio) 78 | + dataset = ds.dataset("arrow-datasets/nyc-taxi/", filesystem=minio) 79 | 80 | 81 | Working with Parquet Datasets 82 | diff --git a/python/pyarrow/_s3fs.pyx b/python/pyarrow/_s3fs.pyx 83 | index b01e0b6ae4..6317bd3785 100644 84 | --- a/python/pyarrow/_s3fs.pyx 85 | +++ b/python/pyarrow/_s3fs.pyx 86 | @@ -91,8 +91,8 @@ def resolve_s3_region(bucket): 87 | 88 | Examples 89 | -------- 90 | - >>> fs.resolve_s3_region('voltrondata-labs-datasets') 91 | - 'us-east-2' 92 | + >>> fs.resolve_s3_region('arrow-datasets') 93 | + 'us-east-1' 94 | """ 95 | cdef: 96 | c_string c_bucket 97 | diff --git a/python/pyarrow/tests/test_fs.py b/python/pyarrow/tests/test_fs.py 98 | index a3b10f4cbe..0c7f1d9665 100644 99 | --- a/python/pyarrow/tests/test_fs.py 100 | +++ b/python/pyarrow/tests/test_fs.py 101 | @@ -1460,20 +1460,20 @@ def test_s3fs_wrong_region(): 102 | # anonymous=True incase CI/etc has invalid credentials 103 | fs = S3FileSystem(region='eu-north-1', anonymous=True) 104 | 105 | - msg = ("When getting information for bucket 'voltrondata-labs-datasets': " 106 | + msg = ("When getting information for bucket 'arrow-datasets': " 107 | r"AWS Error UNKNOWN \(HTTP status 301\) during HeadBucket " 108 | "operation: No response body. Looks like the configured region is " 109 | - "'eu-north-1' while the bucket is located in 'us-east-2'." 110 | + "'eu-north-1' while the bucket is located in 'us-east-1'." 111 | "|NETWORK_CONNECTION") 112 | with pytest.raises(OSError, match=msg) as exc: 113 | - fs.get_file_info("voltrondata-labs-datasets") 114 | + fs.get_file_info("arrow-datasets") 115 | 116 | # Sometimes fails on unrelated network error, so next call would also fail. 
117 | if 'NETWORK_CONNECTION' in str(exc.value): 118 | return 119 | 120 | - fs = S3FileSystem(region='us-east-2', anonymous=True) 121 | - fs.get_file_info("voltrondata-labs-datasets") 122 | + fs = S3FileSystem(region='us-east-1', anonymous=True) 123 | + fs.get_file_info("arrow-datasets") 124 | 125 | 126 | @pytest.mark.azure 127 | @@ -1911,15 +1911,15 @@ def test_s3_real_aws(): 128 | fs = S3FileSystem(anonymous=True) 129 | assert fs.region == default_region 130 | 131 | - fs = S3FileSystem(anonymous=True, region='us-east-2') 132 | + fs = S3FileSystem(anonymous=True, region='us-east-1') 133 | entries = fs.get_file_info(FileSelector( 134 | - 'voltrondata-labs-datasets/nyc-taxi')) 135 | + 'arrow-datasets/nyc-taxi')) 136 | assert len(entries) > 0 137 | - key = 'voltrondata-labs-datasets/nyc-taxi/year=2019/month=6/part-0.parquet' 138 | + key = 'arrow-datasets/nyc-taxi/year=2019/month=6/part-0.parquet' 139 | with fs.open_input_stream(key) as f: 140 | md = f.metadata() 141 | assert 'Content-Type' in md 142 | - assert md['Last-Modified'] == b'2022-07-12T23:32:00Z' 143 | + assert md['Last-Modified'] == b'2025-11-26T10:28:55Z' 144 | # For some reason, the header value is quoted 145 | # (both with AWS and Minio) 146 | assert md['ETag'] == b'"4c6a76826a695c6ac61592bc30cda3df-16"' 147 | @@ -1962,7 +1962,7 @@ def test_s3_real_aws_region_selection(): 148 | @pytest.mark.s3 149 | def test_resolve_s3_region(): 150 | from pyarrow.fs import resolve_s3_region 151 | - assert resolve_s3_region('voltrondata-labs-datasets') == 'us-east-2' 152 | + assert resolve_s3_region('arrow-datasets') == 'us-east-1' 153 | assert resolve_s3_region('mf-nwp-models') == 'eu-west-1' 154 | 155 | with pytest.raises(ValueError, match="Not a valid bucket name"): 156 | @@ -2119,7 +2119,7 @@ def test_s3_finalize_region_resolver(): 157 | with pytest.raises(ValueError, match="S3 .* finalized"): 158 | resolve_s3_region('mf-nwp-models') 159 | with pytest.raises(ValueError, match="S3 .* finalized"): 160 | - resolve_s3_region('voltrondata-labs-datasets') 161 | + resolve_s3_region('arrow-datasets') 162 | """ 163 | subprocess.check_call([sys.executable, "-c", code]) 164 | 165 | diff --git a/r/R/filesystem.R b/r/R/filesystem.R 166 | index 233e1981aa..ca219611a8 100644 167 | --- a/r/R/filesystem.R 168 | +++ b/r/R/filesystem.R 169 | @@ -488,13 +488,13 @@ default_s3_options <- list( 170 | #' relative path. Note that this function's success does not guarantee that you 171 | #' are authorized to access the bucket's contents. 172 | #' @examplesIf FALSE 173 | -#' bucket <- s3_bucket("voltrondata-labs-datasets") 174 | +#' bucket <- s3_bucket("arrow-datasets") 175 | #' 176 | #' @examplesIf FALSE 177 | #' # Turn on debug logging. The following line of code should be run in a fresh 178 | #' # R session prior to any calls to `s3_bucket()` (or other S3 functions) 179 | #' Sys.setenv("ARROW_S3_LOG_LEVEL" = "DEBUG") 180 | -#' bucket <- s3_bucket("voltrondata-labs-datasets") 181 | +#' bucket <- s3_bucket("arrow-datasets") 182 | #' 183 | #' @export 184 | s3_bucket <- function(bucket, ...) { 185 | @@ -530,7 +530,7 @@ s3_bucket <- function(bucket, ...) { 186 | #' relative path. Note that this function's success does not guarantee that you 187 | #' are authorized to access the bucket's contents. 188 | #' @examplesIf FALSE 189 | -#' bucket <- gs_bucket("voltrondata-labs-datasets") 190 | +#' bucket <- gs_bucket("arrow-datasets") 191 | #' @export 192 | gs_bucket <- function(bucket, ...) 
{ 193 | assert_that(is.string(bucket)) 194 | diff --git a/r/man/gs_bucket.Rd b/r/man/gs_bucket.Rd 195 | index 7dc39a42c3..44b2efb7d9 100644 196 | --- a/r/man/gs_bucket.Rd 197 | +++ b/r/man/gs_bucket.Rd 198 | @@ -22,6 +22,6 @@ that holds onto its relative path 199 | } 200 | \examples{ 201 | \dontshow{if (FALSE) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 202 | -bucket <- gs_bucket("voltrondata-labs-datasets") 203 | +bucket <- gs_bucket("arrow-datasets") 204 | \dontshow{\}) # examplesIf} 205 | } 206 | diff --git a/r/man/s3_bucket.Rd b/r/man/s3_bucket.Rd 207 | index bffcfa5c38..66a552ccbc 100644 208 | --- a/r/man/s3_bucket.Rd 209 | +++ b/r/man/s3_bucket.Rd 210 | @@ -30,12 +30,12 @@ be useful to increase the log level. See the Notes section in 211 | } 212 | \examples{ 213 | \dontshow{if (FALSE) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 214 | -bucket <- s3_bucket("voltrondata-labs-datasets") 215 | +bucket <- s3_bucket("arrow-datasets") 216 | \dontshow{\}) # examplesIf} 217 | \dontshow{if (FALSE) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 218 | # Turn on debug logging. The following line of code should be run in a fresh 219 | # R session prior to any calls to `s3_bucket()` (or other S3 functions) 220 | Sys.setenv("ARROW_S3_LOG_LEVEL"="DEBUG") 221 | -bucket <- s3_bucket("voltrondata-labs-datasets") 222 | +bucket <- s3_bucket("arrow-datasets") 223 | \dontshow{\}) # examplesIf} 224 | } 225 | diff --git a/r/tests/testthat/test-filesystem.R b/r/tests/testthat/test-filesystem.R 226 | index 34095acc25..cf17c6b74e 100644 227 | --- a/r/tests/testthat/test-filesystem.R 228 | +++ b/r/tests/testthat/test-filesystem.R 229 | @@ -146,20 +146,20 @@ test_that("FileSystem$from_uri", { 230 | skip_on_cran() 231 | skip_if_not_available("s3") 232 | skip_if_offline() 233 | - fs_and_path <- FileSystem$from_uri("s3://voltrondata-labs-datasets") 234 | + fs_and_path <- FileSystem$from_uri("s3://arrow-datasets") 235 | expect_r6_class(fs_and_path$fs, "S3FileSystem") 236 | - expect_identical(fs_and_path$fs$region, "us-east-2") 237 | + expect_identical(fs_and_path$fs$region, "us-east-1") 238 | }) 239 | 240 | test_that("SubTreeFileSystem$create() with URI", { 241 | skip_on_cran() 242 | skip_if_not_available("s3") 243 | skip_if_offline() 244 | - fs <- SubTreeFileSystem$create("s3://voltrondata-labs-datasets") 245 | + fs <- SubTreeFileSystem$create("s3://arrow-datasets") 246 | expect_r6_class(fs, "SubTreeFileSystem") 247 | expect_identical( 248 | capture.output(print(fs)), 249 | - "SubTreeFileSystem: s3://voltrondata-labs-datasets/" 250 | + "SubTreeFileSystem: s3://arrow-datasets/" 251 | ) 252 | }) 253 | 254 | @@ -193,12 +193,12 @@ test_that("gs_bucket", { 255 | skip_on_cran() 256 | skip_if_not_available("gcs") 257 | skip_if_offline() 258 | - bucket <- gs_bucket("voltrondata-labs-datasets") 259 | + bucket <- gs_bucket("arrow-datasets") 260 | expect_r6_class(bucket, "SubTreeFileSystem") 261 | expect_r6_class(bucket$base_fs, "GcsFileSystem") 262 | expect_identical( 263 | capture.output(print(bucket)), 264 | - "SubTreeFileSystem: gs://voltrondata-labs-datasets/" 265 | + "SubTreeFileSystem: gs://arrow-datasets/" 266 | ) 267 | - expect_identical(bucket$base_path, "voltrondata-labs-datasets/") 268 | + expect_identical(bucket$base_path, "arrow-datasets/") 269 | }) 270 | diff --git a/r/vignettes/arrow.Rmd b/r/vignettes/arrow.Rmd 271 | index be31f3a0ac..d8460415bd 100644 272 | --- a/r/vignettes/arrow.Rmd 273 | +++ b/r/vignettes/arrow.Rmd 274 | @@ 
-178,7 +178,7 @@ To learn more about analyzing Arrow data, see the [data wrangling article](./dat 275 | Another use for the arrow R package is to read, write, and analyze data sets stored remotely on cloud services. The package currently supports both Amazon Simple Storage Service (S3) and Google Cloud Storage (GCS). The example below illustrates how you can use `s3_bucket()` to refer to a an S3 bucket, and use `open_dataset()` to connect to the data set stored there: 276 | 277 | ```{r, eval=FALSE} 278 | -bucket <- s3_bucket("voltrondata-labs-datasets/nyc-taxi") 279 | +bucket <- s3_bucket("arrow-datasets/nyc-taxi") 280 | nyc_taxi <- open_dataset(bucket) 281 | ``` 282 | 283 | diff --git a/r/vignettes/dataset.Rmd b/r/vignettes/dataset.Rmd 284 | index bf8c00a5b6..085113033c 100644 285 | --- a/r/vignettes/dataset.Rmd 286 | +++ b/r/vignettes/dataset.Rmd 287 | @@ -22,13 +22,13 @@ This multi-file data set is comprised of 158 distinct Parquet files, each corres 288 | If you have Amazon S3 support enabled in arrow (true for most users; see links at the end of this article if you need to troubleshoot this), you can connect to a copy of the "tiny taxi data" stored on S3 with this command: 289 | 290 | ```r 291 | -bucket <- s3_bucket("voltrondata-labs-datasets/nyc-taxi-tiny") 292 | +bucket <- s3_bucket("arrow-datasets/nyc-taxi-tiny") 293 | ``` 294 | 295 | Alternatively you could connect to a copy of the data on Google Cloud Storage (GCS) using the following command: 296 | 297 | ```r 298 | -bucket <- gs_bucket("voltrondata-labs-datasets/nyc-taxi-tiny", anonymous = TRUE) 299 | +bucket <- gs_bucket("arrow-datasets/nyc-taxi-tiny", anonymous = TRUE) 300 | ``` 301 | 302 | If you want to use the full data set, replace `nyc-taxi-tiny` with `nyc-taxi` in the code above. Apart from size -- and with it the cost in time, bandwidth usage, and CPU cycles -- there is no difference in the two versions of the data: you can test your code using the tiny taxi data and then check how it scales using the full data set. 303 | diff --git a/r/vignettes/fs.Rmd b/r/vignettes/fs.Rmd 304 | index 07476877c5..ed3b1bddb0 100644 305 | --- a/r/vignettes/fs.Rmd 306 | +++ b/r/vignettes/fs.Rmd 307 | @@ -39,16 +39,16 @@ and pass the result to file readers and writers (`read_parquet()`, `write_feathe 308 | 309 | Often the reason users work with cloud storage in real world analysis is to access large data sets. An example of this is discussed in the [datasets article](./dataset.html), but new users may prefer to work with a much smaller data set while learning how the arrow cloud storage interface works. To that end, the examples in this article rely on a multi-file Parquet dataset that stores a copy of the `diamonds` data made available through the [`ggplot2`](https://ggplot2.tidyverse.org/) package, documented in `help("diamonds", package = "ggplot2")`. The cloud storage version of this data set consists of 5 Parquet files totaling less than 1MB in size. 310 | 311 | -The diamonds data set is hosted on both S3 and GCS, in a bucket named `voltrondata-labs-datasets`. To create an S3FileSystem object that refers to that bucket, use the following command: 312 | +The diamonds data set is hosted on both S3 and GCS, in a bucket named `arrow-datasets`. 
To create an S3FileSystem object that refers to that bucket, use the following command: 313 | 314 | ```r 315 | -bucket <- s3_bucket("voltrondata-labs-datasets") 316 | +bucket <- s3_bucket("arrow-datasets") 317 | ``` 318 | 319 | To do this for the GCS version of the data, the command is as follows: 320 | 321 | ```r 322 | -bucket <- gs_bucket("voltrondata-labs-datasets", anonymous = TRUE) 323 | +bucket <- gs_bucket("arrow-datasets", anonymous = TRUE) 324 | ``` 325 | 326 | Note that `anonymous = TRUE` is required for GCS if credentials have not been configured. 327 | @@ -126,7 +126,7 @@ df <- read_parquet(june2019$path("part-0.parquet")) 328 | `SubTreeFileSystem` can also be made from a URI: 329 | 330 | ```r 331 | -june2019 <- SubTreeFileSystem$create("s3://voltrondata-labs-datasets/nyc-taxi/year=2019/month=6") 332 | +june2019 <- SubTreeFileSystem$create("s3://arrow-datasets/nyc-taxi/year=2019/month=6") 333 | ``` 334 | --> 335 | 336 | @@ -150,8 +150,8 @@ gs://anonymous@bucket/path 337 | For example, the Parquet file storing the "good cut" diamonds that we downloaded earlier in the article is available on both S3 and CGS. The relevant URIs are as follows: 338 | 339 | ```r 340 | -uri <- "s3://voltrondata-labs-datasets/diamonds/cut=Good/part-0.parquet" 341 | -uri <- "gs://anonymous@voltrondata-labs-datasets/diamonds/cut=Good/part-0.parquet" 342 | +uri <- "s3://arrow-datasets/diamonds/cut=Good/part-0.parquet" 343 | +uri <- "gs://anonymous@arrow-datasets/diamonds/cut=Good/part-0.parquet" 344 | ``` 345 | 346 | Note that "anonymous" is required on GCS for public buckets. Regardless of which version you use, you can pass this URI to `read_parquet()` as if the file were stored locally: 347 | @@ -165,7 +165,7 @@ that are passed down to configure the underlying file system. They are separated 348 | by `&`. For example, 349 | 350 | ``` 351 | -s3://voltrondata-labs-datasets/?endpoint_override=https%3A%2F%2Fstorage.googleapis.com&allow_bucket_creation=true 352 | +s3://arrow-datasets/?endpoint_override=https%3A%2F%2Fstorage.googleapis.com&allow_bucket_creation=true 353 | ``` 354 | 355 | is equivalent to: 356 | @@ -175,7 +175,7 @@ bucket <- S3FileSystem$create( 357 | endpoint_override="https://storage.googleapis.com", 358 | allow_bucket_creation=TRUE 359 | ) 360 | -bucket$path("voltrondata-labs-datasets/") 361 | +bucket$path("arrow-datasets/") 362 | ``` 363 | 364 | Both tell the `S3FileSystem` object that it should allow the creation of new buckets 365 | @@ -198,7 +198,7 @@ a request may spend retrying before returning an error. The current default is 366 | 15 minutes, so in many interactive contexts it's nice to set a lower value: 367 | 368 | ``` 369 | -gs://anonymous@voltrondata-labs-datasets/diamonds/?retry_limit_seconds=10 370 | +gs://anonymous@arrow-datasets/diamonds/?retry_limit_seconds=10 371 | ``` 372 | 373 | ## Authentication 374 | @@ -237,9 +237,9 @@ If you haven't configured credentials, then to access *public* buckets, you 375 | must pass `anonymous = TRUE` or `anonymous` as the user in a URI: 376 | 377 | ```r 378 | -bucket <- gs_bucket("voltrondata-labs-datasets", anonymous = TRUE) 379 | +bucket <- gs_bucket("arrow-datasets", anonymous = TRUE) 380 | fs <- GcsFileSystem$create(anonymous = TRUE) 381 | -df <- read_parquet("gs://anonymous@voltrondata-labs-datasets/diamonds/cut=Good/part-0.parquet") 382 | +df <- read_parquet("gs://anonymous@arrow-datasets/diamonds/cut=Good/part-0.parquet") 383 | ``` 384 | 385 |