├── .clang-format ├── .editorconfig ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── 1-bug-report.yml │ ├── 2-feature-request.yml │ └── config.yml ├── dependabot.yml └── workflows │ ├── alpine.yml │ ├── cifuzz.yml │ ├── codeql.yml │ ├── dependency-review.yml │ ├── documentation.yml │ ├── emscripten.yml │ ├── lint_and_format_check.yml │ ├── macos_install.yml │ ├── pkg.yml │ ├── release-script-tests.yml │ ├── release_create.yml │ ├── release_prepare.yml │ ├── scorecard.yml │ ├── ubuntu-release.yml │ ├── ubuntu-s390x.yml │ ├── ubuntu-sanitized.yml │ ├── ubuntu-undef.yml │ ├── ubuntu.yml │ ├── ubuntu_install.yml │ ├── ubuntu_pedantic.yml │ ├── visual_studio.yml │ ├── visual_studio_clang.yml │ └── wpt-updater.yml ├── .gitignore ├── .python-version ├── CMakeLists.txt ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── SECURITY.md ├── ada.pc.in ├── benchmarks ├── CMakeLists.txt ├── bbc_bench.cpp ├── bench.cpp ├── bench_search_params.cpp ├── benchmark_header.h ├── benchmark_template.cpp ├── competitors │ └── servo-url │ │ ├── Cargo.lock │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── cbindgen.toml │ │ ├── lib.rs │ │ └── servo_url.h ├── model_bench.cpp ├── percent_encode.cpp ├── performancecounters │ ├── apple_arm_events.h │ ├── event_counter.h │ └── linux-perf-events.h ├── urlpattern.cpp └── wpt_bench.cpp ├── cmake ├── CPM.cmake ├── JoinPaths.cmake ├── ada-config.cmake.in ├── ada-flags.cmake ├── add-cpp-test.cmake └── codecoverage.cmake ├── docs ├── RELEASE.md ├── cli.md └── doxygen │ ├── footer.html │ └── header.html ├── doxygen ├── fuzz ├── ada_c.c ├── ada_c.options ├── build.sh ├── can_parse.cc ├── idna.cc ├── parse.cc ├── parse.options ├── url.dict ├── url_pattern.cc ├── url_pattern.options └── url_search_params.cc ├── include ├── ada.h ├── ada │ ├── ada_idna.h │ ├── ada_version.h │ ├── character_sets-inl.h │ ├── character_sets.h │ ├── checkers-inl.h │ ├── checkers.h │ ├── common_defs.h │ ├── encoding_type.h │ ├── errors.h │ ├── expected.h │ ├── helpers.h │ ├── implementation-inl.h │ ├── implementation.h │ ├── log.h │ ├── parser-inl.h │ ├── parser.h │ ├── scheme-inl.h │ ├── scheme.h │ ├── serializers.h │ ├── state.h │ ├── unicode-inl.h │ ├── unicode.h │ ├── url-inl.h │ ├── url.h │ ├── url_aggregator-inl.h │ ├── url_aggregator.h │ ├── url_base-inl.h │ ├── url_base.h │ ├── url_components-inl.h │ ├── url_components.h │ ├── url_pattern-inl.h │ ├── url_pattern.h │ ├── url_pattern_helpers-inl.h │ ├── url_pattern_helpers.h │ ├── url_pattern_init.h │ ├── url_pattern_regex.h │ ├── url_search_params-inl.h │ └── url_search_params.h └── ada_c.h ├── pyproject.toml ├── singleheader ├── CMakeLists.txt ├── README.md ├── amalgamate.py ├── demo.c └── demo.cpp ├── src ├── CMakeLists.txt ├── ada.cpp ├── ada_c.cpp ├── ada_idna.cpp ├── checkers.cpp ├── helpers.cpp ├── implementation.cpp ├── parser.cpp ├── serializers.cpp ├── unicode.cpp ├── url.cpp ├── url_aggregator.cpp ├── url_components.cpp ├── url_pattern.cpp ├── url_pattern_helpers.cpp └── url_pattern_regex.cpp ├── tests ├── CMakeLists.txt ├── ada_c.cpp ├── basic_fuzzer.cpp ├── basic_tests.cpp ├── from_file_tests.cpp ├── installation │ └── CMakeLists.txt ├── url_components.cpp ├── url_search_params.cpp ├── wasm │ ├── CMakeLists.txt │ ├── test.js.in │ └── wasm.cpp ├── wpt │ ├── CMakeLists.txt │ ├── IdnaTestV2-removed.json │ ├── IdnaTestV2.json │ ├── ada_extra_setters_tests.json │ ├── ada_extra_urltestdata.json │ ├── ada_long_urltestdata.json │ ├── percent-encoding.json │ ├── setters_tests.json │ ├── toascii.json │ ├── urlpattern-compare-test-data.json │ ├── 
urlpatterntestdata.json │ ├── urltestdata-javascript-only.json │ ├── urltestdata.json │ └── verifydnslength_tests.json ├── wpt_url_tests.cpp └── wpt_urlpattern_tests.cpp └── tools ├── CMakeLists.txt ├── cli ├── CMakeLists.txt ├── adaparse.cpp ├── benchmark_adaparse.sh ├── benchmark_write_to_file.sh └── line_iterator.h ├── prepare-doxygen.sh ├── release ├── __init__.py ├── create_release.py ├── lib │ ├── __init__.py │ ├── release.py │ ├── tests │ │ ├── __init__.py │ │ ├── samples │ │ │ ├── ada_version_h.txt │ │ │ ├── ada_version_h_expected.txt │ │ │ ├── cmakelists.txt │ │ │ ├── cmakelists_expected.txt │ │ │ ├── doxygen.txt │ │ │ └── doxygen_expected.txt │ │ ├── test_release.py │ │ └── test_update_versions.py │ └── versions.py ├── requirements.txt └── update_versions.py ├── run-clangcldocker.sh └── update-wpt.sh /.clang-format: -------------------------------------------------------------------------------- 1 | BasedOnStyle: Google 2 | SortIncludes: Never 3 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | indent_size = 2 7 | indent_style = space 8 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [anonrig, lemire] 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-bug-report.yml: -------------------------------------------------------------------------------- 1 | name: 🐛 Bug report 2 | description: Create a report to help us improve 3 | body: 4 | - type: markdown 5 | attributes: 6 | value: | 7 | Thank you for reporting an issue. 8 | 9 | Please fill in as much of the following form as you're able. 10 | - type: input 11 | attributes: 12 | label: Version 13 | description: Which Ada version are you referring to? 14 | - type: input 15 | attributes: 16 | label: Platform 17 | description: | 18 | UNIX: output of `uname -a` 19 | Windows: output of `"$([Environment]::OSVersion.VersionString) $(('x86', 'x64')[[Environment]::Is64BitOperatingSystem])"` in PowerShell console 20 | - type: textarea 21 | attributes: 22 | label: What steps will reproduce the bug? 23 | description: Enter details about your bug, preferably a simple code snippet that can be run directly without installing third-party dependencies. 24 | - type: textarea 25 | attributes: 26 | label: How often does it reproduce? Is there a required condition? 27 | - type: textarea 28 | attributes: 29 | label: What is the expected behavior? 30 | description: If possible please provide textual output instead of screenshots. 31 | - type: textarea 32 | attributes: 33 | label: What do you see instead? 34 | description: If possible please provide textual output instead of screenshots. 35 | validations: 36 | required: true 37 | - type: textarea 38 | attributes: 39 | label: Additional information 40 | description: Tell us anything else you think we should know. 
41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/2-feature-request.yml: -------------------------------------------------------------------------------- 1 | name: 🚀 Feature request 2 | description: Suggest an idea for this project 3 | labels: [feature request] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thank you for suggesting an idea to make Ada better. 9 | 10 | Please fill in as much of the following form as you're able. 11 | - type: textarea 12 | attributes: 13 | label: What is the problem this feature will solve? 14 | validations: 15 | required: true 16 | - type: textarea 17 | attributes: 18 | label: What is the feature you are proposing to solve the problem? 19 | validations: 20 | required: true 21 | - type: textarea 22 | attributes: 23 | label: What alternatives have you considered? 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Looking for documentation? 4 | url: https://ada-url.github.io/ada 5 | about: Please navigate to our documentation website. 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Set update schedule for GitHub Actions 2 | 3 | version: 2 4 | updates: 5 | - package-ecosystem: github-actions 6 | directory: / 7 | schedule: 8 | interval: monthly 9 | 10 | - package-ecosystem: cargo 11 | directory: /benchmarks/competitors/servo-url 12 | schedule: 13 | interval: monthly 14 | 15 | - package-ecosystem: pip 16 | directory: /tools/release 17 | schedule: 18 | interval: monthly 19 | -------------------------------------------------------------------------------- /.github/workflows/alpine.yml: -------------------------------------------------------------------------------- 1 | name: Alpine Linux 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | - name: start docker 29 | run: | 30 | docker run -w /src -dit --name alpine -v $PWD:/src alpine:latest 31 | echo 'docker exec alpine "$@";' > ./alpine.sh 32 | chmod +x ./alpine.sh 33 | - name: install packages 34 | run: | 35 | ./alpine.sh apk update 36 | ./alpine.sh apk add build-base cmake g++ linux-headers git bash icu-dev 37 | - name: cmake 38 | run: | 39 | ./alpine.sh cmake -D ADA_TESTING=ON -DADA_BENCHMARKS=ON -B build_for_alpine 40 | - name: build 41 | run: | 42 | ./alpine.sh cmake --build build_for_alpine 43 | - name: test 44 | run: | 45 | ./alpine.sh bash -c "cd build_for_alpine && ctest ." 
46 | -------------------------------------------------------------------------------- /.github/workflows/cifuzz.yml: -------------------------------------------------------------------------------- 1 | name: CIFuzz 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | permissions: read-all 13 | 14 | jobs: 15 | Fuzzing: 16 | runs-on: ubuntu-latest 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | sanitizer: [address, undefined, memory] 21 | steps: 22 | - name: Build Fuzzers (${{ matrix.sanitizer }}) 23 | id: build 24 | uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master 25 | with: 26 | oss-fuzz-project-name: 'ada-url' 27 | language: c++ 28 | sanitizer: ${{ matrix.sanitizer }} 29 | - name: Run Fuzzers (${{ matrix.sanitizer }}) 30 | uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master 31 | with: 32 | oss-fuzz-project-name: 'ada-url' 33 | language: c++ 34 | fuzz-seconds: 600 35 | sanitizer: ${{ matrix.sanitizer }} 36 | - name: Upload Crash 37 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 38 | if: steps.build.outcome == 'success' 39 | with: 40 | name: ${{ matrix.sanitizer }}-artifacts 41 | path: ./out/artifacts 42 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 1' 6 | 7 | permissions: 8 | contents: read 9 | security-events: write 10 | pull-requests: read 11 | actions: read 12 | 13 | jobs: 14 | analyze: 15 | name: Analyze 16 | 17 | runs-on: ubuntu-latest 18 | 19 | permissions: 20 | actions: read 21 | contents: read 22 | security-events: write 23 | 24 | strategy: 25 | fail-fast: false 26 | matrix: 27 | language: [ 'cpp', 'python' ] 28 | 29 | steps: 30 | - name: Checkout repository 31 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 32 | 33 | # Initializes the CodeQL tools for scanning. 34 | - name: Initialize CodeQL 35 | uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v2.2.5 36 | with: 37 | languages: ${{ matrix.language }} 38 | 39 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). 
40 | # If this step fails, then you should remove it and run the build manually (see below) 41 | - name: Autobuild 42 | uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v2.2.5 43 | 44 | - name: Perform CodeQL Analysis 45 | uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v2.2.5 46 | with: 47 | category: "/language:${{matrix.language}}" 48 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | name: 'Dependency Review' 2 | 3 | on: [pull_request] 4 | 5 | permissions: 6 | contents: read 7 | 8 | jobs: 9 | dependency-review: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: 'Checkout Repository' 13 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 14 | - name: 'Dependency Review' 15 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 16 | -------------------------------------------------------------------------------- /.github/workflows/documentation.yml: -------------------------------------------------------------------------------- 1 | name: Doxygen GitHub Pages 2 | 3 | on: 4 | release: 5 | types: [created] 6 | # Allows you to run this workflow manually from the Actions tab 7 | workflow_dispatch: 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref }} 11 | cancel-in-progress: true 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | deploy: 18 | permissions: 19 | contents: write 20 | pages: write 21 | id-token: write 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 25 | - name: Install theme 26 | run: ./tools/prepare-doxygen.sh 27 | - uses: mattnotmitt/doxygen-action@ded75d963c260fd8489801611a5079d149ebcc07 # edge 28 | with: 29 | doxyfile-path: './doxygen' 30 | - name: Deploy to GitHub Pages 31 | uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 32 | with: 33 | github_token: ${{ secrets.GITHUB_TOKEN }} 34 | publish_dir: docs/html 35 | -------------------------------------------------------------------------------- /.github/workflows/emscripten.yml: -------------------------------------------------------------------------------- 1 | name: emscripten 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | build: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 29 | - uses: mymindstorm/setup-emsdk@6ab9eb1bda2574c4ddb79809fc9247783eaf9021 # v14 30 | - name: Verify 31 | run: emcc -v 32 | - name: Checkout 33 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v3.6.0 34 | - name: Configure 35 | run: emcmake cmake -B buildwasm -D ADA_TESTING=ON -D ADA_TOOLS=OFF 36 | - name: Build 37 | run: cmake --build buildwasm 38 | - name: Test 39 | run: ctest --test-dir buildwasm 40 | -------------------------------------------------------------------------------- 
/.github/workflows/lint_and_format_check.yml: -------------------------------------------------------------------------------- 1 | name: Lint and format 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | lint-and-format: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | 29 | - name: Run clang-format 30 | uses: jidicula/clang-format-action@4726374d1aa3c6aecf132e5197e498979588ebc8 # v4.15.0 31 | with: 32 | clang-format-version: '17' 33 | fallback-style: 'Google' 34 | 35 | - uses: chartboost/ruff-action@e18ae971ccee1b2d7bbef113930f00c670b78da4 # v1.0.0 36 | name: Lint with Ruff 37 | with: 38 | version: 0.6.0 39 | -------------------------------------------------------------------------------- /.github/workflows/macos_install.yml: -------------------------------------------------------------------------------- 1 | name: macOS (Installation) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | macos-build: 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | shared: [OFF] 29 | runs-on: [macos-13, macos-14, macos-15] 30 | runs-on: ${{matrix.runs-on}} 31 | steps: 32 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 33 | - name: Prepare 34 | run: cmake -D ADA_TESTING=ON -DBUILD_SHARED_LIBS=${{matrix.shared}} -DCMAKE_INSTALL_PREFIX:PATH=destination -B build 35 | - name: Build 36 | run: cmake --build build -j=3 37 | - name: Install 38 | run: cmake --install build 39 | - name: Prepare test package 40 | run: cmake -DCMAKE_INSTALL_PREFIX:PATH=../../destination -S tests/installation -B buildbabyada 41 | - name: Build test package 42 | run: cmake --build buildbabyada 43 | - name: Run example 44 | run: ./buildbabyada/main 45 | -------------------------------------------------------------------------------- /.github/workflows/pkg.yml: -------------------------------------------------------------------------------- 1 | name: Debian pkg-config 2 | 3 | on: [push, pull_request] 4 | 5 | permissions: 6 | contents: read 7 | 8 | jobs: 9 | pkg-config: 10 | runs-on: ubuntu-latest 11 | container: 12 | image: debian:12 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - name: Install dependencies 18 | run: | 19 | apt -y update 20 | apt -y --no-install-recommends install g++ cmake make pkg-config 21 | 22 | - name: Build and install 23 | run: | 24 | cmake -B build 25 | cmake --build build 26 | cmake --install build 27 | 28 | - name: Test pkg-config 29 | run: pkg-config --cflags --libs ada 30 | -------------------------------------------------------------------------------- /.github/workflows/release-script-tests.yml: -------------------------------------------------------------------------------- 1 | name: Release Script Tests 2 | 3 | on: 4 | # workflow_call is used to indicate that a workflow can be called by another workflow. 
5 | workflow_call: 6 | pull_request: 7 | types: [opened, synchronize, reopened, ready_for_review] 8 | paths-ignore: 9 | - '**.md' 10 | - 'docs/**' 11 | push: 12 | branches: 13 | - main 14 | paths-ignore: 15 | - '**.md' 16 | - 'docs/**' 17 | 18 | permissions: 19 | contents: read 20 | 21 | jobs: 22 | release-script-test: 23 | runs-on: ubuntu-latest 24 | defaults: 25 | run: 26 | working-directory: ./tools/release 27 | 28 | steps: 29 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 30 | 31 | - name: Prepare Python 32 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 33 | with: 34 | cache: 'pip' # caching pip dependencies 35 | 36 | - name: Install dependencies 37 | run: pip install -r requirements.txt 38 | 39 | - name: Run tests 40 | run: pytest -v 41 | -------------------------------------------------------------------------------- /.github/workflows/release_create.yml: -------------------------------------------------------------------------------- 1 | name: Release Create 2 | 3 | on: 4 | pull_request: 5 | types: [closed] 6 | 7 | env: 8 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 9 | 10 | jobs: 11 | check-release-conditions: 12 | runs-on: ubuntu-latest 13 | if: | 14 | github.event.pull_request.merged == true && 15 | github.event.pull_request.base.ref == 'main' && 16 | startsWith(github.event.pull_request.head.ref, 'release/v') && 17 | startsWith(github.event.pull_request.user.login, 'github-actions') 18 | 19 | steps: 20 | - name: Check release conditions 21 | run: | 22 | echo "All conditions have been met!" 23 | 24 | release-script-test: 25 | needs: check-release-conditions 26 | uses: ./.github/workflows/release-script-tests.yml 27 | 28 | create-release: 29 | permissions: 30 | contents: write 31 | needs: release-script-test 32 | runs-on: ubuntu-latest 33 | if: ${{ needs.release-script-test.result == 'success' }} 34 | 35 | env: 36 | NEXT_RELEASE_TAG: ${{ github.event.pull_request.head.ref }} 37 | steps: 38 | - name: Checkout 39 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 40 | 41 | - name: Prepare Python 42 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 43 | with: 44 | cache: 'pip' # caching pip dependencies 45 | 46 | - name: Install dependencies 47 | run: pip install -r ./tools/release/requirements.txt 48 | 49 | - name: Extract Tag from branch name 50 | run: | 51 | NEXT_RELEASE_TAG=$(echo $NEXT_RELEASE_TAG | sed 's/^release\///') 52 | echo "NEXT_RELEASE_TAG=${NEXT_RELEASE_TAG}" >> $GITHUB_ENV 53 | 54 | - name: Target release Tag 55 | run: echo "New tag $NEXT_RELEASE_TAG" 56 | 57 | - name: Amalgamation 58 | run: ./singleheader/amalgamate.py 59 | 60 | - name: "Create release" 61 | run: ./tools/release/create_release.py 62 | -------------------------------------------------------------------------------- /.github/workflows/release_prepare.yml: -------------------------------------------------------------------------------- 1 | name: Release Prepare 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | tag: 7 | type: string 8 | required: true 9 | description: "Tag for the next release. 
Ex.: v5.0.0" 10 | 11 | env: 12 | NEXT_RELEASE_TAG: ${{ github.event.inputs.tag }} 13 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 14 | 15 | jobs: 16 | release-script-test: 17 | uses: ./.github/workflows/release-script-tests.yml 18 | 19 | prepare-release-and-pull-request: 20 | permissions: 21 | contents: write 22 | pull-requests: write 23 | needs: release-script-test 24 | runs-on: ubuntu-22.04-arm 25 | if: ${{ needs.release-script-test.result == 'success' }} 26 | env: 27 | CXX: clang++-14 28 | steps: 29 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 30 | 31 | - name: Prepare Python 32 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 33 | with: 34 | cache: 'pip' # caching pip dependencies 35 | 36 | - name: Install dependencies 37 | run: pip install -r ./tools/release/requirements.txt 38 | 39 | - name: Update source code versions 40 | run: ./tools/release/update_versions.py 41 | 42 | - name: Ada Build 43 | run: cmake -B build && cmake --build build 44 | - name: Ada Test 45 | run: ctest --output-on-failure --test-dir build 46 | 47 | - name: Create PR with code updates for new release 48 | uses: peter-evans/create-pull-request@f3a21bf3404eae73a97f65817ab35f351a1a63fe #v5.0.0 49 | with: 50 | commit-message: "chore: release ${{ env.NEXT_RELEASE_TAG }}" 51 | branch: "release/${{ env.NEXT_RELEASE_TAG }}" 52 | title: "chore: release ${{ env.NEXT_RELEASE_TAG }}" 53 | token: ${{ env.GITHUB_TOKEN }} 54 | body: | 55 | This pull PR updates the source code version to ${{ env.NEXT_RELEASE_TAG }} 56 | delete-branch: true 57 | reviewers: "lemire,anonrig" 58 | -------------------------------------------------------------------------------- /.github/workflows/scorecard.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: '0 0 * * 1' 14 | 15 | # Declare default permissions as read only. 16 | permissions: read-all 17 | 18 | jobs: 19 | analysis: 20 | name: Scorecard analysis 21 | runs-on: ubuntu-latest 22 | permissions: 23 | # Needed to upload the results to code-scanning dashboard. 24 | security-events: write 25 | # Needed to publish results and get a badge (see publish_results below). 26 | id-token: write 27 | # Uncomment the permissions below if installing in a private repository. 28 | # contents: read 29 | # actions: read 30 | 31 | steps: 32 | - name: "Checkout code" 33 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 34 | with: 35 | persist-credentials: false 36 | 37 | - name: "Run analysis" 38 | uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 39 | with: 40 | results_file: results.sarif 41 | results_format: sarif 42 | # (Optional) "write" PAT token. 
Uncomment the `repo_token` line below if: 43 | # - you want to enable the Branch-Protection check on a *public* repository, or 44 | # - you are installing Scorecard on a *private* repository 45 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-fine-grained-pat-optional. 46 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 47 | 48 | # Public repositories: 49 | # - Publish results to OpenSSF REST API for easy access by consumers 50 | # - Allows the repository to include the Scorecard badge. 51 | # - See https://github.com/ossf/scorecard-action#publishing-results. 52 | # For private repositories: 53 | # - `publish_results` will always be set to `false`, regardless 54 | # of the value entered here. 55 | publish_results: true 56 | 57 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 58 | # format to the repository Actions tab. 59 | - name: "Upload artifact" 60 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 61 | with: 62 | name: SARIF file 63 | path: results.sarif 64 | retention-days: 5 65 | 66 | # Upload the results to GitHub's code scanning dashboard. 67 | - name: "Upload to code-scanning" 68 | uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 69 | with: 70 | sarif_file: results.sarif 71 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu-release.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 22.04 (Release build) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-release-build: 25 | runs-on: ubuntu-22.04 26 | strategy: 27 | matrix: 28 | cxx: [g++-12, clang++-14] 29 | steps: 30 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 31 | - name: Setup Ninja 32 | run: sudo apt-get install ninja-build 33 | - name: Prepare 34 | run: cmake -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release -G Ninja -B build 35 | env: 36 | CXX: ${{matrix.cxx}} 37 | - name: Build 38 | run: cmake --build build -j=4 39 | - name: Test 40 | run: ctest --output-on-failure --test-dir build 41 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu-s390x.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu s390x (GCC 12) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | build: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 29 | name: Test 30 | id: runcmd 31 | with: 32 | arch: s390x 33 | distro: ubuntu_latest 34 | githubToken: ${{ github.token }} 35 | install: | 36 | 
apt-get update -q -y 37 | apt-get install -y cmake make g++-12 gcc-12 git ninja-build 38 | run: | 39 | CC=gcc-12 CXX=g++-12 cmake -D ADA_TESTING=ON -DCMAKE_BUILD_TYPE=Release -G Ninja -B build 40 | rm -r -f dependencies 41 | CC=gcc-12 CXX=g++-12 cmake --build build -j=4 42 | ctest --output-on-failure --test-dir build 43 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu-sanitized.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 22.04 (GCC 12 SANITIZED) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | runs-on: ubuntu-22.04 26 | strategy: 27 | matrix: 28 | shared: [ON, OFF] 29 | steps: 30 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 31 | - name: Setup Ninja 32 | run: sudo apt-get install ninja-build 33 | - name: Prepare 34 | run: cmake -D ADA_TESTING=ON -DADA_SANITIZE=ON -DADA_DEVELOPMENT_CHECKS=ON -DBUILD_SHARED_LIBS=${{matrix.shared}} -G Ninja -B build 35 | env: 36 | CXX: g++-12 37 | - name: Build 38 | run: cmake --build build -j=4 39 | - name: Test 40 | run: ctest --output-on-failure --test-dir build 41 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu-undef.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 22.04 (GCC 12 SANITIZE UNDEFINED) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | runs-on: ubuntu-22.04 26 | strategy: 27 | matrix: 28 | shared: [ON, OFF] 29 | steps: 30 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 31 | - name: Setup Ninja 32 | run: sudo apt-get install ninja-build 33 | - name: Prepare 34 | run: cmake -D ADA_TESTING=ON -D ADA_SANITIZE_UNDEFINED=ON -DADA_DEVELOPMENT_CHECKS=ON -DBUILD_SHARED_LIBS=${{matrix.shared}} -G Ninja -B build 35 | env: 36 | CXX: g++-12 37 | - name: Build 38 | run: cmake --build build -j=4 39 | - name: Test 40 | run: ctest --output-on-failure --test-dir build 41 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 22.04 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | shared: [ON, OFF] 29 | cxx: [g++-12, clang++-15] 30 | runs-on: [ubuntu-22.04, ubuntu-22.04-arm] 31 | runs-on: 
${{matrix.runs-on}} 32 | steps: 33 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 34 | - name: Setup Ninja 35 | run: sudo apt-get install ninja-build 36 | - name: Prepare 37 | run: cmake -D ADA_TESTING=ON -D ADA_BENCHMARKS=ON -DBUILD_SHARED_LIBS=${{matrix.shared}} -G Ninja -B build 38 | env: 39 | CXX: ${{matrix.cxx}} 40 | - name: Build 41 | run: cmake --build build -j=4 42 | - name: Test 43 | run: ctest --output-on-failure --test-dir build 44 | - name: Run default benchmark 45 | run: cd build && benchmarks/bench 46 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu_install.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 24.04 (Installation) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | runs-on: ubuntu-24.04 26 | strategy: 27 | matrix: 28 | shared: [ON, OFF] 29 | cxx: [g++-12, clang++] 30 | steps: 31 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 32 | - name: Setup Ninja 33 | run: sudo apt-get install ninja-build 34 | - name: Prepare 35 | run: cmake -D ADA_TESTING=ON -G Ninja -DBUILD_SHARED_LIBS=${{matrix.shared}} -DCMAKE_INSTALL_PREFIX:PATH=destination -B build 36 | env: 37 | CXX: ${{matrix.cxx}} 38 | - name: Build 39 | run: cmake --build build -j=4 40 | - name: Install 41 | run: cmake --install build 42 | - name: Prepare test package 43 | run: cmake -DCMAKE_INSTALL_PREFIX:PATH=../../destination -S tests/installation -B buildbabyada 44 | - name: Build test package 45 | run: cmake --build buildbabyada 46 | - name: Run example 47 | run: ./buildbabyada/main 48 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu_pedantic.yml: -------------------------------------------------------------------------------- 1 | name: Ubuntu 22.04 (GCC 12) Fails On Compiler Warnings 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ubuntu-build: 25 | runs-on: ubuntu-22.04 26 | strategy: 27 | matrix: 28 | shared: [ON, OFF] 29 | steps: 30 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 31 | - name: Setup Ninja 32 | run: sudo apt-get install ninja-build 33 | - name: Prepare 34 | run: cmake -D ADA_TESTING=ON -DBUILD_SHARED_LIBS=${{matrix.shared}} -G Ninja -B build 35 | env: 36 | CXX: g++-12 37 | CXXFLAGS: -Werror -Wextra -Wno-unused-parameter -Wimplicit-fallthrough 38 | - name: Build 39 | run: cmake --build build -j=4 40 | - name: Test 41 | run: ctest --output-on-failure --test-dir build 42 | -------------------------------------------------------------------------------- /.github/workflows/visual_studio.yml: -------------------------------------------------------------------------------- 1 | name: VS17-CI 2 | 3 | on: 4 | pull_request: 5 | types: [opened, 
synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ci: 25 | name: windows-vs17 26 | runs-on: windows-2025 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | include: 31 | - {gen: Visual Studio 17 2022, arch: x64, devchecks: OFF, shared: OFF, config: Release} 32 | - {gen: Visual Studio 17 2022, arch: x64, devchecks: ON, shared: OFF, config: Debug} 33 | - {gen: Visual Studio 17 2022, arch: x64, devchecks: ON, shared: ON, config: Debug} 34 | - {gen: Visual Studio 17 2022, arch: Win32, devchecks: ON, shared: OFF, config: Debug} 35 | - {gen: Visual Studio 17 2022, arch: Win32, devchecks: ON, shared: ON, config: Debug} 36 | steps: 37 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 38 | - name: Configure 39 | run: | 40 | cmake -D ADA_TESTING=ON -DADA_DEVELOPMENT_CHECKS="${{matrix.devchecks}}" -G "${{matrix.gen}}" -A ${{matrix.arch}} -DBUILD_SHARED_LIBS=${{matrix.shared}} -B build 41 | - name: Build 42 | run: cmake --build build --config "${{matrix.config}}" --verbose 43 | - name: Run tests 44 | working-directory: build 45 | run: ctest -C "${{matrix.config}}" --output-on-failure 46 | -------------------------------------------------------------------------------- /.github/workflows/visual_studio_clang.yml: -------------------------------------------------------------------------------- 1 | name: VS17-clang-CI 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | - 'docs/**' 15 | 16 | permissions: 17 | contents: read 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | ci: 25 | name: windows-vs17 26 | runs-on: windows-2025 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | include: 31 | - {gen: Visual Studio 17 2022, arch: x64, devchecks: ON} 32 | steps: 33 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 34 | - name: Configure 35 | run: | 36 | cmake -D ADA_TESTING=ON -DADA_DEVELOPMENT_CHECKS="${{matrix.devchecks}}" -G "${{matrix.gen}}" -A ${{matrix.arch}} -T ClangCL -B build 37 | - name: Build Debug 38 | run: cmake --build build --config Debug --verbose 39 | - name: Run Debug tests 40 | working-directory: build 41 | run: ctest -C Debug --output-on-failure 42 | - name: Build Release 43 | run: cmake --build build --config Release --verbose 44 | - name: Run Release tests 45 | working-directory: build 46 | run: ctest -C Release --output-on-failure 47 | -------------------------------------------------------------------------------- /.github/workflows/wpt-updater.yml: -------------------------------------------------------------------------------- 1 | name: Update WPT 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | env: 8 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 9 | 10 | concurrency: 11 | group: wpt-updater 12 | cancel-in-progress: true 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | issue: 19 | runs-on: ubuntu-latest 20 | permissions: 21 | contents: write 22 | pull-requests: write 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | module: [url, urlpattern] 27 | steps: 28 | - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 29 | - name: Fetch tests 30 | run: tools/update-wpt.sh ${{matrix.module}} 31 | - name: Open pull request 32 | uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e #v6.0.5 33 | with: 34 | token: ${{secrets.GH_PAT}} 35 | commit-message: "test: update web platform tests" 36 | branch: automatic-update-wpt-${{matrix.module}} 37 | title: Update web platform tests (${{matrix.module}}) 38 | body: | 39 | This is an automated pull request for updating the WPT. 40 | 41 | - [Web Platform Tests](https://github.com/web-platform-tests/wpt/tree/master/url) 42 | - [Commit History](https://github.com/web-platform-tests/wpt/commits/master/url/resources) 43 | 44 | cc @anonrig @lemire 45 | team-reviewers: core 46 | delete-branch: true 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # common build directory 2 | build 3 | *-build-* 4 | 5 | # Python cache 6 | __pycache__ 7 | venv 8 | 9 | cmake-build-debug 10 | 11 | .cache 12 | docs/html 13 | docs/theme 14 | 15 | # Generated using only the Github workflow 16 | benchmark_result.json 17 | 18 | singleheader/ada.h 19 | singleheader/ada_c.h 20 | singleheader/ada.cpp 21 | singleheader/singleheader.zip 22 | 23 | benchmarks/competitors/servo-url/debug 24 | benchmarks/competitors/servo-url/target 25 | 26 | #ignore VScode 27 | .vscode/ 28 | .idea 29 | 30 | # bazel output 31 | bazel-* 32 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.12 2 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright 2023 Yagiz Nizipli and Daniel Lemire 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so, 8 | subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
19 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Please use the following contact information for reporting a vulnerability: 6 | 7 | - [Daniel Lemire](https://github.com/lemire) - daniel@lemire.me 8 | - [Yagiz Nizipli](https://github.com/anonrig) - yagiz@nizipli.com 9 | -------------------------------------------------------------------------------- /ada.pc.in: -------------------------------------------------------------------------------- 1 | prefix=@CMAKE_INSTALL_PREFIX@ 2 | includedir=@PKGCONFIG_INCLUDEDIR@ 3 | libdir=@PKGCONFIG_LIBDIR@ 4 | 5 | Name: @PROJECT_NAME@ 6 | Description: @PROJECT_DESCRIPTION@ 7 | URL: @PROJECT_HOMEPAGE_URL@ 8 | Version: @PROJECT_VERSION@ 9 | Cflags: -I${includedir} @PKGCONFIG_CFLAGS@ 10 | Libs: -L${libdir} -l@PROJECT_NAME@ 11 | @PKGCONFIG_LIBS_PRIVATE@ 12 | -------------------------------------------------------------------------------- /benchmarks/bbc_bench.cpp: -------------------------------------------------------------------------------- 1 | #include "benchmark_header.h" 2 | 3 | /** 4 | * Realistic URL examples collected from the BBC homepage. 5 | */ 6 | std::string url_examples[] = { 7 | "https://static.files.bbci.co.uk/orbit/737a4ee2bed596eb65afc4d2ce9af568/js/" 8 | "polyfills.js", 9 | "https://static.files.bbci.co.uk/orbit/737a4ee2bed596eb65afc4d2ce9af568/" 10 | "css/orbit-v5-ltr.min.css", 11 | "https://static.files.bbci.co.uk/orbit/737a4ee2bed596eb65afc4d2ce9af568/js/" 12 | "require.min.js", 13 | "https://static.files.bbci.co.uk/fonts/reith/2.512/BBCReithSans_W_Rg.woff2", 14 | "https://nav.files.bbci.co.uk/searchbox/c8bfe8595e453f2b9483fda4074e9d15/" 15 | "css/box.css", 16 | "https://static.files.bbci.co.uk/cookies/d3bb303e79f041fec95388e04f84e716/" 17 | "cookie-banner/cookie-library.bundle.js", 18 | "https://static.files.bbci.co.uk/account/id-cta/597/style/id-cta.css", 19 | "https://gn-web-assets.api.bbc.com/wwhp/" 20 | "20220908-1153-091014d07889c842a7bdc06e00fa711c9e04f049/responsive/css/" 21 | "old-ie.min.css", 22 | "https://gn-web-assets.api.bbc.com/wwhp/" 23 | "20220908-1153-091014d07889c842a7bdc06e00fa711c9e04f049/modules/vendor/" 24 | "bower/modernizr/modernizr.js"}; 25 | 26 | void init_data(const char* v = nullptr) {} 27 | 28 | double url_examples_bytes = []() -> double { 29 | size_t bytes{0}; 30 | for (std::string& url_string : url_examples) { 31 | bytes += url_string.size(); 32 | } 33 | return double(bytes); 34 | }(); 35 | 36 | #include "benchmark_template.cpp" 37 | -------------------------------------------------------------------------------- /benchmarks/bench.cpp: -------------------------------------------------------------------------------- 1 | #include "benchmark_header.h" 2 | 3 | /** 4 | * Realistic URL examples collected on the actual web. 
5 | */ 6 | std::string url_examples_default[] = { 7 | "https://www.google.com/" 8 | "webhp?hl=en&ictx=2&sa=X&ved=0ahUKEwil_" 9 | "oSxzJj8AhVtEFkFHTHnCGQQPQgI", 10 | "https://support.google.com/websearch/" 11 | "?p=ws_results_help&hl=en-CA&fg=1", 12 | "https://en.wikipedia.org/wiki/Dog#Roles_with_humans", 13 | "https://www.tiktok.com/@aguyandagolden/video/7133277734310038830", 14 | "https://business.twitter.com/en/help/troubleshooting/" 15 | "how-twitter-ads-work.html?ref=web-twc-ao-gbl-adsinfo&utm_source=twc&utm_" 16 | "medium=web&utm_campaign=ao&utm_content=adsinfo", 17 | "https://images-na.ssl-images-amazon.com/images/I/" 18 | "41Gc3C8UysL.css?AUIClients/AmazonGatewayAuiAssets", 19 | "https://www.reddit.com/?after=t3_zvz1ze", 20 | "https://www.reddit.com/login/?dest=https%3A%2F%2Fwww.reddit.com%2F", 21 | "postgresql://other:9818274x1!!@localhost:5432/" 22 | "otherdb?connect_timeout=10&application_name=myapp", 23 | "http://192.168.1.1", // ipv4 24 | "http://[2606:4700:4700::1111]", // ipv6 25 | }; 26 | 27 | std::vector<std::string> url_examples; 28 | 29 | double url_examples_bytes = []() -> double { 30 | size_t bytes{0}; 31 | for (std::string& url_string : url_examples) { 32 | bytes += url_string.size(); 33 | } 34 | return double(bytes); 35 | }(); 36 | 37 | #ifdef ADA_URL_FILE 38 | const char* default_file = ADA_URL_FILE; 39 | #else 40 | const char* default_file = nullptr; 41 | #endif 42 | 43 | size_t init_data(const char* input = default_file) { 44 | // compute the number of bytes. 45 | auto compute = []() -> double { 46 | size_t bytes{0}; 47 | for (std::string& url_string : url_examples) { 48 | bytes += url_string.size(); 49 | } 50 | return double(bytes); 51 | }; 52 | if (input == nullptr) { 53 | for (const std::string& s : url_examples_default) { 54 | url_examples.emplace_back(s); 55 | } 56 | url_examples_bytes = compute(); 57 | return url_examples.size(); 58 | } 59 | 60 | if (!file_exists(input)) { 61 | std::cout << "File not found !" 
<< input << std::endl; 62 | for (const std::string& s : url_examples_default) { 63 | url_examples.emplace_back(s); 64 | } 65 | } else { 66 | std::cout << "Loading " << input << std::endl; 67 | url_examples = split_string(read_file(input)); 68 | } 69 | url_examples_bytes = compute(); 70 | return url_examples.size(); 71 | } 72 | #include "benchmark_template.cpp" 73 | -------------------------------------------------------------------------------- /benchmarks/benchmark_header.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #if ADA_VARIOUS_COMPETITION_ENABLED 9 | #include 10 | #include 11 | #include 12 | #endif 13 | #if ADA_url_whatwg_ENABLED 14 | #include 15 | #endif 16 | 17 | #include "ada.h" 18 | #include "performancecounters/event_counter.h" 19 | event_collector collector; 20 | size_t N = 1000; 21 | 22 | #include <filesystem> 23 | 24 | bool file_exists(const char* filename) { 25 | namespace fs = std::filesystem; 26 | std::filesystem::path f{filename}; 27 | if (std::filesystem::exists(filename)) { 28 | return true; 29 | } else { 30 | return false; 31 | } 32 | } 33 | 34 | std::string read_file(std::string filename) { 35 | constexpr size_t read_size = 4096; 36 | auto stream = std::ifstream(filename.c_str()); 37 | stream.exceptions(std::ios_base::badbit); 38 | std::string out; 39 | std::string buf(read_size, '\0'); 40 | while (stream.read(&buf[0], read_size)) { 41 | out.append(buf, 0, size_t(stream.gcount())); 42 | } 43 | out.append(buf, 0, size_t(stream.gcount())); 44 | return out; 45 | } 46 | 47 | std::vector<std::string> split_string(const std::string& str) { 48 | std::vector<std::string> result; 49 | std::stringstream ss{str}; 50 | for (std::string line; std::getline(ss, line, '\n');) { 51 | std::string_view view = line; 52 | // Some parsers like boost/url will refuse to parse a URL with trailing 53 | // whitespace. 54 | while (!view.empty() && std::isspace(view.back())) { 55 | view.remove_suffix(1); 56 | } 57 | while (!view.empty() && std::isspace(view.front())) { 58 | view.remove_prefix(1); 59 | } 60 | if (!view.empty()) { 61 | result.emplace_back(view); 62 | } 63 | } 64 | return result; 65 | } 66 | -------------------------------------------------------------------------------- /benchmarks/competitors/servo-url/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "servo-url" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | path = "lib.rs" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | url = "2.5.4" 12 | libc = "0.2" 13 | 14 | [profile.release] 15 | opt-level = 3 16 | debug = false 17 | lto = true 18 | -------------------------------------------------------------------------------- /benchmarks/competitors/servo-url/README.md: -------------------------------------------------------------------------------- 1 | ## Servo URL FFI 2 | 3 | This folder includes FFI bindings for servo/url. 
4 | 5 | ### Links 6 | 7 | - https://github.com/eqrion/cbindgen/blob/master/docs.md 8 | - https://gist.github.com/zbraniecki/b251714d77ffebbc73c03447f2b2c69f 9 | - https://github.com/Michael-F-Bryan/rust-ffi-guide/blob/master/book/setting_up.md 10 | 11 | ### Building 12 | 13 | - Generating cbindgen output 14 | - Install dependencies with `brew install cbindgen` 15 | - Generate with `cbindgen --config cbindgen.toml --crate servo-url --output servo_url.h` 16 | - Building 17 | - Run with `cargo build --release` 18 | -------------------------------------------------------------------------------- /benchmarks/competitors/servo-url/cbindgen.toml: -------------------------------------------------------------------------------- 1 | autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" 2 | include_version = true 3 | braces = "SameLine" 4 | line_length = 100 5 | tab_width = 2 6 | language = "C++" 7 | namespaces = ["servo_url"] 8 | include_guard = "servo_url_ffi_h" 9 | 10 | [parse] 11 | parse_deps = true 12 | include = ["url"] 13 | -------------------------------------------------------------------------------- /benchmarks/competitors/servo-url/lib.rs: -------------------------------------------------------------------------------- 1 | use url::Url; 2 | use std::slice; 3 | use libc::{c_char, size_t}; 4 | 5 | extern crate url; 6 | extern crate libc; 7 | 8 | #[unsafe(no_mangle)] 9 | pub extern "C" fn parse_url(raw_input: *const c_char, raw_input_length: size_t) -> *mut Url { 10 | let input = unsafe { std::str::from_utf8_unchecked(slice::from_raw_parts(raw_input as *const u8, raw_input_length)) }; 11 | // This code would assume that the URL is parsed successfully: 12 | // let result = Url::parse(input).unwrap(); 13 | // Box::into_raw(Box::new(result)) 14 | // But we might get an invalid input. So we want to return null in case of 15 | // error. We can do it in such a manner: 16 | match Url::parse(input) { 17 | Ok(result) => Box::into_raw(Box::new(result)), 18 | Err(_) => std::ptr::null_mut(), 19 | } 20 | } 21 | 22 | #[unsafe(no_mangle)] 23 | pub extern "C" fn parse_url_to_href(raw_input: *const c_char, raw_input_length: size_t) -> *const c_char { 24 | let input = unsafe { std::str::from_utf8_unchecked(slice::from_raw_parts(raw_input as *const u8, raw_input_length)) }; 25 | match Url::parse(input) { 26 | Ok(result) => std::ffi::CString::new(result.as_str()).unwrap().into_raw(), 27 | Err(_) => std::ptr::null_mut(), 28 | } 29 | } 30 | 31 | #[unsafe(no_mangle)] 32 | pub extern "C" fn free_url(raw: *mut Url) { 33 | if raw.is_null() { 34 | return; 35 | } 36 | 37 | unsafe { drop(Box::from_raw(raw)) } 38 | } 39 | 40 | #[unsafe(no_mangle)] 41 | pub unsafe extern fn free_string(ptr: *const c_char) { 42 | // Take the ownership back to rust and drop the owner 43 | let _ = unsafe { std::ffi::CString::from_raw(ptr as *mut _) }; 44 | } 45 | -------------------------------------------------------------------------------- /benchmarks/competitors/servo-url/servo_url.h: -------------------------------------------------------------------------------- 1 | #ifndef servo_url_ffi_h 2 | #define servo_url_ffi_h 3 | 4 | /* This file was modified manually. */ 5 | 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | namespace servo_url { 13 | 14 | /// A parsed URL record. 
15 | struct Url; 16 | 17 | extern "C" { 18 | 19 | Url *parse_url(const char *raw_input, size_t raw_input_length); 20 | 21 | void free_url(Url *raw); 22 | 23 | const char *parse_url_to_href(const char *raw_input, size_t raw_input_length); 24 | 25 | void free_string(const char *); 26 | } // extern "C" 27 | 28 | } // namespace servo_url 29 | 30 | #endif // servo_url_ffi_h 31 | -------------------------------------------------------------------------------- /benchmarks/performancecounters/event_counter.h: -------------------------------------------------------------------------------- 1 | #ifndef __EVENT_COUNTER_H 2 | #define __EVENT_COUNTER_H 3 | 4 | #include 5 | #ifndef _MSC_VER 6 | #include 7 | #endif 8 | #include 9 | 10 | #include 11 | 12 | #include <chrono> 13 | #include <vector> 14 | 15 | #include "linux-perf-events.h" 16 | #ifdef __linux__ 17 | #include 18 | #endif 19 | 20 | #if __APPLE__ && __aarch64__ 21 | #include "apple_arm_events.h" 22 | #endif 23 | 24 | struct event_count { 25 | std::chrono::duration<double> elapsed; 26 | std::vector<unsigned long long> event_counts; 27 | event_count() : elapsed(0), event_counts{0, 0, 0, 0, 0} {} 28 | event_count(const std::chrono::duration<double> _elapsed, 29 | const std::vector<unsigned long long> _event_counts) 30 | : elapsed(_elapsed), event_counts(_event_counts) {} 31 | event_count(const event_count& other) 32 | : elapsed(other.elapsed), event_counts(other.event_counts) {} 33 | 34 | // The types of counters (so we can read the getter more easily) 35 | enum event_counter_types { 36 | CPU_CYCLES, 37 | INSTRUCTIONS, 38 | BRANCH_MISSES = 2, 39 | BRANCH = 4 40 | }; 41 | 42 | double elapsed_sec() const { 43 | return std::chrono::duration<double>(elapsed).count(); 44 | } 45 | double elapsed_ns() const { 46 | return std::chrono::duration<double, std::nano>(elapsed).count(); 47 | } 48 | double cycles() const { 49 | return static_cast<double>(event_counts[CPU_CYCLES]); 50 | } 51 | double instructions() const { 52 | return static_cast<double>(event_counts[INSTRUCTIONS]); 53 | } 54 | double branches() const { return static_cast<double>(event_counts[BRANCH]); } 55 | double branch_misses() const { 56 | return static_cast<double>(event_counts[BRANCH_MISSES]); 57 | } 58 | event_count& operator=(const event_count& other) { 59 | this->elapsed = other.elapsed; 60 | this->event_counts = other.event_counts; 61 | return *this; 62 | } 63 | event_count operator+(const event_count& other) const { 64 | return event_count(elapsed + other.elapsed, 65 | { 66 | event_counts[0] + other.event_counts[0], 67 | event_counts[1] + other.event_counts[1], 68 | event_counts[2] + other.event_counts[2], 69 | event_counts[3] + other.event_counts[3], 70 | event_counts[4] + other.event_counts[4], 71 | }); 72 | } 73 | 74 | void operator+=(const event_count& other) { *this = *this + other; } 75 | }; 76 | 77 | struct event_aggregate { 78 | bool has_events = false; 79 | int iterations = 0; 80 | event_count total{}; 81 | event_count best{}; 82 | event_count worst{}; 83 | 84 | event_aggregate() = default; 85 | 86 | void operator<<(const event_count& other) { 87 | if (iterations == 0 || other.elapsed < best.elapsed) { 88 | best = other; 89 | } 90 | if (iterations == 0 || other.elapsed > worst.elapsed) { 91 | worst = other; 92 | } 93 | iterations++; 94 | total += other; 95 | } 96 | 97 | double elapsed_sec() const { return total.elapsed_sec() / iterations; } 98 | double elapsed_ns() const { return total.elapsed_ns() / iterations; } 99 | double cycles() const { return total.cycles() / iterations; } 100 | double instructions() const { return total.instructions() / iterations; } 101 | }; 102 | 103 | struct event_collector { 104 
-------------------------------------------------------------------------------- /benchmarks/performancecounters/event_counter.h: --------------------------------------------------------------------------------
1 | #ifndef __EVENT_COUNTER_H
2 | #define __EVENT_COUNTER_H
3 |
4 | #include <time.h>
5 | #ifndef _MSC_VER
6 | #include <dirent.h>
7 | #endif
8 | #include <unistd.h>
9 |
10 | #include <cinttypes>
11 |
12 | #include <chrono>
13 | #include <vector>
14 |
15 | #include "linux-perf-events.h"
16 | #ifdef __linux__
17 | #include <libgen.h>
18 | #endif
19 |
20 | #if __APPLE__ && __aarch64__
21 | #include "apple_arm_events.h"
22 | #endif
23 |
24 | struct event_count {
25 | std::chrono::duration<double> elapsed;
26 | std::vector<unsigned long long> event_counts;
27 | event_count() : elapsed(0), event_counts{0, 0, 0, 0, 0} {}
28 | event_count(const std::chrono::duration<double> _elapsed,
29 | const std::vector<unsigned long long> _event_counts)
30 | : elapsed(_elapsed), event_counts(_event_counts) {}
31 | event_count(const event_count& other)
32 | : elapsed(other.elapsed), event_counts(other.event_counts) {}
33 |
34 | // The types of counters (so we can read the getter more easily)
35 | enum event_counter_types {
36 | CPU_CYCLES,
37 | INSTRUCTIONS,
38 | BRANCH_MISSES = 2,
39 | BRANCH = 4
40 | };
41 |
42 | double elapsed_sec() const {
43 | return std::chrono::duration<double>(elapsed).count();
44 | }
45 | double elapsed_ns() const {
46 | return std::chrono::duration<double, std::nano>(elapsed).count();
47 | }
48 | double cycles() const {
49 | return static_cast<double>(event_counts[CPU_CYCLES]);
50 | }
51 | double instructions() const {
52 | return static_cast<double>(event_counts[INSTRUCTIONS]);
53 | }
54 | double branches() const { return static_cast<double>(event_counts[BRANCH]); }
55 | double branch_misses() const {
56 | return static_cast<double>(event_counts[BRANCH_MISSES]);
57 | }
58 | event_count& operator=(const event_count& other) {
59 | this->elapsed = other.elapsed;
60 | this->event_counts = other.event_counts;
61 | return *this;
62 | }
63 | event_count operator+(const event_count& other) const {
64 | return event_count(elapsed + other.elapsed,
65 | {
66 | event_counts[0] + other.event_counts[0],
67 | event_counts[1] + other.event_counts[1],
68 | event_counts[2] + other.event_counts[2],
69 | event_counts[3] + other.event_counts[3],
70 | event_counts[4] + other.event_counts[4],
71 | });
72 | }
73 |
74 | void operator+=(const event_count& other) { *this = *this + other; }
75 | };
76 |
77 | struct event_aggregate {
78 | bool has_events = false;
79 | int iterations = 0;
80 | event_count total{};
81 | event_count best{};
82 | event_count worst{};
83 |
84 | event_aggregate() = default;
85 |
86 | void operator<<(const event_count& other) {
87 | if (iterations == 0 || other.elapsed < best.elapsed) {
88 | best = other;
89 | }
90 | if (iterations == 0 || other.elapsed > worst.elapsed) {
91 | worst = other;
92 | }
93 | iterations++;
94 | total += other;
95 | }
96 |
97 | double elapsed_sec() const { return total.elapsed_sec() / iterations; }
98 | double elapsed_ns() const { return total.elapsed_ns() / iterations; }
99 | double cycles() const { return total.cycles() / iterations; }
100 | double instructions() const { return total.instructions() / iterations; }
101 | };
102 |
103 | struct event_collector {
104 | event_count count{};
105 | std::chrono::time_point<std::chrono::steady_clock> start_clock{};
106 |
107 | #if defined(__linux__)
108 | LinuxEvents<PERF_TYPE_HARDWARE> linux_events;
109 | event_collector()
110 | : linux_events(std::vector<int>{
111 | PERF_COUNT_HW_CPU_CYCLES,
112 | PERF_COUNT_HW_INSTRUCTIONS,
113 | }) {}
114 | bool has_events() { return linux_events.is_working(); }
115 | #elif __APPLE__ && __aarch64__
116 | AppleEvents apple_events;
117 | performance_counters diff;
118 | event_collector() : diff(0) { apple_events.setup_performance_counters(); }
119 | bool has_events() { return apple_events.setup_performance_counters(); }
120 | #else
121 | event_collector() {}
122 | bool has_events() { return false; }
123 | #endif
124 |
125 | inline void start() {
126 | #if defined(__linux)
127 | linux_events.start();
128 | #elif __APPLE__ && __aarch64__
129 | if (has_events()) {
130 | diff = apple_events.get_counters();
131 | }
132 | #endif
133 | start_clock = std::chrono::steady_clock::now();
134 | }
135 | inline event_count& end() {
136 | const auto end_clock = std::chrono::steady_clock::now();
137 | #if defined(__linux)
138 | linux_events.end(count.event_counts);
139 | #elif __APPLE__ && __aarch64__
140 | if (has_events()) {
141 | performance_counters end = apple_events.get_counters();
142 | diff = end - diff;
143 | }
144 | count.event_counts[0] = diff.cycles;
145 | count.event_counts[1] = diff.instructions;
146 | count.event_counts[2] = diff.missed_branches;
147 | count.event_counts[3] = 0;
148 | count.event_counts[4] = diff.branches;
149 | #endif
150 | count.elapsed = end_clock - start_clock;
151 | return count;
152 | }
153 | };
154 |
155 | #endif
156 |
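A minimal sketch of how these helpers are typically driven from a benchmark loop; the measure helper name and the empty lambda are illustrative, not part of the header.

#include <cstddef>
#include <cstdio>

#include "event_counter.h"

// Illustrative helper: run `fn` a number of times and aggregate the counters.
template <class Function>
event_aggregate measure(Function&& fn, std::size_t iterations) {
  event_collector collector;
  event_aggregate aggregate{};
  aggregate.has_events = collector.has_events();
  for (std::size_t i = 0; i < iterations; i++) {
    collector.start();
    fn();                          // code under measurement
    aggregate << collector.end();  // updates best, worst and total
  }
  return aggregate;
}

int main() {
  auto agg = measure([] { /* e.g. parse a batch of URLs */ }, 1000);
  std::printf("mean time: %.1f ns\n", agg.elapsed_ns());
  if (agg.has_events) {  // hardware counters: Linux perf or Apple Silicon only
    std::printf("mean cycles: %.1f, mean instructions: %.1f\n", agg.cycles(),
                agg.instructions());
  }
  return 0;
}

On other platforms only the wall-clock numbers are meaningful, which is what the #else branch of event_collector reflects.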
-------------------------------------------------------------------------------- /benchmarks/performancecounters/linux-perf-events.h: --------------------------------------------------------------------------------
1 | #pragma once
2 | #ifdef __linux__
3 |
4 | #include <asm/unistd.h> // for __NR_perf_event_open
5 | #include <linux/perf_event.h> // for perf event constants
6 | #include <sys/ioctl.h> // for ioctl
7 | #include <unistd.h> // for syscall
8 |
9 | #include <cerrno> // for errno
10 | #include <cstring> // for memset
11 | #include <stdexcept>
12 |
13 | #include <string>
14 | #include <vector>
15 |
16 | template <int TYPE = PERF_TYPE_HARDWARE>
17 | class LinuxEvents {
18 | int fd;
19 | bool working;
20 | perf_event_attr attribs{};
21 | size_t num_events{};
22 | std::vector<uint64_t> temp_result_vec{};
23 | std::vector<uint64_t> ids{};
24 |
25 | public:
26 | explicit LinuxEvents(std::vector<int> config_vec) : fd(0), working(true) {
27 | memset(&attribs, 0, sizeof(attribs));
28 | attribs.type = TYPE;
29 | attribs.size = sizeof(attribs);
30 | attribs.disabled = 1;
31 | attribs.exclude_kernel = 1;
32 | attribs.exclude_hv = 1;
33 |
34 | attribs.sample_period = 0;
35 | attribs.read_format = PERF_FORMAT_GROUP | PERF_FORMAT_ID;
36 | const int pid = 0; // the current process
37 | const int cpu = -1; // all CPUs
38 | const unsigned long flags = 0;
39 |
40 | int group = -1; // no group
41 | num_events = config_vec.size();
42 | ids.resize(config_vec.size());
43 | uint32_t i = 0;
44 | for (auto config : config_vec) {
45 | attribs.config = config;
46 | int _fd = static_cast<int>(
47 | syscall(__NR_perf_event_open, &attribs, pid, cpu, group, flags));
48 | if (_fd == -1) {
49 | report_error("perf_event_open");
50 | }
51 | ioctl(_fd, PERF_EVENT_IOC_ID, &ids[i++]);
52 | if (group == -1) {
53 | group = _fd;
54 | fd = _fd;
55 | }
56 | }
57 |
58 | temp_result_vec.resize(num_events * 2 + 1);
59 | }
60 |
61 | ~LinuxEvents() {
62 | if (fd != -1) {
63 | close(fd);
64 | }
65 | }
66 |
67 | inline void start() {
68 | if (fd != -1) {
69 | if (ioctl(fd, PERF_EVENT_IOC_RESET, PERF_IOC_FLAG_GROUP) == -1) {
70 | report_error("ioctl(PERF_EVENT_IOC_RESET)");
71 | }
72 |
73 | if (ioctl(fd, PERF_EVENT_IOC_ENABLE, PERF_IOC_FLAG_GROUP) == -1) {
74 | report_error("ioctl(PERF_EVENT_IOC_ENABLE)");
75 | }
76 | }
77 | }
78 |
79 | inline void end(std::vector<unsigned long long> &results) {
80 | if (fd != -1) {
81 | if (ioctl(fd, PERF_EVENT_IOC_DISABLE, PERF_IOC_FLAG_GROUP) == -1) {
82 | report_error("ioctl(PERF_EVENT_IOC_DISABLE)");
83 | }
84 |
85 | if (read(fd, temp_result_vec.data(), temp_result_vec.size() * 8) == -1) {
86 | report_error("read");
87 | }
88 | }
89 | // our actual results are in slots 1,3,5, ... of this structure
90 | for (uint32_t i = 1; i < temp_result_vec.size(); i += 2) {
91 | results[i / 2] = temp_result_vec[i];
92 | }
93 | for (uint32_t i = 2; i < temp_result_vec.size(); i += 2) {
94 | if (ids[i / 2 - 1] != temp_result_vec[i]) {
95 | report_error("event mismatch");
96 | }
97 | }
98 | }
99 |
100 | bool is_working() { return working; }
101 |
102 | private:
103 | void report_error(const std::string &) { working = false; }
104 | };
105 | #endif
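Because the group is opened with PERF_FORMAT_GROUP | PERF_FORMAT_ID, a single read() yields the number of events followed by (value, id) pairs, which is why the values live in the odd slots of temp_result_vec and the ids in the even slots. A small, hypothetical sketch of driving the wrapper directly (Linux only; the busy loop and result-vector size are illustrative):

#ifdef __linux__
#include <cstdio>
#include <vector>

#include "linux-perf-events.h"

int main() {
  // Count cycles and instructions for the current process in one perf group.
  LinuxEvents<PERF_TYPE_HARDWARE> events(
      std::vector<int>{PERF_COUNT_HW_CPU_CYCLES, PERF_COUNT_HW_INSTRUCTIONS});
  std::vector<unsigned long long> results(2);  // one slot per requested event
  events.start();
  unsigned long long sum = 0;
  for (int i = 0; i < 1000000; i++) sum += i;  // workload being measured
  events.end(results);
  if (events.is_working()) {  // false if perf_event_open was refused
    std::printf("sum=%llu cycles=%llu instructions=%llu\n", sum, results[0],
                results[1]);
  }
  return 0;
}
#else
int main() { return 0; }
#endif

Note that perf access may be restricted by kernel.perf_event_paranoid; rather than throwing, the wrapper records the failure by clearing its working flag.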
-------------------------------------------------------------------------------- /cmake/JoinPaths.cmake: --------------------------------------------------------------------------------
1 | function(join_paths joined_path first_path_segment)
2 | set(temp_path "${first_path_segment}")
3 | foreach(current_segment IN LISTS ARGN)
4 | if(NOT ("${current_segment}" STREQUAL ""))
5 | if(IS_ABSOLUTE "${current_segment}")
6 | set(temp_path "${current_segment}")
7 | else()
8 | set(temp_path "${temp_path}/${current_segment}")
9 | endif()
10 | endif()
11 | endforeach()
12 | set(${joined_path} "${temp_path}" PARENT_SCOPE)
13 | endfunction()
14 |
-------------------------------------------------------------------------------- /cmake/ada-config.cmake.in: --------------------------------------------------------------------------------
1 | include("${CMAKE_CURRENT_LIST_DIR}/ada_targets.cmake")
2 |
-------------------------------------------------------------------------------- /cmake/ada-flags.cmake: --------------------------------------------------------------------------------
1 | option(ADA_LOGGING "verbose output (useful for debugging)" OFF)
2 | option(ADA_DEVELOPMENT_CHECKS "development checks (useful for debugging)" OFF)
3 | option(ADA_SANITIZE "Sanitize addresses" OFF)
4 | if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
5 | option(ADA_SANITIZE_BOUNDS_STRICT "Sanitize bounds (strict): only for GCC" OFF)
6 | endif()
7 | option(ADA_SANITIZE_UNDEFINED "Sanitize undefined behaviour" OFF)
8 | if(ADA_SANITIZE)
9 | message(STATUS "Address sanitizer enabled.")
10 | endif()
11 | if(ADA_SANITIZE_WITHOUT_LEAKS)
12 | message(STATUS "Address sanitizer (but not leak) enabled.")
13 | endif()
14 | if(ADA_SANITIZE_UNDEFINED)
15 | message(STATUS "Undefined sanitizer enabled.")
16 | endif()
17 | option(ADA_COVERAGE "Compute coverage" OFF)
18 | option(ADA_TOOLS "Build cli tools (adaparse)" OFF)
19 | option(ADA_BENCHMARKS "Build benchmarks" OFF)
20 | option(ADA_TESTING "Build tests" OFF)
21 | option(ADA_USE_UNSAFE_STD_REGEX_PROVIDER "Enable unsafe regex provider that uses std::regex" OFF)
22 | option(ADA_INCLUDE_URL_PATTERN "Include URL pattern implementation" ON)
23 |
24 | if (ADA_COVERAGE)
25 | message(STATUS "You want to compute coverage. We assume that you have installed gcovr.")
26 | if (NOT CMAKE_BUILD_TYPE)
27 | set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE)
28 | endif()
29 | #######################
30 | # You need to install gcovr. Under macOS, you may do so with brew.
31 | # brew install gcovr
32 | # Then build...
33 | # cmake -D ADA_COVERAGE=ON -B buildcoverage
34 | # cmake --build buildcoverage
35 | # cmake --build buildcoverage --target ada_coverage
36 | #
37 | # open buildcoverage/ada_coverage/index.html
38 | #####################
39 | include(${PROJECT_SOURCE_DIR}/cmake/codecoverage.cmake)
40 | APPEND_COVERAGE_COMPILER_FLAGS()
41 | setup_target_for_coverage_gcovr_html(NAME ada_coverage EXECUTABLE ctest EXCLUDE "${PROJECT_SOURCE_DIR}/dependencies/*" "${PROJECT_SOURCE_DIR}/tools/*" "${PROJECT_SOURCE_DIR}/singleheader/*" ${PROJECT_SOURCE_DIR}/include/ada/common_defs.h)
42 | endif()
43 |
44 | if (NOT CMAKE_BUILD_TYPE)
45 | if(ADA_SANITIZE OR ADA_SANITIZE_WITHOUT_LEAKS OR ADA_SANITIZE_BOUNDS_STRICT OR ADA_SANITIZE_UNDEFINED)
46 | message(STATUS "No build type selected, default to Debug because you have sanitizers.")
47 | set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE)
48 | else()
49 | message(STATUS "No build type selected, default to Release")
50 | set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE)
51 | endif()
52 | endif()
53 |
54 | set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/tools/cmake")
55 | set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
56 |
57 | set(CMAKE_CXX_STANDARD 20)
58 | set(CMAKE_CXX_STANDARD_REQUIRED ON)
59 | set(CMAKE_CXX_EXTENSIONS OFF)
60 |
61 | find_program(CCACHE_FOUND ccache)
62 | if(CCACHE_FOUND)
63 | message(STATUS "Ccache found, using it as compiler launcher.")
64 | set(CMAKE_C_COMPILER_LAUNCHER ccache)
65 | set(CMAKE_CXX_COMPILER_LAUNCHER ccache)
66 | endif(CCACHE_FOUND)
67 |
-------------------------------------------------------------------------------- /cmake/add-cpp-test.cmake: --------------------------------------------------------------------------------
1 | # Helper so we don't have to repeat ourselves so much
2 | # Usage: add_cpp_test(testname [COMPILE_ONLY] [SOURCES a.cpp b.cpp ...] [LABELS acceptance per_implementation ...])
3 | # SOURCES defaults to testname.cpp if not specified.
4 | function(add_cpp_test TEST_NAME)
5 | # Parse arguments
6 | cmake_parse_arguments(PARSE_ARGV 1 ARGS "COMPILE_ONLY;LIBRARY;WILL_FAIL" "" "SOURCES;LABELS;DEPENDENCY_OF")
7 | if (NOT ARGS_SOURCES)
8 | list(APPEND ARGS_SOURCES ${TEST_NAME}.cpp)
9 | endif()
10 | if (ARGS_COMPILE_ONLY)
11 | list(APPEND ${ARGS_LABELS} compile_only)
12 | endif()
13 | if(ADA_SANITIZE)
14 | add_compile_options(-fsanitize=address -fno-omit-frame-pointer -fno-sanitize-recover=all)
15 | add_compile_definitions(ASAN_OPTIONS=detect_leaks=1)
16 | endif()
17 | if(ADA_SANITIZE_WITHOUT_LEAKS)
18 | add_compile_options(-fsanitize=address -fno-omit-frame-pointer -fno-sanitize-recover=all)
19 | endif()
20 | if(ADA_SANITIZE_BOUNDS_STRICT)
21 | add_compile_options(-fsanitize=bounds-strict -fno-sanitize-recover=all)
22 | add_link_options(-fsanitize=bounds-strict)
23 | endif()
24 | if(ADA_SANITIZE_UNDEFINED)
25 | add_compile_options(-fsanitize=undefined -fno-sanitize-recover=all)
26 | add_link_options(-fsanitize=undefined)
27 | endif()
28 | # Add the compile target
29 | if (ARGS_LIBRARY)
30 | add_library(${TEST_NAME} STATIC ${ARGS_SOURCES})
31 | else(ARGS_LIBRARY)
32 | add_executable(${TEST_NAME} ${ARGS_SOURCES})
33 | endif(ARGS_LIBRARY)
34 |
35 | # Add test
36 | if (ARGS_COMPILE_ONLY OR ARGS_LIBRARY)
37 | add_test(
38 | NAME ${TEST_NAME}
39 | COMMAND ${CMAKE_COMMAND} --build . --target ${TEST_NAME} --config $<CONFIG>
40 | WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
41 | )
42 | set_target_properties(${TEST_NAME} PROPERTIES EXCLUDE_FROM_ALL TRUE EXCLUDE_FROM_DEFAULT_BUILD TRUE)
43 | else()
44 | add_test(${TEST_NAME} ${TEST_NAME})
45 |
46 | # Add to