├── .codecov.yml
├── .coveragerc
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   ├── actions
│   │   └── cache-keys
│   │       └── action.yml
│   ├── dependabot.yml
│   └── workflows
│       ├── aiohttp.yml
│       ├── auto-merge.yml
│       ├── ci-cd.yml
│       ├── codeql.yml
│       ├── reusable-build-wheel.yml
│       └── reusable-linters.yml
├── .gitignore
├── .mypy.ini
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .yamllint
├── CHANGES.rst
├── CHANGES
│   ├── .TEMPLATE.rst
│   ├── .gitignore
│   └── README.rst
├── LICENSE
├── MANIFEST.in
├── Makefile
├── NOTICE
├── README.rst
├── benchmark.py
├── docs
│   ├── Makefile
│   ├── _static
│   │   └── yarl-icon-128x128.png
│   ├── api.rst
│   ├── changes.rst
│   ├── conf.py
│   ├── contributing
│   │   ├── guidelines.rst
│   │   └── release_guide.rst
│   ├── index.rst
│   ├── make.bat
│   ├── spelling_wordlist.txt
│   └── yarl-icon-128x128.xcf
├── packaging
│   ├── README.md
│   └── pep517_backend
│       ├── __init__.py
│       ├── __main__.py
│       ├── _backend.py
│       ├── _compat.py
│       ├── _cython_configuration.py
│       ├── _transformers.py
│       ├── cli.py
│       └── hooks.py
├── pyproject.toml
├── pytest.ini
├── requirements
│   ├── codspeed.txt
│   ├── cython-freethreading.txt
│   ├── cython.txt
│   ├── dev.txt
│   ├── doc-spelling.txt
│   ├── doc.txt
│   ├── lint.txt
│   ├── test-freethreading.txt
│   ├── test-pure.txt
│   ├── test.txt
│   └── towncrier.txt
├── setup.cfg
├── tests
│   ├── test_cache.py
│   ├── test_cached_property.py
│   ├── test_normalize_path.py
│   ├── test_pickle.py
│   ├── test_quoting.py
│   ├── test_quoting_benchmarks.py
│   ├── test_update_query.py
│   ├── test_url.py
│   ├── test_url_benchmarks.py
│   ├── test_url_build.py
│   ├── test_url_cmp_and_hash.py
│   ├── test_url_parsing.py
│   ├── test_url_query.py
│   └── test_url_update_netloc.py
├── towncrier.toml
├── url_benchmark.py
└── yarl
    ├── __init__.py
    ├── _parse.py
    ├── _path.py
    ├── _query.py
    ├── _quoters.py
    ├── _quoting.py
    ├── _quoting_c.pyx
    ├── _quoting_py.py
    ├── _url.py
    └── py.typed
/.codecov.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | codecov:
4 | notify:
5 | after_n_builds: 23 # The number of test matrix+lint jobs uploading coverage
6 | wait_for_ci: false
7 |
8 | require_ci_to_pass: false
9 |
10 | token: >- # notsecret # repo-scoped, upload-only, stability in fork PRs
11 | 26f4a393-24a9-48d9-8fa4-f1344d930846
12 |
13 | comment:
14 | require_changes: true
15 |
16 | coverage:
17 | range: 99.34..100
18 | status:
19 | patch:
20 | default:
21 | target: 100%
22 | flags:
23 | - pytest
24 | project:
25 | default:
26 | target: 100%
27 | lib:
28 | flags:
29 | - pytest
30 | paths:
31 | - yarl/
32 | target: 97.91%
33 | packaging:
34 | paths:
35 | - packaging/
36 | target: 75.24%
37 | tests:
38 | flags:
39 | - pytest
40 | paths:
41 | - tests/
42 | target: 99.87% # 100%
43 | typing:
44 | flags:
45 | - MyPy
46 | target: 100%
47 |
48 | ...
49 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [html]
2 | show_contexts = true
3 | skip_covered = false
4 |
5 | [paths]
6 | _site-packages-to-src-mapping =
7 | .
8 | */lib/pypy*/site-packages
9 | */lib/python*/site-packages
10 | *\Lib\site-packages
11 |
12 | [report]
13 | fail_under = 98.95
14 | skip_covered = true
15 | skip_empty = true
16 | show_missing = true
17 | exclude_also =
18 | ^\s*@pytest\.mark\.xfail
19 | if TYPE_CHECKING
20 | assert False
21 | : \.\.\.(\s*#.*)?$
22 | ^ +\.\.\.$
23 | omit =
24 | benchmark.py
25 | url_benchmark.py
26 | tests/*_benchmarks.py
27 |
28 | [run]
29 | branch = true
30 | cover_pylib = false
31 | # https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts
32 | # dynamic_context = test_function # conflicts with `pytest-cov` if set here
33 | parallel = true
34 | plugins =
35 | covdefaults
36 | Cython.Coverage
37 | relative_files = true
38 | source =
39 | .
40 | source_pkgs =
41 | yarl
42 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # These are supported funding model platforms
3 |
4 | github:
5 | - asvetlov
6 | - webknjaz
7 | - Dreamsorcerer
8 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: 🐞 Bug Report
3 | description: Create a report to help us improve.
4 | labels:
5 | - bug
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | **Thanks for taking a minute to file a bug report!**
11 |
12 | ⚠
13 | Verify first that your issue is not [already reported on
14 | GitHub][issue search].
15 |
16 | _Please fill out the form below with as many precise
17 | details as possible._
18 |
19 | [issue search]: ../search?q=is%3Aissue&type=issues
20 |
21 | - type: checkboxes
22 | id: terms
23 | attributes:
24 | label: Please confirm the following
25 | description: |
26 | Read the [aio-libs Code of Conduct][CoC] first. Check the existing issues
27 | on the tracker. Take into account the possibility of your report
28 | surfacing a security vulnerability.
29 |
30 | [CoC]: ../../.github/blob/master/CODE_OF_CONDUCT.md
31 | options:
32 | - label: |
33 | I agree to follow the [aio-libs Code of Conduct][CoC]
34 |
35 | [CoC]: ../../.github/blob/master/CODE_OF_CONDUCT.md
36 | required: true
37 | - label: |
38 | I have checked the [current issues][issue search] for duplicates.
39 |
40 | [issue search]: ../search?q=is%3Aissue&type=issues
41 | required: true
42 | - label: >-
43 | I understand this is open source software provided for free and
44 | that I might not receive a timely response.
45 | required: true
46 | - label: |
47 | I am positive I am **NOT** reporting a (potential) security
48 | vulnerability, to the best of my knowledge. *(These must be shared by
49 | submitting [this report form][vulnerability report form] instead, if
50 | any hesitation exists.)*
51 |
52 | [vulnerability report form]: ../security/advisories/new
53 | required: true
54 | - label: >-
55 | I am willing to submit a pull request with reproducers as xfailing test
56 | cases or even an entire fix. *(Assign this issue to me.)*
57 | required: false
58 |
59 | - type: textarea
60 | attributes:
61 | label: Describe the bug
62 | description: >-
63 | A clear and concise description of what the bug is.
64 | validations:
65 | required: true
66 |
67 | - type: textarea
68 | attributes:
69 | label: To Reproduce
70 | description: >-
71 | Describe the steps to reproduce this bug.
72 | placeholder: |
73 | 1. Have certain environment
74 | 2. Run given code snippet in a certain way
75 | 3. See some behavior described
76 | validations:
77 | required: true
78 |
79 | - type: textarea
80 | attributes:
81 | label: Expected behavior
82 | description: >-
83 | A clear and concise description of what you expected to happen.
84 | validations:
85 | required: true
86 |
87 | - type: textarea
88 | attributes:
89 | label: Logs/tracebacks
90 | description: |
91 | If applicable, add logs/tracebacks to help explain your problem.
92 | Paste the output of the steps above, including the commands
93 | themselves and their output/traceback etc.
94 | render: python-traceback
95 | validations:
96 | required: true
97 |
98 | - type: textarea
99 | attributes:
100 | label: Python Version
101 | description: Attach your version of Python.
102 | render: console
103 | value: |
104 | $ python --version
105 | validations:
106 | required: true
107 | - type: textarea
108 | attributes:
109 | label: multidict Version
110 | description: Attach your version of multidict.
111 | render: console
112 | value: |
113 | $ python -m pip show multidict
114 | validations:
115 | required: true
116 | - type: textarea
117 | attributes:
118 | label: propcache Version
119 | description: Attach your version of propcache.
120 | render: console
121 | value: |
122 | $ python -m pip show propcache
123 | validations:
124 | required: true
125 | - type: textarea
126 | attributes:
127 | label: yarl Version
128 | description: Attach your version of yarl.
129 | render: console
130 | value: |
131 | $ python -m pip show yarl
132 | validations:
133 | required: true
134 |
135 | - type: textarea
136 | attributes:
137 | label: OS
138 | placeholder: >-
139 | For example, Arch Linux, Windows, macOS, etc.
140 | validations:
141 | required: true
142 |
143 | - type: textarea
144 | attributes:
145 | label: Additional context
146 | description: |
147 | Add any other context about the problem here.
148 |
149 | Describe the environment you have that led to your issue.
150 | This includes proxy servers and other bits that are related to your case.
151 |
152 | ...
153 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # yamllint disable rule:line-length
4 | # Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
5 | # yamllint enable rule:line-length
6 | blank_issues_enabled: false # default: true
7 | contact_links:
8 | - name: 🔐 Security bug report 🔥
9 | url: https://github.com/aio-libs/.github/security/policy
10 | about: |
11 | Please learn how to report security vulnerabilities here.
12 |
13 | For all security related bugs, send an email
14 | instead of using this issue tracker and you
15 | will receive a prompt response.
16 |
17 | For more information, see
18 | https://github.com/aio-libs/.github/security/policy
19 | - name: >-
20 | [🎉 NEW 🎉]
21 | 🤷💻🤦 GitHub Discussions
22 | url: https://github.com/aio-libs/yarl/discussions
23 | about: >-
24 | Please ask typical Q&A in the Discussions tab or on StackOverflow
25 | - name: 🤷💻🤦 StackOverflow
26 | url: https://stackoverflow.com/questions/tagged/aiohttp
27 | about: >-
28 | Please ask typical Q&A here or in the
29 | Discussions tab @ https://github.com/aio-libs/yarl/discussions
30 | - name: 💬 Gitter Chat
31 | url: https://gitter.im/aio-libs/Lobby
32 | about: Chat with devs and community
33 | - name: 📝 Code of Conduct
34 | url: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
35 | about: ❤ Be nice to other members of the community. ☮ Behave.
36 |
37 | ...
38 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: 🚀 Feature request
3 | description: Suggest an idea for this project.
4 | labels:
5 | - enhancement
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | **Thanks for taking a minute to file a feature request for yarl!**
11 |
12 | ⚠
13 | Verify first that your feature request is not [already reported on
14 | GitHub][issue search].
15 |
16 | _Please fill out the form below with as many precise
17 | details as possible._
18 |
19 | [issue search]: ../search?q=is%3Aissue&type=issues
20 |
21 | - type: textarea
22 | attributes:
23 | label: Is your feature request related to a problem?
24 | description: >-
25 | Please add a clear and concise description of what
26 | the problem is. _Ex. I'm always frustrated when [...]_
27 |
28 | - type: textarea
29 | attributes:
30 | label: Describe the solution you'd like
31 | description: >-
32 | A clear and concise description of what you want to happen.
33 | validations:
34 | required: true
35 |
36 | - type: textarea
37 | attributes:
38 | label: Describe alternatives you've considered
39 | description: >-
40 | A clear and concise description of any alternative solutions
41 | or features you've considered.
42 | validations:
43 | required: true
44 |
45 | - type: textarea
46 | attributes:
47 | label: Additional context
48 | description: >-
49 | Add any other context or screenshots about
50 | the feature request here.
51 |
52 | - type: checkboxes
53 | attributes:
54 | label: Code of Conduct
55 | description: |
56 | Read the [aio-libs Code of Conduct][CoC] first.
57 |
58 | [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
59 | options:
60 | - label: I agree to follow the aio-libs Code of Conduct
61 | required: true
62 | ...
63 |
--------------------------------------------------------------------------------
/.github/actions/cache-keys/action.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: placeholder
4 | description: placeholder
5 |
6 | outputs:
7 | cache-key-for-dep-files:
8 | description: >-
9 | A cache key string derived from the dependency declaration files.
10 | value: ${{ steps.calc-cache-key-files.outputs.files-hash-key }}
11 |
12 | runs:
13 | using: composite
14 | steps:
15 | - name: >-
16 | Calculate dependency files' combined hash value
17 | for use in the cache key
18 | id: calc-cache-key-files
19 | run: |
20 | from os import environ
21 | from pathlib import Path
22 |
23 | FILE_APPEND_MODE = 'a'
24 |
25 | files_derived_hash = '${{
26 | hashFiles(
27 | 'tox.ini',
28 | 'pyproject.toml',
29 | '.pre-commit-config.yaml',
30 | 'pytest.ini',
31 | 'requirements/**',
32 | 'setup.cfg'
33 | )
34 | }}'
35 |
36 | print(f'Computed file-derived hash is {files_derived_hash}.')
37 |
38 | with Path(environ['GITHUB_OUTPUT']).open(
39 | mode=FILE_APPEND_MODE,
40 | ) as outputs_file:
41 | print(
42 | f'files-hash-key={files_derived_hash}',
43 | file=outputs_file,
44 | )
45 | shell: python
46 |
47 | ...
48 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | version: 2
4 | updates:
5 |
6 | # Maintain dependencies for GitHub Actions
7 | - package-ecosystem: "github-actions"
8 | directory: "/"
9 | labels:
10 | - dependencies
11 | schedule:
12 | interval: "daily"
13 |
14 | # Maintain dependencies for Python
15 | - package-ecosystem: "pip"
16 | directory: "/"
17 | labels:
18 | - dependencies
19 | schedule:
20 | interval: "daily"
21 | open-pull-requests-limit: 10
22 |
23 | ...
24 |
--------------------------------------------------------------------------------
/.github/workflows/aiohttp.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Aiohttp
4 |
5 |
6 | on:
7 | merge_group:
8 | push:
9 | branches:
10 | - master
11 | tags:
12 | - v*
13 | pull_request:
14 | branches:
15 | - master
16 |
17 |
18 | env:
19 | COLOR: "yes"
20 | FORCE_COLOR: 1
21 | PY_COLORS: 1
22 |
23 |
24 | jobs:
25 |
26 | test-aiohttp:
27 | name: Aiohttp tests
28 | runs-on: ubuntu-latest
29 | timeout-minutes: 30
30 | strategy:
31 | matrix:
32 | branch: ['master', '3.11', '3.12']
33 | steps:
34 | - name: Checkout aiohttp
35 | uses: actions/checkout@v4
36 | with:
37 | repository: aio-libs/aiohttp
38 | ref: ${{ matrix.branch }}
39 | submodules: true
40 | - name: Checkout yarl
41 | uses: actions/checkout@v4
42 | with:
43 | path: vendor/yarl
44 | - name: Setup Python
45 | uses: actions/setup-python@v5
46 | with:
47 | python-version: 3.x
48 | cache: pip
49 | cache-dependency-path: requirements/*.txt
50 | - name: Provision the dev env
51 | run: make .develop
52 | - name: Cythonize yarl
53 | working-directory: vendor/yarl
54 | run: make cythonize
55 | - name: Install yarl
56 | working-directory: vendor/yarl
57 | run: >-
58 | python -m pip install -e .
59 | - name: Run tests
60 | run: python -m pytest
61 | shell: bash
62 |
63 | ...
64 |
--------------------------------------------------------------------------------
/.github/workflows/auto-merge.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Dependabot auto-merge
4 | on: pull_request_target
5 |
6 | permissions:
7 | pull-requests: write
8 | contents: write
9 |
10 | jobs:
11 | dependabot:
12 | runs-on: ubuntu-latest
13 | timeout-minutes: 1
14 | if: ${{ github.actor == 'dependabot[bot]' }}
15 | steps:
16 | - name: Dependabot metadata
17 | id: metadata
18 | uses: dependabot/fetch-metadata@v2
19 | with:
20 | github-token: "${{ secrets.GITHUB_TOKEN }}"
21 | - name: Enable auto-merge for Dependabot PRs
22 | run: gh pr merge --auto --squash "$PR_URL"
23 | env:
24 | PR_URL: ${{github.event.pull_request.html_url}}
25 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
26 |
27 | ...
28 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: "CodeQL"
4 |
5 | on:
6 | push:
7 | branches:
8 | - master
9 | pull_request:
10 | branches:
11 | - master
12 | schedule:
13 | - cron: "46 14 * * 0"
14 |
15 | jobs:
16 | analyze:
17 | name: Analyze
18 | runs-on: ubuntu-latest
19 | timeout-minutes: 4
20 | permissions:
21 | actions: read
22 | contents: read
23 | security-events: write
24 |
25 | strategy:
26 | fail-fast: false
27 | matrix:
28 | language:
29 | - python
30 |
31 | steps:
32 | - name: Checkout
33 | uses: actions/checkout@v4
34 |
35 | - name: Initialize CodeQL
36 | uses: github/codeql-action/init@v3
37 | with:
38 | languages: ${{ matrix.language }}
39 | queries: +security-and-quality
40 |
41 | - name: Autobuild
42 | uses: github/codeql-action/autobuild@v3
43 |
44 | - name: Perform CodeQL Analysis
45 | uses: github/codeql-action/analyze@v3
46 | with:
47 | category: "/language:${{ matrix.language }}"
48 |
49 | ...
50 |
--------------------------------------------------------------------------------
/.github/workflows/reusable-build-wheel.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Build wheel
4 |
5 | on:
6 | workflow_call:
7 | inputs:
8 | dists-artifact-name:
9 | description: Workflow artifact name containing dists
10 | required: true
11 | type: string
12 | cython-tracing:
13 | description: Whether to build Cython modules with line tracing
14 | default: '0'
15 | required: false
16 | type: string
17 | os:
18 | description: VM OS to use, without version suffix
19 | default: ubuntu
20 | required: false
21 | type: string
22 | qemu:
23 | description: Emulated QEMU architecture
24 | default: ''
25 | required: false
26 | type: string
27 | tag:
28 | description: Build platform tag wheels
29 | default: ''
30 | required: false
31 | type: string
32 | source-tarball-name:
33 | description: Sdist filename wildcard
34 | required: true
35 | type: string
36 | wheel-tags-to-skip:
37 | description: Wheel tags to skip building
38 | default: ''
39 | required: false
40 | type: string
41 |
42 | env:
43 | FORCE_COLOR: "1" # Make tools pretty.
44 | PIP_DISABLE_PIP_VERSION_CHECK: "1"
45 | PIP_NO_PYTHON_VERSION_WARNING: "1"
46 |
47 | jobs:
48 |
49 | build-wheel:
50 | name: >-
51 | Build ${{ inputs.tag }} wheels on ${{ inputs.os }} ${{ inputs.qemu }}
52 | runs-on: ${{ inputs.os }}-latest
53 | timeout-minutes: ${{ inputs.qemu && 60 || 20 }}
54 | steps:
55 | - name: Retrieve the project source from an sdist inside the GHA artifact
56 | uses: re-actors/checkout-python-sdist@release/v2
57 | with:
58 | source-tarball-name: ${{ inputs.source-tarball-name }}
59 | workflow-artifact-name: ${{ inputs.dists-artifact-name }}
60 |
61 | - name: Set up QEMU
62 | if: inputs.qemu
63 | uses: docker/setup-qemu-action@v3
64 | with:
65 | platforms: all
66 | # This should be temporary
67 | # xref https://github.com/docker/setup-qemu-action/issues/188
68 | # xref https://github.com/tonistiigi/binfmt/issues/215
69 | image: tonistiigi/binfmt:qemu-v8.1.5
70 | id: qemu
71 | - name: Prepare emulation
72 | if: inputs.qemu
73 | run: |
74 | # Build emulated architectures only if QEMU is set,
75 | # use default "auto" otherwise
76 | echo "CIBW_ARCHS_LINUX=${{ inputs.qemu }}" >> "${GITHUB_ENV}"
77 | shell: bash
78 |
79 | - name: Skip building some wheel tags
80 | if: inputs.wheel-tags-to-skip
81 | run: |
82 | echo "CIBW_SKIP=${{ inputs.wheel-tags-to-skip }}" >> "${GITHUB_ENV}"
83 | shell: bash
84 |
85 | - name: Build wheels
86 | uses: pypa/cibuildwheel@v2.23.2
87 | env:
88 | CIBW_ARCHS_MACOS: x86_64 arm64 universal2
89 | CIBW_CONFIG_SETTINGS: >- # Cython line tracing for coverage collection
90 | pure-python=false
91 | with-cython-tracing=${{ inputs.cython-tracing }}
92 |
93 | - name: Upload built artifacts for testing and publishing
94 | uses: actions/upload-artifact@v4
95 | with:
96 | name: ${{ inputs.dists-artifact-name }}-
97 | ${{ inputs.os }}-
98 | ${{ inputs.qemu }}-
99 | ${{ inputs.tag }}
100 | path: ./wheelhouse/*.whl
101 |
102 | ...
103 |
--------------------------------------------------------------------------------
/.github/workflows/reusable-linters.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Linters
4 |
5 | on:
6 | workflow_call:
7 | secrets:
8 | codecov-token:
9 | description: Mandatory token for uploading to Codecov
10 | required: true
11 |
12 | env:
13 | COLOR: >- # Supposedly, pytest or coveragepy use this
14 | yes
15 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it
16 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement
17 | PIP_DISABLE_PIP_VERSION_CHECK: 1
18 | PIP_NO_PYTHON_VERSION_WARNING: 1
19 | PIP_NO_WARN_SCRIPT_LOCATION: 1
20 | PRE_COMMIT_COLOR: always
21 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest`
22 | PYTHONIOENCODING: utf-8
23 | PYTHONUTF8: 1
24 | PYTHON_LATEST: 3.12
25 |
26 | jobs:
27 |
28 | lint:
29 | name: Linter
30 | runs-on: ubuntu-latest
31 | timeout-minutes: 15
32 | steps:
33 | - name: Checkout
34 | uses: actions/checkout@v4
35 | - name: Setup Python ${{ env.PYTHON_LATEST }}
36 | uses: actions/setup-python@v5
37 | with:
38 | python-version: ${{ env.PYTHON_LATEST }}
39 | - name: >-
40 | Calculate dependency files' combined hash value
41 | for use in the cache key
42 | id: calc-cache-key-files
43 | uses: ./.github/actions/cache-keys
44 | - name: Set up pip cache
45 | uses: re-actors/cache-python-deps@release/v1
46 | with:
47 | cache-key-for-dependency-files: >-
48 | ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }}
49 | - name: Cache pre-commit.com virtualenvs
50 | uses: actions/cache@v4
51 | with:
52 | path: ~/.cache/pre-commit
53 | key: >-
54 | ${{
55 | runner.os
56 | }}-pre-commit-${{
57 | hashFiles('.pre-commit-config.yaml')
58 | }}
59 | - name: Install dependencies
60 | uses: py-actions/py-dependency-install@v4
61 | with:
62 | path: requirements/lint.txt
63 | - name: Self-install
64 | run: |
65 | pip install .
66 | - name: Run linters
67 | run: |
68 | make lint
69 | - name: Send coverage data to Codecov
70 | uses: codecov/codecov-action@v5
71 | with:
72 | token: ${{ secrets.codecov-token }}
73 | files: >-
74 | .tox/.tmp/.mypy/python-3.11/cobertura.xml
75 | flags: >-
76 | CI-GHA,
77 | MyPy
78 | fail_ci_if_error: true
79 | - name: Install spell checker
80 | run: |
81 | sudo apt install libenchant-2-dev
82 | pip install -r requirements/doc-spelling.txt
83 | - name: Run docs spelling
84 | run: |
85 | make doc-spelling
86 | - name: Prepare twine checker
87 | run: |
88 | pip install -U build twine
89 | python -m build --config-setting=pure-python=true
90 | - name: Run twine checker
91 | run: |
92 | twine check --strict dist/*
93 |
94 | ...
95 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | .venv/
83 | venv/
84 | ENV/
85 |
86 | # Spyder project settings
87 | .spyderproject
88 |
89 | # Rope project settings
90 | .ropeproject
91 |
92 | coverage
93 |
94 |
95 | yarl/*.c
96 | yarl/*.html
97 |
98 | .develop
99 |
100 | # Idea
101 | .idea
102 |
103 | .mypy_cache
104 | .install-cython
105 | .install-deps
106 | .pytest_cache
107 | pip-wheel-metadata
108 |
--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | files = packaging, tests, yarl
3 | check_untyped_defs = True
4 | follow_imports_for_stubs = True
5 | disallow_any_decorated = True
6 | disallow_any_generics = True
7 | disallow_any_unimported = True
8 | disallow_incomplete_defs = True
9 | disallow_subclassing_any = True
10 | disallow_untyped_calls = True
11 | disallow_untyped_decorators = True
12 | disallow_untyped_defs = True
13 | # TODO(PY312): explicit-override
14 | enable_error_code = deprecated, ignore-without-code, possibly-undefined, redundant-expr, redundant-self, truthy-bool, truthy-iterable, unused-awaitable
15 | extra_checks = True
16 | follow_untyped_imports = True
17 | implicit_reexport = False
18 | no_implicit_optional = True
19 | pretty = True
20 | show_column_numbers = True
21 | show_error_codes = True
22 | show_error_code_links = True
23 | strict_bytes = True
24 | strict_equality = True
25 | warn_incomplete_stub = True
26 | warn_redundant_casts = True
27 | warn_return_any = True
28 | warn_unreachable = True
29 | warn_unused_ignores = True
30 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | ci:
4 | autoupdate_schedule: quarterly
5 | skip:
6 | - actionlint-docker
7 |
8 | repos:
9 | - repo: https://github.com/pre-commit/pre-commit-hooks
10 | rev: 'v5.0.0'
11 | hooks:
12 | - id: check-merge-conflict
13 | - repo: https://github.com/asottile/yesqa
14 | rev: v1.5.0
15 | hooks:
16 | - id: yesqa
17 | additional_dependencies:
18 | - wemake-python-styleguide
19 | - repo: https://github.com/PyCQA/isort
20 | rev: '6.0.1'
21 | hooks:
22 | - id: isort
23 | - repo: https://github.com/psf/black
24 | rev: '25.1.0'
25 | hooks:
26 | - id: black
27 | language_version: python3 # Should be a command that runs python
28 |
29 | - repo: https://github.com/python-jsonschema/check-jsonschema.git
30 | rev: 0.32.1
31 | hooks:
32 | - id: check-github-workflows
33 | files: ^\.github/workflows/[^/]+$
34 | types:
35 | - yaml
36 | - id: check-jsonschema
37 | alias: check-github-workflows-timeout
38 | name: Check GitHub Workflows set timeout-minutes
39 | args:
40 | - --builtin-schema
41 | - github-workflows-require-timeout
42 | files: ^\.github/workflows/[^/]+$
43 | types:
44 | - yaml
45 | - id: check-readthedocs
46 |
47 | - repo: https://github.com/pre-commit/pre-commit-hooks
48 | rev: 'v5.0.0'
49 | hooks:
50 | - id: end-of-file-fixer
51 | - id: requirements-txt-fixer
52 | - id: trailing-whitespace
53 | - id: file-contents-sorter
54 | files: |
55 | docs/spelling_wordlist.txt|
56 | .gitignore|
57 | .gitattributes
58 | - id: check-case-conflict
59 | - id: check-json
60 | - id: check-xml
61 | - id: check-executables-have-shebangs
62 | - id: check-toml
63 | - id: check-xml
64 | - id: check-yaml
65 | - id: debug-statements
66 | - id: check-added-large-files
67 | - id: check-symlinks
68 | - id: debug-statements
69 | - id: detect-aws-credentials
70 | args: ['--allow-missing-credentials']
71 | - id: detect-private-key
72 | exclude: ^examples/
73 | - repo: https://github.com/asottile/pyupgrade
74 | rev: 'v3.19.1'
75 | hooks:
76 | - id: pyupgrade
77 | args: ['--py39-plus']
78 | - repo: https://github.com/PyCQA/flake8
79 | rev: '7.2.0'
80 | hooks:
81 | - id: flake8
82 | exclude: "^docs/"
83 |
84 | - repo: https://github.com/codespell-project/codespell.git
85 | rev: v2.4.1
86 | hooks:
87 | - id: codespell
88 |
89 | - repo: https://github.com/adrienverge/yamllint.git
90 | rev: v1.37.0
91 | hooks:
92 | - id: yamllint
93 | args:
94 | - --strict
95 |
96 | - repo: https://github.com/MarcoGorelli/cython-lint.git
97 | rev: v0.16.6
98 | hooks:
99 | - id: cython-lint
100 |
101 | - repo: https://github.com/Lucas-C/pre-commit-hooks-markup
102 | rev: v1.0.1
103 | hooks:
104 | - id: rst-linter
105 | exclude: ^CHANGES\.rst$
106 | files: >-
107 | ^[^/]+[.]rst$
108 |
109 | - repo: https://github.com/pre-commit/mirrors-mypy.git
110 | rev: v1.15.0
111 | hooks:
112 | - id: mypy
113 | alias: mypy-py311
114 | name: MyPy, for Python 3.11
115 | additional_dependencies:
116 | - Cython
117 | - expandvars
118 | - idna
119 | - hypothesis
120 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report`
121 | - multidict
122 | - propcache >= 0.3.1
123 | - pytest
124 | - tomli # requirement of packaging/pep517_backend/
125 | - types-setuptools # requirement of packaging/pep517_backend/
126 | - types-Pygments
127 | - types-colorama
128 | - pytest_codspeed==3.0.0
129 | args:
130 | - --python-version=3.11
131 | - --txt-report=.tox/.tmp/.mypy/python-3.11
132 | - --cobertura-xml-report=.tox/.tmp/.mypy/python-3.11
133 | - --html-report=.tox/.tmp/.mypy/python-3.11
134 | pass_filenames: false
135 |
136 | - repo: https://github.com/rhysd/actionlint.git
137 | rev: v1.7.7
138 | hooks:
139 | - id: actionlint-docker
140 | args:
141 | - -ignore
142 | - >- # https://github.com/rhysd/actionlint/issues/384
143 | ^type of expression at "float number value" must be number
144 | but found type string$
145 | - -ignore
146 | - >- # https://github.com/rhysd/actionlint/pull/380#issuecomment-2325391372
147 | ^input "attestations" is not defined in action
148 | "pypa/gh-action-pypi-publish@release/v1". available inputs are ".*"$
149 |
150 | ...
151 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | version: 2
4 |
5 | build:
6 | os: ubuntu-22.04
7 | tools:
8 | python: "3.11"
9 |
10 | jobs:
11 | post_create_environment:
12 | - >-
13 | pip install .
14 | --config-settings=pure-python=true
15 |
16 | python:
17 | install:
18 | - requirements: requirements/doc.txt
19 |
20 | sphinx:
21 | builder: dirhtml
22 | configuration: docs/conf.py
23 | fail_on_warning: true
24 |
25 | ...
26 |
--------------------------------------------------------------------------------
/.yamllint:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | extends: default
4 |
5 | rules:
6 | indentation:
7 | level: error
8 | indent-sequences: false
9 | truthy:
10 | allowed-values:
11 | - >-
12 | false
13 | - >-
14 | true
15 | - >- # Allow "on" key name in GHA CI/CD workflow definitions
16 | on
17 |
18 | ...
19 |
--------------------------------------------------------------------------------
/CHANGES/.TEMPLATE.rst:
--------------------------------------------------------------------------------
1 | {# TOWNCRIER TEMPLATE #}
2 |
3 | *({{ versiondata.date }})*
4 |
5 | {% for section, _ in sections.items() %}
6 | {% set underline = underlines[0] %}{% if section %}{{section}}
7 | {{ underline * section|length }}{% set underline = underlines[1] %}
8 |
9 | {% endif %}
10 |
11 | {% if sections[section] %}
12 | {% for category, val in definitions.items() if category in sections[section]%}
13 | {{ definitions[category]['name'] }}
14 | {{ underline * definitions[category]['name']|length }}
15 |
16 | {% if definitions[category]['showcontent'] %}
17 | {% for text, change_note_refs in sections[section][category].items() %}
18 | - {{ text }}
19 |
20 | {{- '\n' * 2 -}}
21 |
22 | {#-
23 | NOTE: Replacing 'e' with 'f' is a hack that prevents Jinja's `int`
24 | NOTE: filter internal implementation from treating the input as an
25 | NOTE: infinite float when it looks like a scientific notation (with a
26 | NOTE: single 'e' char in between digits), raising an `OverflowError`,
27 | NOTE: subsequently. 'f' is still a hex letter so it won't affect the
28 | NOTE: check for whether it's a (short or long) commit hash or not.
29 | Ref: https://github.com/pallets/jinja/issues/1921
30 | -#}
31 | {%-
32 | set pr_issue_numbers = change_note_refs
33 | | map('lower')
34 | | map('replace', 'e', 'f')
35 | | map('int', default=None)
36 | | select('integer')
37 | | map('string')
38 | | list
39 | -%}
40 | {%- set arbitrary_refs = [] -%}
41 | {%- set commit_refs = [] -%}
42 | {%- with -%}
43 | {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%}
44 | {%- for cf in commit_ref_candidates -%}
45 | {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%}
46 | {%- set _ = commit_refs.append(cf) -%}
47 | {%- else -%}
48 | {%- set _ = arbitrary_refs.append(cf) -%}
49 | {%- endif -%}
50 | {%- endfor -%}
51 | {%- endwith -%}
52 |
53 | {% if pr_issue_numbers %}
54 | *Related issues and pull requests on GitHub:*
55 | :issue:`{{ pr_issue_numbers | join('`, :issue:`') }}`.
56 | {{- '\n' * 2 -}}
57 | {%- endif -%}
58 |
59 | {% if commit_refs %}
60 | *Related commits on GitHub:*
61 | :commit:`{{ commit_refs | join('`, :commit:`') }}`.
62 | {{- '\n' * 2 -}}
63 | {%- endif -%}
64 |
65 | {% if arbitrary_refs %}
66 | *Unlinked references:*
67 | {{ arbitrary_refs | join(', ') }}.
68 | {{- '\n' * 2 -}}
69 | {%- endif -%}
70 |
71 | {% endfor %}
72 | {% else %}
73 | - {{ sections[section][category]['']|join(', ') }}
74 |
75 | {% endif %}
76 | {% if sections[section][category]|length == 0 %}
77 | No significant changes.
78 |
79 | {% else %}
80 | {% endif %}
81 |
82 | {% endfor %}
83 | {% else %}
84 | No significant changes.
85 |
86 |
87 | {% endif %}
88 | {% endfor %}
89 | ----
90 | {{ '\n' * 2 }}
91 |
--------------------------------------------------------------------------------
/CHANGES/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.TEMPLATE.rst
3 | !.gitignore
4 | !README.rst
5 | !*.bugfix
6 | !*.bugfix.rst
7 | !*.bugfix.*.rst
8 | !*.breaking
9 | !*.breaking.rst
10 | !*.breaking.*.rst
11 | !*.contrib
12 | !*.contrib.rst
13 | !*.contrib.*.rst
14 | !*.deprecation
15 | !*.deprecation.rst
16 | !*.deprecation.*.rst
17 | !*.doc
18 | !*.doc.rst
19 | !*.doc.*.rst
20 | !*.feature
21 | !*.feature.rst
22 | !*.feature.*.rst
23 | !*.misc
24 | !*.misc.rst
25 | !*.misc.*.rst
26 | !*.packaging
27 | !*.packaging.rst
28 | !*.packaging.*.rst
29 |
--------------------------------------------------------------------------------
/CHANGES/README.rst:
--------------------------------------------------------------------------------
1 | .. _Adding change notes with your PRs:
2 |
3 | Adding change notes with your PRs
4 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
5 |
6 | It is very important to maintain a log for news of how
7 | updating to the new version of the software will affect
8 | end-users. This is why we enforce collection of the change
9 | fragment files in pull requests as per `Towncrier philosophy`_.
10 |
11 | The idea is that when somebody makes a change, they must record
12 | the bits that would affect end-users, only including information
13 | that would be useful to them. Then, when the maintainers publish
14 | a new release, they'll automatically use these records to compose
15 | a change log for the respective version. It is important to
16 | understand that including unnecessary low-level implementation
17 | related details generates noise that is not particularly useful
18 | to the end-users most of the time. And so such details should be
19 | recorded in the Git history rather than a changelog.
20 |
21 | Alright! So how to add a news fragment?
22 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
23 |
24 | ``yarl`` uses `towncrier <https://pypi.org/project/towncrier/>`_
25 | for changelog management.
26 | To submit a change note about your PR, add a text file into the
27 | ``CHANGES/`` folder. It should contain an
28 | explanation of what applying this PR will change in the way
29 | end-users interact with the project. One sentence is usually
30 | enough but feel free to add as many details as you feel necessary
31 | for the users to understand what it means.
32 |
33 | **Use the past tense** for the text in your fragment because,
34 | combined with others, it will be a part of the "news digest"
35 | telling the readers **what changed** in a specific version of
36 | the library *since the previous version*. You should also use
37 | *reStructuredText* syntax for highlighting code (inline or block),
38 | linking parts of the docs or external sites.
39 | However, you do not need to reference the issue or PR numbers here
40 | as *towncrier* will automatically add a reference to all of the
41 | affected issues when rendering the news file.
42 | If you wish to sign your change, feel free to add ``-- by
43 | :user:`github-username``` at the end (replace ``github-username``
44 | with your own!).
45 |
46 | Finally, name your file following the convention that Towncrier
47 | understands: it should start with the number of an issue or a
48 | PR followed by a dot, then add a patch type, like ``feature``,
49 | ``doc``, ``contrib`` etc., and add ``.rst`` as a suffix. If you
50 | need to add more than one fragment, you may add an optional
51 | sequence number (delimited with another period) between the type
52 | and the suffix.
53 |
54 | In general the name will follow the ``<pr_number>.<category>.rst`` pattern,
55 | where the categories are:
56 |
57 | - ``bugfix``: A bug fix for something we deemed an improper undesired
58 | behavior that got corrected in the release to match pre-agreed
59 | expectations.
60 | - ``feature``: A new behavior, public APIs. That sort of stuff.
61 | - ``deprecation``: A declaration of future API removals and breaking
62 | changes in behavior.
63 | - ``breaking``: When something public gets removed in a breaking way.
64 | Could be deprecated in an earlier release.
65 | - ``doc``: Notable updates to the documentation structure or build
66 | process.
67 | - ``packaging``: Notes for downstreams about unobvious side effects
68 | and tooling. Changes in the test invocation considerations and
69 | runtime assumptions.
70 | - ``contrib``: Stuff that affects the contributor experience. e.g.
71 | Running tests, building the docs, setting up the development
72 | environment.
73 | - ``misc``: Changes that are hard to assign to any of the above
74 | categories.
75 |
76 | A pull request may have more than one of these components, for example
77 | a code change may introduce a new feature that deprecates an old
78 | feature, in which case two fragments should be added. It is not
79 | necessary to make a separate documentation fragment for documentation
80 | changes accompanying the relevant code changes.
81 |
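As a quick illustration, a fragment can be created straight from the shell.
The PR number ``1234`` and the wording below are made up for the sake of the
example, not a real change (single quotes keep the RST backticks away from the
shell):

.. code-block:: console

    $ echo 'Fixed the handling of empty query strings -- by :user:`github-username`.' > CHANGES/1234.bugfix.rst
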
82 | Examples for adding changelog entries to your Pull Requests
83 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
84 |
85 | File :file:`CHANGES/603.removal.1.rst`:
86 |
87 | .. code-block:: rst
88 |
89 | Dropped Python 3.5 support; Python 3.6 is the minimal supported Python
90 | version -- by :user:`webknjaz`.
91 |
92 | File :file:`CHANGES/550.bugfix.rst`:
93 |
94 | .. code-block:: rst
95 |
96 | Started shipping Windows wheels for the x86 architecture
97 | -- by :user:`Dreamsorcerer`.
98 |
99 | File :file:`CHANGES/553.feature.rst`:
100 |
101 | .. code-block:: rst
102 |
103 | Added support for ``GenericAliases`` (``MultiDict[str]``) under Python 3.9
104 | and higher -- by :user:`mjpieters`.
105 |
106 | .. tip::
107 |
108 | See :file:`towncrier.toml` for all available categories
109 | (``tool.towncrier.type``).
110 |
111 | .. _Towncrier philosophy:
112 | https://towncrier.readthedocs.io/en/stable/#philosophy
113 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include .coveragerc
2 | include pyproject.toml
3 | include pytest.ini
4 | include towncrier.toml
5 | include LICENSE
6 | include NOTICE
7 | include CHANGES.rst
8 | include README.rst
9 | graft yarl
10 | graft packaging
11 | graft docs
12 | graft CHANGES
13 | graft requirements
14 | graft tests
15 | global-exclude *.pyc
16 | global-exclude *.cache
17 | exclude yarl/*.c
18 | exclude yarl/*.html
19 | exclude yarl/*.so
20 | exclude yarl/*.pyd
21 | prune docs/_build
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PYXS = $(wildcard yarl/*.pyx)
2 | SRC = yarl tests
3 |
4 | all: test
5 |
6 |
7 | .install-deps: $(shell find requirements -type f)
8 | pip install -U -r requirements/dev.txt
9 | pre-commit install
10 | @touch .install-deps
11 |
12 |
13 | .install-cython: requirements/cython.txt
14 | pip install -r requirements/cython.txt
15 | touch .install-cython
16 |
17 |
18 | yarl/%.c: yarl/%.pyx
19 | python -m cython -3 -o $@ $< -I yarl
20 |
21 |
22 | .cythonize: .install-cython $(PYXS:.pyx=.c)
23 |
24 |
25 | cythonize: .cythonize
26 |
27 |
28 | .develop: .install-deps $(shell find yarl -type f)
29 | @pip install -e .
30 | @touch .develop
31 |
32 | fmt:
33 | ifdef CI
34 | pre-commit run --all-files --show-diff-on-failure
35 | else
36 | pre-commit run --all-files
37 | endif
38 |
39 | lint: fmt
40 |
41 | test: lint .develop
42 | pytest ./tests ./yarl
43 |
44 |
45 | vtest: lint .develop
46 | pytest ./tests ./yarl -v
47 |
48 |
49 | cov: lint .develop
50 | pytest --cov yarl --cov-report html --cov-report term ./tests/ ./yarl/
51 | @echo "open file://`pwd`/htmlcov/index.html"
52 |
53 |
54 | doc: doctest doc-spelling
55 | make -C docs html SPHINXOPTS="-W -E --keep-going -n"
56 | @echo "open file://`pwd`/docs/_build/html/index.html"
57 |
58 |
59 | doctest: .develop
60 | make -C docs doctest SPHINXOPTS="-W -E --keep-going -n"
61 |
62 |
63 | doc-spelling:
64 | make -C docs spelling SPHINXOPTS="-W -E --keep-going -n"
65 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Copyright 2016-2021, Andrew Svetlov and aio-libs team
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | yarl
2 | ====
3 |
4 | The module provides a handy ``URL`` class for URL parsing and changing.
5 |
6 | .. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg
7 | :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI
8 | :align: right
9 |
10 | .. image:: https://codecov.io/gh/aio-libs/yarl/graph/badge.svg?flag=pytest
11 | :target: https://app.codecov.io/gh/aio-libs/yarl?flags[]=pytest
12 | :alt: Codecov coverage for the pytest-driven measurements
13 |
14 | .. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
15 | :target: https://codspeed.io/aio-libs/yarl
16 |
17 | .. image:: https://badge.fury.io/py/yarl.svg
18 | :target: https://badge.fury.io/py/yarl
19 |
20 | .. image:: https://readthedocs.org/projects/yarl/badge/?version=latest
21 | :target: https://yarl.aio-libs.org
22 |
23 | .. image:: https://img.shields.io/pypi/pyversions/yarl.svg
24 | :target: https://pypi.python.org/pypi/yarl
25 |
26 | .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
27 | :target: https://matrix.to/#/%23aio-libs:matrix.org
28 | :alt: Matrix Room — #aio-libs:matrix.org
29 |
30 | .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
31 | :target: https://matrix.to/#/%23aio-libs-space:matrix.org
32 | :alt: Matrix Space — #aio-libs-space:matrix.org
33 |
34 |
35 | Introduction
36 | ------------
37 |
38 | A URL is constructed from a ``str``:
39 |
40 | .. code-block:: pycon
41 |
42 | >>> from yarl import URL
43 | >>> url = URL('https://www.python.org/~guido?arg=1#frag')
44 | >>> url
45 | URL('https://www.python.org/~guido?arg=1#frag')
46 |
47 | All URL parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
48 | *query* and *fragment* are accessible by properties:
49 |
50 | .. code-block:: pycon
51 |
52 | >>> url.scheme
53 | 'https'
54 | >>> url.host
55 | 'www.python.org'
56 | >>> url.path
57 | '/~guido'
58 | >>> url.query_string
59 | 'arg=1'
60 | >>> url.query
61 |
62 | >>> url.fragment
63 | 'frag'
64 |
65 | All URL manipulations produce a new URL object:
66 |
67 | .. code-block:: pycon
68 |
69 | >>> url = URL('https://www.python.org')
70 | >>> url / 'foo' / 'bar'
71 | URL('https://www.python.org/foo/bar')
72 | >>> url / 'foo' % {'bar': 'baz'}
73 | URL('https://www.python.org/foo?bar=baz')
74 |
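As an illustrative sketch (outputs shown for clarity, not as verified doctest
transcripts), the ``with_*`` helpers return modified copies in the same way:

.. code-block:: pycon

    >>> url = URL('https://www.python.org/path')
    >>> url.with_scheme('http')
    URL('http://www.python.org/path')
    >>> url.with_query({'a': '1'})
    URL('https://www.python.org/path?a=1')
    >>> url  # the original object is unchanged
    URL('https://www.python.org/path')
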
75 | Strings passed to the constructor and modification methods are
76 | automatically encoded, giving a canonical representation as a result:
77 |
78 | .. code-block:: pycon
79 |
80 | >>> url = URL('https://www.python.org/шлях')
81 | >>> url
82 | URL('https://www.python.org/%D1%88%D0%BB%D1%8F%D1%85')
83 |
84 | Regular properties are *percent-decoded*; use the ``raw_`` versions to
85 | get the *encoded* strings:
86 |
87 | .. code-block:: pycon
88 |
89 | >>> url.path
90 | '/шлях'
91 |
92 | >>> url.raw_path
93 | '/%D1%88%D0%BB%D1%8F%D1%85'
94 |
95 | A human-readable representation of the URL is available via ``.human_repr()``:
96 |
97 | .. code-block:: pycon
98 |
99 | >>> url.human_repr()
100 | 'https://www.python.org/шлях'
101 |
102 | For full documentation please read https://yarl.aio-libs.org.
103 |
104 |
105 | Installation
106 | ------------
107 |
108 | ::
109 |
110 | $ pip install yarl
111 |
112 | The library is Python 3 only!
113 |
114 | PyPI contains binary wheels for Linux, Windows and macOS. If you want to install
115 | ``yarl`` on another operating system where wheels are not provided,
116 | the tarball will be used to compile the library from
117 | the source code. This requires a C compiler and Python headers to be installed.
118 |
119 | To skip the compilation you must explicitly opt in by using a PEP 517
120 | configuration setting ``pure-python``, or setting the ``YARL_NO_EXTENSIONS``
121 | environment variable to a non-empty value, e.g.:
122 |
123 | .. code-block:: console
124 |
125 | $ pip install yarl --config-settings=pure-python=true
126 |
127 | Please note that the pure-Python (uncompiled) version is much slower. However,
128 | PyPy always uses a pure-Python implementation, and, as such, it is unaffected
129 | by these settings.
130 |
131 | Dependencies
132 | ------------
133 |
134 | YARL requires the multidict_ and propcache_ libraries.
135 |
136 |
137 | API documentation
138 | ------------------
139 |
140 | The documentation is located at https://yarl.aio-libs.org.
141 |
142 |
143 | Why isn't boolean supported by the URL query API?
144 | -------------------------------------------------
145 |
146 | There is no standard for the string representation of boolean values.
147 |
148 | Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
149 | ``Y``/``N``, ``1``/``0``, etc.
150 |
151 | ``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because
152 | it is specific to how the end-user's application is built and would be different for
153 | different apps. The library doesn't accept booleans in the API; a user should convert
154 | bools into strings using their own preferred translation protocol.
155 |
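For example, one workable convention (an illustrative sketch; the URL and the
``debug`` parameter are made up, not part of the ``yarl`` API) is to
pre-convert the flag yourself:

.. code-block:: pycon

    >>> flag = True
    >>> URL('https://example.com/search').with_query({'debug': 'true' if flag else 'false'})
    URL('https://example.com/search?debug=true')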
156 |
157 | Comparison with other URL libraries
158 | ------------------------------------
159 |
160 | * furl (https://pypi.python.org/pypi/furl)
161 |
162 | The library has rich functionality but the ``furl`` object is mutable.
163 |
164 | I'm afraid to pass this object into foreign code: who knows if the
165 | code will modify my URL in a terrible way while I just want to send a URL
166 | with handy helpers for accessing URL properties.
167 |
168 | ``furl`` has other non-obvious tricky things but the main objection
169 | is mutability.
170 |
171 | * URLObject (https://pypi.python.org/pypi/URLObject)
172 |
173 | URLObject is immutable, which is pretty good.
174 |
175 | Every URL change generates a new URL object.
176 |
177 | But the library doesn't do any decode/encode transformations, leaving the
178 | end user to cope with these gory details.
179 |
180 |
181 | Source code
182 | -----------
183 |
184 | The project is hosted on GitHub_.
185 |
186 | Please file an issue on the `bug tracker
187 | <https://github.com/aio-libs/yarl/issues>`_ if you have found a bug
188 | or have a suggestion for improving the library.
189 |
190 | Discussion list
191 | ---------------
192 |
193 | *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
194 |
195 | Feel free to post your questions and ideas here.
196 |
197 |
198 | Authors and License
199 | -------------------
200 |
201 | The ``yarl`` package is written by Andrew Svetlov.
202 |
203 | It's *Apache 2* licensed and freely available.
204 |
205 |
206 | .. _GitHub: https://github.com/aio-libs/yarl
207 |
208 | .. _multidict: https://github.com/aio-libs/multidict
209 |
210 | .. _propcache: https://github.com/aio-libs/propcache
211 |
--------------------------------------------------------------------------------
/benchmark.py:
--------------------------------------------------------------------------------
1 | import timeit
2 |
3 | cython_setup = """\
4 | from yarl._quoting_c import _Quoter as Quoter
5 | from yarl._quoting_c import _Unquoter as Unquoter
6 | """
7 |
8 | python_setup = """\
9 | from yarl._quoting_py import _Quoter as Quoter
10 | from yarl._quoting_py import _Unquoter as Unquoter
11 | """
12 |
13 |
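# Each timeit.timeit() call below runs its statement the default 1,000,000
# times and reports the total wall-clock time in seconds, so the Cython and
# Python figures for the same label are directly comparable.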
14 | print(
15 | "Cython quote ascii: {:.3f} sec".format(
16 | timeit.timeit("q(s)", cython_setup + "s='/path/to';q=Quoter(safe='/')")
17 | )
18 | )
19 |
20 |
21 | print(
22 | "Python quote ascii: {:.3f} sec".format(
23 | timeit.timeit("q(s)", python_setup + "s='/path/to';q=Quoter(safe='/')")
24 | )
25 | )
26 |
27 |
28 | print(
29 | "Cython quote PCT: {:.3f} sec".format(
30 | timeit.timeit("q(s)", cython_setup + "s='abc%0a';q=Quoter()")
31 | )
32 | )
33 |
34 |
35 | print(
36 | "Python quote PCT: {:.3f} sec".format(
37 | timeit.timeit("q(s)", python_setup + "s='abc%0a';q=Quoter()")
38 | )
39 | )
40 |
41 |
42 | print(
43 | "Cython quote: {:.3f} sec".format(
44 | timeit.timeit("q(s)", cython_setup + "s='/шлях/файл';q=Quoter()")
45 | )
46 | )
47 |
48 |
49 | print(
50 | "Python quote: {:.3f} sec".format(
51 | timeit.timeit("q(s)", python_setup + "s='/шлях/файл';q=Quoter()")
52 | )
53 | )
54 |
55 |
56 | print(
57 | "Cython unquote: {:.3f} sec".format(
58 | timeit.timeit("u(s)", cython_setup + "s='/path/to';u=Unquoter()")
59 | )
60 | )
61 |
62 |
63 | print(
64 | "Python unquote: {:.3f} sec".format(
65 | timeit.timeit("u(s)", python_setup + "s='/path/to';u=Unquoter()")
66 | )
67 | )
68 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help
18 | help:
19 | @echo "Please use \`make ' where is one of"
20 | @echo " html to make standalone HTML files"
21 | @echo " dirhtml to make HTML files named index.html in directories"
22 | @echo " singlehtml to make a single large HTML file"
23 | @echo " pickle to make pickle files"
24 | @echo " json to make JSON files"
25 | @echo " htmlhelp to make HTML files and a HTML help project"
26 | @echo " qthelp to make HTML files and a qthelp project"
27 | @echo " applehelp to make an Apple Help Book"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " epub3 to make an epub3"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | @echo " text to make text files"
35 | @echo " man to make manual pages"
36 | @echo " texinfo to make Texinfo files"
37 | @echo " info to make Texinfo files and run them through makeinfo"
38 | @echo " gettext to make PO message catalogs"
39 | @echo " changes to make an overview of all changed/added/deprecated items"
40 | @echo " xml to make Docutils-native XML files"
41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
42 | @echo " linkcheck to check all external links for integrity"
43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
44 | @echo " coverage to run coverage check of the documentation (if enabled)"
45 | @echo " dummy to check syntax errors of document sources"
46 |
47 | .PHONY: clean
48 | clean:
49 | rm -rf $(BUILDDIR)/*
50 |
51 | .PHONY: html
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | .PHONY: dirhtml
58 | dirhtml:
59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | @echo
61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 |
63 | .PHONY: singlehtml
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | .PHONY: pickle
70 | pickle:
71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | @echo
73 | @echo "Build finished; now you can process the pickle files."
74 |
75 | .PHONY: json
76 | json:
77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | @echo
79 | @echo "Build finished; now you can process the JSON files."
80 |
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | .PHONY: qthelp
89 | qthelp:
90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | @echo
92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/yarl.qhcp"
95 | @echo "To view the help file:"
96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/yarl.qhc"
97 |
98 | .PHONY: applehelp
99 | applehelp:
100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | @echo
102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | @echo "N.B. You won't be able to view it unless you put it in" \
104 | "~/Library/Documentation/Help or install it in your application" \
105 | "bundle."
106 |
107 | .PHONY: devhelp
108 | devhelp:
109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
110 | @echo
111 | @echo "Build finished."
112 | @echo "To view the help file:"
113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/yarl"
114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/yarl"
115 | @echo "# devhelp"
116 |
117 | .PHONY: epub
118 | epub:
119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
120 | @echo
121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
122 |
123 | .PHONY: epub3
124 | epub3:
125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
126 | @echo
127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
128 |
129 | .PHONY: latex
130 | latex:
131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
132 | @echo
133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
135 | "(use \`make latexpdf' here to do that automatically)."
136 |
137 | .PHONY: latexpdf
138 | latexpdf:
139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
140 | @echo "Running LaTeX files through pdflatex..."
141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
143 |
144 | .PHONY: latexpdfja
145 | latexpdfja:
146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
147 | @echo "Running LaTeX files through platex and dvipdfmx..."
148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
150 |
151 | .PHONY: text
152 | text:
153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
154 | @echo
155 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
156 |
157 | .PHONY: man
158 | man:
159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
160 | @echo
161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
162 |
163 | .PHONY: texinfo
164 | texinfo:
165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
166 | @echo
167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
168 | @echo "Run \`make' in that directory to run these through makeinfo" \
169 | "(use \`make info' here to do that automatically)."
170 |
171 | .PHONY: info
172 | info:
173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
174 | @echo "Running Texinfo files through makeinfo..."
175 | make -C $(BUILDDIR)/texinfo info
176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
177 |
178 | .PHONY: gettext
179 | gettext:
180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
181 | @echo
182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
183 |
184 | .PHONY: changes
185 | changes:
186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
187 | @echo
188 | @echo "The overview file is in $(BUILDDIR)/changes."
189 |
190 | .PHONY: linkcheck
191 | linkcheck:
192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
193 | @echo
194 | @echo "Link check complete; look for any errors in the above output " \
195 | "or in $(BUILDDIR)/linkcheck/output.txt."
196 |
197 | .PHONY: doctest
198 | doctest:
199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | @echo "Testing of doctests in the sources finished, look at the " \
201 | "results in $(BUILDDIR)/doctest/output.txt."
202 |
203 | .PHONY: coverage
204 | coverage:
205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | @echo "Testing of coverage in the sources finished, look at the " \
207 | "results in $(BUILDDIR)/coverage/python.txt."
208 |
209 | .PHONY: xml
210 | xml:
211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | @echo
213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 |
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | @echo
219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 |
221 | .PHONY: dummy
222 | dummy:
223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | @echo
225 | @echo "Build finished. Dummy builder generates no files."
226 |
227 | spelling:
228 | $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling
229 | @echo
230 | @echo "Build finished."
231 |
--------------------------------------------------------------------------------
/docs/_static/yarl-icon-128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aio-libs/yarl/b1ce3027d0d1ae790337af3d569345c42dbfcdee/docs/_static/yarl-icon-128x128.png
--------------------------------------------------------------------------------
/docs/changes.rst:
--------------------------------------------------------------------------------
1 | .. _yarl_changes:
2 |
3 | =========
4 | Changelog
5 | =========
6 |
7 | .. only:: not is_release
8 |
9 | To be included in v\ |release| (if present)
10 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
11 |
12 | .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT]
13 |
14 | Released versions
15 | ^^^^^^^^^^^^^^^^^
16 |
17 | .. include:: ../CHANGES.rst
18 | :start-after: .. towncrier release notes start
19 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | #
3 | # yarl documentation build configuration file, created by
4 | # sphinx-quickstart on Mon Aug 29 19:55:36 2016.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another directory,
16 | # add these directories to sys.path here. If the directory is relative to the
17 | # documentation root, use os.path.abspath to make it absolute, like shown here.
18 | #
19 | # import os
20 | # import sys
21 | # sys.path.insert(0, os.path.abspath('.'))
22 |
23 | # -- General configuration ------------------------------------------------
24 |
25 | # If your documentation needs a minimal Sphinx version, state it here.
26 | #
27 | # needs_sphinx = '1.0'
28 |
29 | import os
30 | import re
31 | from pathlib import Path
32 |
33 | PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve()
34 | IS_RELEASE_ON_RTD = (
35 | os.getenv("READTHEDOCS", "False") == "True"
36 | and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag"
37 | )
38 | if IS_RELEASE_ON_RTD:
39 | tags.add("is_release")
40 |
41 |
42 | _docs_path = Path(__file__).parent
43 | _version_path = _docs_path / "../yarl/__init__.py"
44 |
45 |
46 | with _version_path.open() as fp:
47 | try:
48 | _version_info = re.search(
49 | r"^__version__ = \""
50 | r"(?P\d+)"
51 | r"\.(?P\d+)"
52 | r"\.(?P\d+)"
53 | r"(?P.*)?\"$",
54 | fp.read(),
55 | re.M,
56 | ).groupdict()
57 | except AttributeError:
58 | raise RuntimeError("Unable to determine version.")
59 |
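# For example, a line like `__version__ = "1.9.4"` (hypothetical value) yields
# {'major': '1', 'minor': '9', 'patch': '4', 'tag': ''}, which feeds the
# `version` and `release` strings computed below.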
60 | # Add any Sphinx extension module names here, as strings. They can be
61 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
62 | # ones.
63 | extensions = [
64 | # stdlib-party extensions:
65 | "sphinx.ext.extlinks",
66 | "sphinx.ext.intersphinx",
67 | "sphinx.ext.coverage",
68 | "sphinx.ext.doctest",
69 | "sphinx.ext.viewcode",
70 | # Third-party extensions:
71 | "alabaster",
72 | "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive
73 | "myst_parser", # extended markdown; https://pypi.org/project/myst-parser/
74 | ]
75 |
76 |
77 | try:
78 | import sphinxcontrib.spelling # noqa
79 |
80 | extensions.append("sphinxcontrib.spelling")
81 | except ImportError:
82 | pass
83 |
84 | intersphinx_mapping = {
85 | "python": ("https://docs.python.org/3", None),
86 | "multidict": ("https://multidict.aio-libs.org/en/stable", None),
87 | "propcache": ("https://propcache.aio-libs.org/en/stable", None),
88 | }
89 |
90 |
91 | # Add any paths that contain templates here, relative to this directory.
92 | # templates_path = ['_templates']
93 |
94 | # The suffix(es) of source filenames.
95 | # You can specify multiple suffix as a list of string:
96 | #
97 | # source_suffix = ['.rst', '.md']
98 |
99 | # The encoding of source files.
100 | #
101 | # source_encoding = 'utf-8-sig'
102 |
103 | # The master toctree document.
104 | master_doc = "index"
105 |
106 | # -- Project information -----------------------------------------------------
107 |
108 | github_url = "https://github.com"
109 | github_repo_org = "aio-libs"
110 | github_repo_name = "yarl"
111 | github_repo_slug = f"{github_repo_org}/{github_repo_name}"
112 | github_repo_url = f"{github_url}/{github_repo_slug}"
113 | github_sponsors_url = f"{github_url}/sponsors"
114 |
115 | project = github_repo_name
116 | copyright = f"2016, Andrew Svetlov, {project} contributors and aio-libs team"
117 | author = "Andrew Svetlov and aio-libs team"
118 |
119 | # The version info for the project you're documenting, acts as replacement for
120 | # |version| and |release|, also used in various other places throughout the
121 | # built documents.
122 | #
123 | # The short X.Y version.
124 | version = "{major}.{minor}".format(**_version_info)
125 | # The full version, including alpha/beta/rc tags.
126 | release = "{major}.{minor}.{patch}-{tag}".format(**_version_info)
127 |
128 | rst_epilog = f"""
129 | .. |project| replace:: {project}
130 | """ # pylint: disable=invalid-name
131 |
132 | # The language for content autogenerated by Sphinx. Refer to documentation
133 | # for a list of supported languages.
134 | #
135 | # This is also used if you do content translation via gettext catalogs.
136 | # Usually you set "language" from the command line for these cases.
137 | language = "en"
138 |
139 | # There are two options for replacing |today|: either, you set today to some
140 | # non-false value, then it is used:
141 | #
142 | # today = ''
143 | #
144 | # Else, today_fmt is used as the format for a strftime call.
145 | #
146 | # today_fmt = '%B %d, %Y'
147 |
148 | # List of patterns, relative to source directory, that match files and
149 | # directories to ignore when looking for source files.
150 | # This patterns also effect to html_static_path and html_extra_path
151 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
152 |
153 | # The reST default role (used for this markup: `text`) to use for all
154 | # documents.
155 | #
156 | # default_role = None
157 |
158 | # If true, '()' will be appended to :func: etc. cross-reference text.
159 | #
160 | # add_function_parentheses = True
161 |
162 | # If true, the current module name will be prepended to all description
163 | # unit titles (such as .. function::).
164 | #
165 | # add_module_names = True
166 |
167 | # If true, sectionauthor and moduleauthor directives will be shown in the
168 | # output. They are ignored by default.
169 | #
170 | # show_authors = False
171 |
172 | # The name of the Pygments (syntax highlighting) style to use.
173 | pygments_style = "sphinx"
174 |
175 | # A list of ignored prefixes for module index sorting.
176 | # modindex_common_prefix = []
177 |
178 | # If true, keep warnings as "system message" paragraphs in the built documents.
179 | # keep_warnings = False
180 |
181 | # If true, `todo` and `todoList` produce output, else they produce nothing.
182 | todo_include_todos = False
183 |
184 | # -- Extension configuration -------------------------------------------------
185 |
186 | # -- Options for extlinks extension ---------------------------------------
187 | extlinks = {
188 | "issue": (f"{github_repo_url}/issues/%s", "#%s"),
189 | "pr": (f"{github_repo_url}/pull/%s", "PR #%s"),
190 | "commit": (f"{github_repo_url}/commit/%s", "%s"),
191 | "gh": (f"{github_url}/%s", "GitHub: %s"),
192 | "user": (f"{github_sponsors_url}/%s", "@%s"),
193 | }
194 |
195 |
196 | # -- Options for HTML output ----------------------------------------------
197 |
198 | # The theme to use for HTML and HTML Help pages. See the documentation for
199 | # a list of builtin themes.
200 | #
201 | html_theme = "alabaster"
202 |
203 | html_theme_options = {
204 | "logo": "yarl-icon-128x128.png",
205 | "description": "Yet another URL library",
206 | "github_user": "aio-libs",
207 | "github_repo": "yarl",
208 | "github_button": True,
209 | "github_type": "star",
210 | "github_banner": True,
211 | "codecov_button": True,
212 | "pre_bg": "#FFF6E5",
213 | "note_bg": "#E5ECD1",
214 | "note_border": "#BFCF8C",
215 | "body_text": "#482C0A",
216 | "sidebar_text": "#49443E",
217 | "sidebar_header": "#4B4032",
218 | "sidebar_collapse": False,
219 | }
220 |
221 | # Theme options are theme-specific and customize the look and feel of a theme
222 | # further. For a list of options available for each theme, see the
223 | # documentation.
224 | #
225 | # html_theme_options = {}
226 |
227 | # Add any paths that contain custom themes here, relative to this directory.
228 | # html_theme_path = []
229 |
230 | # The name for this set of Sphinx documents.
231 | # " v documentation" by default.
232 | #
233 | # html_title = 'yarl v0.1.0'
234 |
235 | # A shorter title for the navigation bar. Default is the same as html_title.
236 | #
237 | # html_short_title = None
238 |
239 | # The name of an image file (relative to this directory) to place at the top
240 | # of the sidebar.
241 | #
242 | # html_logo = None
243 |
244 | # The name of an image file (relative to this directory) to use as a favicon of
245 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
246 | # pixels large.
247 | #
248 | # html_favicon = None
249 |
250 | # Add any paths that contain custom static files (such as style sheets) here,
251 | # relative to this directory. They are copied after the builtin static files,
252 | # so a file named "default.css" will overwrite the builtin "default.css".
253 | html_static_path = ["_static"]
254 |
255 | # Add any extra paths that contain custom files (such as robots.txt or
256 | # .htaccess) here, relative to this directory. These files are copied
257 | # directly to the root of the documentation.
258 | #
259 | # html_extra_path = []
260 |
261 | # If not None, a 'Last updated on:' timestamp is inserted at every page
262 | # bottom, using the given strftime format.
263 | # The empty string is equivalent to '%b %d, %Y'.
264 | #
265 | # html_last_updated_fmt = None
266 |
267 | # If true, SmartyPants will be used to convert quotes and dashes to
268 | # typographically correct entities.
269 | #
270 | # html_use_smartypants = True
271 |
272 | # Custom sidebar templates, maps document names to template names.
273 | #
274 | # html_sidebars = {}
275 |
276 | html_sidebars = {
277 | "**": [
278 | "about.html",
279 | "navigation.html",
280 | "searchbox.html",
281 | ]
282 | }
283 |
284 | # Additional templates that should be rendered to pages, maps page names to
285 | # template names.
286 | #
287 | # html_additional_pages = {}
288 |
289 | # If false, no module index is generated.
290 | #
291 | # html_domain_indices = True
292 |
293 | # If false, no index is generated.
294 | #
295 | # html_use_index = True
296 |
297 | # If true, the index is split into individual pages for each letter.
298 | #
299 | # html_split_index = False
300 |
301 | # If true, links to the reST sources are added to the pages.
302 | #
303 | # html_show_sourcelink = True
304 |
305 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
306 | #
307 | # html_show_sphinx = True
308 |
309 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
310 | #
311 | # html_show_copyright = True
312 |
313 | # If true, an OpenSearch description file will be output, and all pages will
314 | # contain a <link> tag referring to it. The value of this option must be the
315 | # base URL from which the finished HTML is served.
316 | #
317 | # html_use_opensearch = ''
318 |
319 | # This is the file name suffix for HTML files (e.g. ".xhtml").
320 | # html_file_suffix = None
321 |
322 | # Language to be used for generating the HTML full-text search index.
323 | # Sphinx supports the following languages:
324 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
325 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
326 | #
327 | # html_search_language = 'en'
328 |
329 | # A dictionary with options for the search language support, empty by default.
330 | # 'ja' uses this config value.
331 | # 'zh' user can custom change `jieba` dictionary path.
332 | #
333 | # html_search_options = {'type': 'default'}
334 |
335 | # The name of a javascript file (relative to the configuration directory) that
336 | # implements a search results scorer. If empty, the default will be used.
337 | #
338 | # html_search_scorer = 'scorer.js'
339 |
340 | # Output file base name for HTML help builder.
341 | htmlhelp_basename = "yarldoc"
342 |
343 | # -- Options for LaTeX output ---------------------------------------------
344 |
345 | latex_elements = {
346 | # The paper size ('letterpaper' or 'a4paper').
347 | #
348 | # 'papersize': 'letterpaper',
349 | # The font size ('10pt', '11pt' or '12pt').
350 | #
351 | # 'pointsize': '10pt',
352 | # Additional stuff for the LaTeX preamble.
353 | #
354 | # 'preamble': '',
355 | # Latex figure (float) alignment
356 | #
357 | # 'figure_align': 'htbp',
358 | }
359 |
360 | # Grouping the document tree into LaTeX files. List of tuples
361 | # (source start file, target name, title,
362 | # author, documentclass [howto, manual, or own class]).
363 | latex_documents = [
364 | (master_doc, "yarl.tex", "yarl Documentation", "Andrew Svetlov", "manual"),
365 | ]
366 |
367 | # The name of an image file (relative to this directory) to place at the top of
368 | # the title page.
369 | #
370 | # latex_logo = None
371 |
372 | # For "manual" documents, if this is true, then toplevel headings are parts,
373 | # not chapters.
374 | #
375 | # latex_use_parts = False
376 |
377 | # If true, show page references after internal links.
378 | #
379 | # latex_show_pagerefs = False
380 |
381 | # If true, show URL addresses after external links.
382 | #
383 | # latex_show_urls = False
384 |
385 | # Documents to append as an appendix to all manuals.
386 | #
387 | # latex_appendices = []
388 |
389 | # If false, will not define \strong, \code, \titleref, \crossref ... but only
390 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
391 | # packages.
392 | #
393 | # latex_keep_old_macro_names = True
394 |
395 | # If false, no module index is generated.
396 | #
397 | # latex_domain_indices = True
398 |
399 |
400 | # -- Options for manual page output ---------------------------------------
401 |
402 | # One entry per manual page. List of tuples
403 | # (source start file, name, description, authors, manual section).
404 | man_pages = [(master_doc, "yarl", "yarl Documentation", [author], 1)]
405 |
406 | # If true, show URL addresses after external links.
407 | #
408 | # man_show_urls = False
409 |
410 |
411 | # -- Options for Texinfo output -------------------------------------------
412 |
413 | # Grouping the document tree into Texinfo files. List of tuples
414 | # (source start file, target name, title, author,
415 | # dir menu entry, description, category)
416 | texinfo_documents = [
417 | (
418 | master_doc,
419 | "yarl",
420 | "yarl Documentation",
421 | author,
422 | "yarl",
423 | "One line description of project.",
424 | "Miscellaneous",
425 | ),
426 | ]
427 |
428 | # Documents to append as an appendix to all manuals.
429 | #
430 | # texinfo_appendices = []
431 |
432 | # If false, no module index is generated.
433 | #
434 | # texinfo_domain_indices = True
435 |
436 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
437 | #
438 | # texinfo_show_urls = 'footnote'
439 |
440 | # If true, do not generate a @detailmenu in the "Top" node's menu.
441 | #
442 | # texinfo_no_detailmenu = False
443 |
444 | default_role = "any"
445 | nitpicky = True
446 | nitpick_ignore = [
447 | ("envvar", "TMPDIR"),
448 | ]
449 |
450 | # -- Options for towncrier_draft extension -----------------------------------
451 |
452 | towncrier_draft_autoversion_mode = "draft" # or: 'sphinx-version', 'sphinx-release'
453 | towncrier_draft_include_empty = True
454 | towncrier_draft_working_directory = PROJECT_ROOT_DIR
455 | # Not yet supported: towncrier_draft_config_path = 'pyproject.toml' # relative to cwd
456 |
--------------------------------------------------------------------------------
/docs/contributing/guidelines.rst:
--------------------------------------------------------------------------------
1 | -----------------
2 | Contributing docs
3 | -----------------
4 |
5 | We use Sphinx_ to generate our docs website. You can trigger
6 | the process locally by executing:
7 |
8 | .. code-block:: shell-session
9 |
10 | $ make doc
11 |
12 | It is also integrated with `Read The Docs`_ that builds and
13 | publishes each commit to the main branch and generates live
14 | docs previews for each pull request.
15 |
16 | The sources of the Sphinx_ documents use reStructuredText as a
17 | de-facto standard. But in order to make contributing docs more
18 | beginner-friendly, we've integrated `MyST parser`_ allowing us
19 | to also accept new documents written in an extended version of
20 | Markdown that supports using Sphinx directives and roles. `Read
21 | the docs <MyST docs_>`_ to learn more on how to use it.
22 |
23 | .. _MyST docs: https://myst-parser.readthedocs.io/en/latest/using/intro.html#writing-myst-in-sphinx
24 | .. _MyST parser: https://pypi.org/project/myst-parser/
25 | .. _Read The Docs: https://readthedocs.org
26 | .. _Sphinx: https://www.sphinx-doc.org
27 |
28 | .. include:: ../../CHANGES/README.rst
29 |
--------------------------------------------------------------------------------
/docs/contributing/release_guide.rst:
--------------------------------------------------------------------------------
1 | *************
2 | Release Guide
3 | *************
4 |
5 | Welcome to the |project| Release Guide!
6 |
7 | This page contains information on how to release a new version
8 | of |project| using the automated Continuous Delivery pipeline.
9 |
10 | .. tip::
11 |
12 | The intended audience for this document is maintainers
13 | and core contributors.
14 |
15 |
16 | Pre-release activities
17 | ======================
18 |
19 | 1. Check if there are any open Pull Requests that could be
20 | desired in the upcoming release. If there are any — merge
21 | them. If some are incomplete, try to get them ready.
22 | Don't forget to review the enclosed change notes per our
23 | guidelines.
24 | 2. Visually inspect the draft section of the :ref:`Changelog`
25 | page. Make sure the content looks consistent, uses the same
26 | writing style, targets the end-users and adheres to our
27 | documented guidelines.
28 | Most of the changelog sections will typically use the past
29 | tense or another way to relay the effect of the changes for
30 | the users, since the previous release.
31 | It should not target core contributors as the information
32 | they are normally interested in is already present in the
33 | Git history.
34 | Update the changelog fragments if you see any problems with
35 | this changelog section.
36 | 3. Optionally, test the previously published nightlies, that are
37 | available through GitHub Actions CI/CD artifacts, locally.
38 | 4. If you are satisfied with the above, inspect the changelog
39 | section categories in the draft. Presence of the breaking
40 | changes or features will hint you what version number
41 | segment to bump for the release.
42 | 5. Update the hardcoded version string in :file:`yarl/__init__.py`.
43 | Generate a new changelog from the fragments, and commit it
44 | along with the fragments removal and the Python module changes.
45 | Use the following commands; don't prepend a leading ``v`` before
46 | the version number. Just use the raw version number as per
47 | :pep:`440`.
48 |
49 | .. code-block:: shell-session
50 |
51 | [dir:yarl] $ yarl/__init__.py
52 | [dir:yarl] $ python -m towncrier build \
53 | -- --version 'VERSION_WITHOUT_LEADING_V'
54 | [dir:yarl] $ git commit -v CHANGES{.rst,/} yarl/__init__.py
55 |
56 | .. seealso::
57 |
58 | :ref:`Adding change notes with your PRs`
59 | Writing beautiful changelogs for humans
60 |
61 |
62 | The release stage
63 | =================
64 |
65 | 1. Tag the commit with version and changelog changes, created
66 | during the preparation stage. If possible, make it GPG-signed.
67 | Prepend a leading ``v`` before the version number for the tag
68 | name. Add an extra sentence describing the release contents,
69 | in a few words.
70 |
71 | .. code-block:: shell-session
72 |
73 | [dir:yarl] $ git tag \
74 | -s 'VERSION_WITH_LEADING_V' \
75 | -m 'VERSION_WITH_LEADING_V' \
76 | -m 'This release does X and Y.'
77 |
78 |
79 | 2. Push that tag to the upstream repository, which ``origin`` is
80 | considered to be in the example below.
81 |
82 | .. code-block:: shell-session
83 |
84 | [dir:yarl] $ git push origin 'VERSION_WITH_LEADING_V'
85 |
86 | 3. You can open the `GitHub Actions CI/CD workflow page
87 | <https://github.com/aio-libs/yarl/actions/workflows/ci-cd.yml>`_ in your
88 | web browser to monitor the progress. But generally, you don't need to babysit the CI.
89 | 4. Check that web page or your email inbox for the notification
90 | with an approval request. GitHub will send it when it reaches
91 | the final "publishing" job.
92 | 5. Approve the deployment and wait for the CD workflow to complete.
93 | 6. Verify that the following things got created:
94 | - a PyPI release
95 | - a Git tag
96 | - a GitHub Releases page
97 | 7. Tell everyone you released a new version of |project| :)
98 | Depending on your mental capacity and the burnout stage, you
99 | are encouraged to post the updates in issues asking for the
100 | next release, contributed PRs, Bluesky, Twitter etc. You can
101 | also call out prominent contributors and thank them!
102 |
103 |
104 | .. _GitHub Actions CI/CD workflow:
105 | https://github.com/aio-libs/yarl/actions/workflows/ci-cd.yml
106 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. yarl documentation master file, created by
2 | sphinx-quickstart on Mon Aug 29 19:55:36 2016.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | yarl
7 | ====
8 |
9 | The module provides a handy :class:`~yarl.URL` class for URL parsing and
10 | changing.
11 |
12 | Introduction
13 | ------------
14 |
15 | A URL is constructed from a :class:`str`:
16 |
17 | .. doctest::
18 |
19 | >>> from yarl import URL
20 | >>> url = URL('https://www.python.org/~guido?arg=1#frag')
21 | >>> url
22 | URL('https://www.python.org/~guido?arg=1#frag')
23 |
24 | All URL parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
25 | *query* and *fragment* are accessible by properties:
26 |
27 | .. doctest::
28 |
29 | >>> url.scheme
30 | 'https'
31 | >>> url.host
32 | 'www.python.org'
33 | >>> url.path
34 | '/~guido'
35 | >>> url.query_string
36 | 'arg=1'
37 | >>> url.query
38 | <MultiDictProxy('arg': '1')>
39 | >>> url.fragment
40 | 'frag'
41 |
42 | All URL manipulations produce a new URL object:
43 |
44 | .. doctest::
45 |
46 | >>> url.parent / 'downloads/source'
47 | URL('https://www.python.org/downloads/source')
48 |
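The path is also exposed through convenience accessors; an illustrative
sketch (shown as a plain snippet rather than a verified doctest):

.. code-block:: pycon

    >>> url.parts
    ('/', '~guido')
    >>> url.name
    '~guido'
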
49 | A URL object can be modified with the ``/`` and ``%`` operators:
50 |
51 | .. doctest::
52 |
53 | >>> url = URL('https://www.python.org')
54 | >>> url / 'foo' / 'bar'
55 | URL('https://www.python.org/foo/bar')
56 | >>> url / 'foo' % {'bar': 'baz'}
57 | URL('https://www.python.org/foo?bar=baz')
58 |
59 | Strings passed to the constructor and modification methods are
60 | automatically encoded, giving a canonical representation as a result:
61 |
62 | .. doctest::
63 |
64 | >>> url = URL('https://www.python.org/шлях')
65 | >>> url
66 | URL('https://www.python.org/%D1%88%D0%BB%D1%8F%D1%85')
67 |
68 | Regular properties are *percent-decoded*; use the ``raw_`` versions to
69 | get the *encoded* strings:
70 |
71 | .. doctest::
72 |
73 | >>> url.path
74 | '/шлях'
75 |
76 | >>> url.raw_path
77 | '/%D1%88%D0%BB%D1%8F%D1%85'
78 |
79 | A human-readable representation of the URL is available via :meth:`~yarl.URL.human_repr`:
80 |
81 | .. doctest::
82 |
83 | >>> url.human_repr()
84 | 'https://www.python.org/шлях'
85 |
86 | For full documentation please read the :ref:`yarl-api` section.
87 |
88 |
89 | Installation
90 | ------------
91 |
92 | ::
93 |
94 | $ pip install yarl
95 |
96 | The library is Python 3 only!
97 |
98 | PyPI contains binary wheels for Linux, Windows and macOS. If you want to install
99 | ``yarl`` on another operating system (like *Alpine Linux*, which is not
100 | manylinux-compliant because of the missing glibc and, therefore, cannot be
101 | used with our wheels), the tarball will be used to compile the library from
102 | the source code. This requires a C compiler and Python headers to be installed.
103 |
104 | To skip the compilation you must explicitly opt in by using a PEP 517
105 | configuration setting ``pure-python``, or setting the ``YARL_NO_EXTENSIONS``
106 | environment variable to a non-empty value, e.g.:
107 |
108 | .. code-block:: console
109 |
110 | $ pip install yarl --config-settings=pure-python=true
111 |
112 | Please note that the pure-Python (uncompiled) version is much slower. However,
113 | PyPy always uses a pure-Python implementation, and, as such, it is unaffected
114 | by these settings.
115 |
116 | Dependencies
117 | ------------
118 |
119 | ``yarl`` requires the :mod:`multidict` and :mod:`propcache` libraries.
120 |
121 | They are installed automatically.
122 |
123 |
124 | API documentation
125 | ------------------
126 |
127 | Open :ref:`yarl-api` to read the full list of available methods.
128 |
129 |
130 | Comparison with other URL libraries
131 | ------------------------------------
132 |
133 | * furl (https://pypi.python.org/pypi/furl)
134 |
135 | The library has rich functionality, but the ``furl`` object is mutable.
136 |
137 | I'm afraid to pass this object into foreign code: who knows if the
138 | code will modify my URL in a terrible way while I just want to send a URL
139 | with handy helpers for accessing URL properties.
140 |
141 | ``furl`` has other non-obvious tricky things, but the main objection
142 | is mutability.
143 |
144 | * URLObject (https://pypi.python.org/pypi/URLObject)
145 |
146 | URLObject is immutable, which is pretty good.
147 |
148 | Every URL change generates a new URL object.
149 |
150 | But the library doesn't do any decode/encode transformations, leaving the end
151 | user to cope with these gory details.
152 |
153 |
154 | .. _yarl-bools-support:
155 |
156 | Why isn't boolean supported by the URL query API?
157 | -------------------------------------------------
158 |
159 | There is no standard for the string representation of boolean values.
160 |
161 | Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
162 | ``Y``/``N``, ``1``/``0``, etc.
163 |
164 | ``yarl`` cannot make an unambiguous decision on how to serialize :class:`bool` values
165 | because it is specific to how the end-user's application is built and would be different
166 | for different apps. The library doesn't accept booleans in the API; a user should
167 | convert bools into strings using their own preferred translation protocol.
168 |
169 | Source code
170 | -----------
171 |
172 | The project is hosted on GitHub_.
173 |
174 | Please file an issue on the `bug tracker
175 | <https://github.com/aio-libs/yarl/issues>`_ if you have found a bug
176 | or have a suggestion for improving the library.
177 |
178 | Discussion list
179 | ---------------
180 |
181 | *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
182 |
183 | Feel free to post your questions and ideas here.
184 |
185 |
186 | Authors and License
187 | -------------------
188 |
189 | The ``yarl`` package is written by Andrew Svetlov.
190 |
191 | It's *Apache 2* licensed and freely available.
192 |
193 |
194 |
195 | Contents:
196 |
197 | .. toctree::
198 | :maxdepth: 2
199 |
200 | api
201 |
202 | .. toctree::
203 | :caption: What's new
204 |
205 | changes
206 |
207 | .. toctree::
208 | :caption: Contributing
209 |
210 | contributing/guidelines
211 |
212 | .. toctree::
213 | :caption: Maintenance
214 |
215 | contributing/release_guide
216 |
217 |
218 | Indices and tables
219 | ==================
220 |
221 | * :ref:`genindex`
222 | * :ref:`modindex`
223 | * :ref:`search`
224 |
225 |
226 | .. _GitHub: https://github.com/aio-libs/yarl
227 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | 	echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. epub3 to make an epub3
31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
32 | echo. text to make text files
33 | echo. man to make manual pages
34 | echo. texinfo to make Texinfo files
35 | echo. gettext to make PO message catalogs
36 | echo. changes to make an overview over all changed/added/deprecated items
37 | echo. xml to make Docutils-native XML files
38 | echo. pseudoxml to make pseudoxml-XML files for display purposes
39 | echo. linkcheck to check all external links for integrity
40 | echo. doctest to run all doctests embedded in the documentation if enabled
41 | echo. coverage to run coverage check of the documentation if enabled
42 | echo. dummy to check syntax errors of document sources
43 | goto end
44 | )
45 |
46 | if "%1" == "clean" (
47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
48 | del /q /s %BUILDDIR%\*
49 | goto end
50 | )
51 |
52 |
53 | REM Check if sphinx-build is available and fallback to Python version if any
54 | %SPHINXBUILD% 1>NUL 2>NUL
55 | if errorlevel 9009 goto sphinx_python
56 | goto sphinx_ok
57 |
58 | :sphinx_python
59 |
60 | set SPHINXBUILD=python -m sphinx.__init__
61 | %SPHINXBUILD% 2> nul
62 | if errorlevel 9009 (
63 | echo.
64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
65 | echo.installed, then set the SPHINXBUILD environment variable to point
66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
67 | echo.may add the Sphinx directory to PATH.
68 | echo.
69 | echo.If you don't have Sphinx installed, grab it from
70 | echo.http://sphinx-doc.org/
71 | exit /b 1
72 | )
73 |
74 | :sphinx_ok
75 |
76 |
77 | if "%1" == "html" (
78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
79 | if errorlevel 1 exit /b 1
80 | echo.
81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
82 | goto end
83 | )
84 |
85 | if "%1" == "dirhtml" (
86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
87 | if errorlevel 1 exit /b 1
88 | echo.
89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
90 | goto end
91 | )
92 |
93 | if "%1" == "singlehtml" (
94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
95 | if errorlevel 1 exit /b 1
96 | echo.
97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
98 | goto end
99 | )
100 |
101 | if "%1" == "pickle" (
102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
103 | if errorlevel 1 exit /b 1
104 | echo.
105 | echo.Build finished; now you can process the pickle files.
106 | goto end
107 | )
108 |
109 | if "%1" == "json" (
110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
111 | if errorlevel 1 exit /b 1
112 | echo.
113 | echo.Build finished; now you can process the JSON files.
114 | goto end
115 | )
116 |
117 | if "%1" == "htmlhelp" (
118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
119 | if errorlevel 1 exit /b 1
120 | echo.
121 | echo.Build finished; now you can run HTML Help Workshop with the ^
122 | .hhp project file in %BUILDDIR%/htmlhelp.
123 | goto end
124 | )
125 |
126 | if "%1" == "qthelp" (
127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
128 | if errorlevel 1 exit /b 1
129 | echo.
130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
131 | .qhcp project file in %BUILDDIR%/qthelp, like this:
132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\yarl.qhcp
133 | echo.To view the help file:
134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\yarl.ghc
135 | goto end
136 | )
137 |
138 | if "%1" == "devhelp" (
139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
140 | if errorlevel 1 exit /b 1
141 | echo.
142 | echo.Build finished.
143 | goto end
144 | )
145 |
146 | if "%1" == "epub" (
147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
148 | if errorlevel 1 exit /b 1
149 | echo.
150 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
151 | goto end
152 | )
153 |
154 | if "%1" == "epub3" (
155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
156 | if errorlevel 1 exit /b 1
157 | echo.
158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
159 | goto end
160 | )
161 |
162 | if "%1" == "latex" (
163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
164 | if errorlevel 1 exit /b 1
165 | echo.
166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdf" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "latexpdfja" (
181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
182 | cd %BUILDDIR%/latex
183 | make all-pdf-ja
184 | cd %~dp0
185 | echo.
186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
187 | goto end
188 | )
189 |
190 | if "%1" == "text" (
191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
192 | if errorlevel 1 exit /b 1
193 | echo.
194 | echo.Build finished. The text files are in %BUILDDIR%/text.
195 | goto end
196 | )
197 |
198 | if "%1" == "man" (
199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
200 | if errorlevel 1 exit /b 1
201 | echo.
202 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
203 | goto end
204 | )
205 |
206 | if "%1" == "texinfo" (
207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
208 | if errorlevel 1 exit /b 1
209 | echo.
210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
211 | goto end
212 | )
213 |
214 | if "%1" == "gettext" (
215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
216 | if errorlevel 1 exit /b 1
217 | echo.
218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
219 | goto end
220 | )
221 |
222 | if "%1" == "changes" (
223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
224 | if errorlevel 1 exit /b 1
225 | echo.
226 | echo.The overview file is in %BUILDDIR%/changes.
227 | goto end
228 | )
229 |
230 | if "%1" == "linkcheck" (
231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
232 | if errorlevel 1 exit /b 1
233 | echo.
234 | echo.Link check complete; look for any errors in the above output ^
235 | or in %BUILDDIR%/linkcheck/output.txt.
236 | goto end
237 | )
238 |
239 | if "%1" == "doctest" (
240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
241 | if errorlevel 1 exit /b 1
242 | echo.
243 | echo.Testing of doctests in the sources finished, look at the ^
244 | results in %BUILDDIR%/doctest/output.txt.
245 | goto end
246 | )
247 |
248 | if "%1" == "coverage" (
249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
250 | if errorlevel 1 exit /b 1
251 | echo.
252 | echo.Testing of coverage in the sources finished, look at the ^
253 | results in %BUILDDIR%/coverage/python.txt.
254 | goto end
255 | )
256 |
257 | if "%1" == "xml" (
258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
259 | if errorlevel 1 exit /b 1
260 | echo.
261 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
262 | goto end
263 | )
264 |
265 | if "%1" == "pseudoxml" (
266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
267 | if errorlevel 1 exit /b 1
268 | echo.
269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
270 | goto end
271 | )
272 |
273 | if "%1" == "dummy" (
274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
275 | if errorlevel 1 exit /b 1
276 | echo.
277 | echo.Build finished. Dummy builder generates no files.
278 | goto end
279 | )
280 |
281 | :end
282 |
--------------------------------------------------------------------------------
/docs/spelling_wordlist.txt:
--------------------------------------------------------------------------------
1 | Bluesky
2 | Bugfixes
3 | CPython
4 | Changelog
5 | Codecov
6 | Cython
7 | GPG
8 | IPv
9 | PRs
10 | PYX
11 | Towncrier
12 | Twitter
13 | UTF
14 | aiohttp
15 | armv
16 | ascii
17 | backend
18 | boolean
19 | booleans
20 | bools
21 | changelog
22 | changelogs
23 | config
24 | de
25 | decodable
26 | dev
27 | dists
28 | downstreams
29 | facto
30 | glibc
31 | google
32 | hardcoded
33 | hostnames
34 | macOS
35 | mailto
36 | manylinux
37 | multi
38 | nightlies
39 | pre
40 | pytest
41 | rc
42 | reStructuredText
43 | reencoding
44 | requote
45 | requoting
46 | runtimes
47 | sdist
48 | subclass
49 | subclasses
50 | subcomponent
51 | svetlov
52 | uncompiled
53 | unencoded
54 | unquoter
55 | v1
56 | yarl
57 |
--------------------------------------------------------------------------------
/docs/yarl-icon-128x128.xcf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aio-libs/yarl/b1ce3027d0d1ae790337af3d569345c42dbfcdee/docs/yarl-icon-128x128.xcf
--------------------------------------------------------------------------------
/packaging/README.md:
--------------------------------------------------------------------------------
1 | # `pep517_backend` in-tree build backend
2 |
3 | The `pep517_backend.hooks` importable exposes callables declared by PEP 517
4 | and PEP 660 and is integrated into `pyproject.toml`'s
5 | `[build-system].build-backend` through `[build-system].backend-path`.
6 |
7 | # Design considerations
8 |
9 | `__init__.py` is to remain empty, leaving `hooks.py` the only entrypoint
10 | exposing the callables. The logic is contained in private modules. This is
11 | to prevent import-time side effects.
12 |
--------------------------------------------------------------------------------
/packaging/pep517_backend/__init__.py:
--------------------------------------------------------------------------------
1 | """PEP 517 build backend for optionally pre-building Cython."""
2 |
--------------------------------------------------------------------------------
/packaging/pep517_backend/__main__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from . import cli
4 |
5 | if __name__ == "__main__":
6 | sys.exit(cli.run_main_program(argv=sys.argv))
7 |
--------------------------------------------------------------------------------
/packaging/pep517_backend/_backend.py:
--------------------------------------------------------------------------------
1 | # fmt: off
2 | """PEP 517 build backend wrapper for pre-building Cython for wheel."""
3 |
4 | from __future__ import annotations
5 |
6 | import os
7 | import sysconfig
8 | from collections.abc import Iterator
9 | from contextlib import contextmanager, nullcontext, suppress
10 | from functools import partial
11 | from pathlib import Path
12 | from shutil import copytree
13 | from sys import implementation as _system_implementation
14 | from sys import stderr as _standard_error_stream
15 | from tempfile import TemporaryDirectory
16 | from typing import Union
17 | from warnings import warn as _warn_that
18 |
19 | from setuptools.build_meta import build_sdist as _setuptools_build_sdist
20 | from setuptools.build_meta import build_wheel as _setuptools_build_wheel
21 | from setuptools.build_meta import (
22 | get_requires_for_build_wheel as _setuptools_get_requires_for_build_wheel,
23 | )
24 | from setuptools.build_meta import (
25 | prepare_metadata_for_build_wheel as _setuptools_prepare_metadata_for_build_wheel,
26 | )
27 |
28 | try:
29 | from setuptools.build_meta import build_editable as _setuptools_build_editable
30 | except ImportError:
31 | _setuptools_build_editable = None # type: ignore[assignment]
32 |
33 |
34 | # isort: split
35 | from distutils.command.install import install as _distutils_install_cmd
36 | from distutils.core import Distribution as _DistutilsDistribution
37 | from distutils.dist import DistributionMetadata as _DistutilsDistributionMetadata
38 |
39 | with suppress(ImportError):
40 | # NOTE: Only available for wheel builds that bundle C-extensions. Declared
41 | # NOTE: by `get_requires_for_build_wheel()` and
42 | # NOTE: `get_requires_for_build_editable()`, when `pure-python`
43 | # NOTE: is not passed.
44 | from Cython.Build.Cythonize import main as _cythonize_cli_cmd
45 |
46 | from ._compat import chdir_cm
47 | from ._cython_configuration import get_local_cython_config as _get_local_cython_config
48 | from ._cython_configuration import (
49 | make_cythonize_cli_args_from_config as _make_cythonize_cli_args_from_config,
50 | )
51 | from ._cython_configuration import patched_env as _patched_cython_env
52 | from ._transformers import sanitize_rst_roles
53 |
54 | __all__ = ( # noqa: WPS410
55 | 'build_sdist',
56 | 'build_wheel',
57 | 'get_requires_for_build_wheel',
58 | 'prepare_metadata_for_build_wheel',
59 | *(
60 | () if _setuptools_build_editable is None # type: ignore[redundant-expr]
61 | else (
62 | 'build_editable',
63 | 'get_requires_for_build_editable',
64 | 'prepare_metadata_for_build_editable',
65 | )
66 | ),
67 | )
68 |
69 | _ConfigDict = dict[str, Union[str, list[str], None]]
70 |
71 |
72 | CYTHON_TRACING_CONFIG_SETTING = 'with-cython-tracing'
73 | """Config setting name toggle used to include line tracing in C-exts."""
74 |
75 | CYTHON_TRACING_ENV_VAR = 'YARL_CYTHON_TRACING'
76 | """Environment variable name toggle used to include line tracing in C-exts."""
77 |
78 | PURE_PYTHON_CONFIG_SETTING = 'pure-python'
79 | """Config setting name toggle that is used to opt out of making C-exts."""
80 |
81 | PURE_PYTHON_ENV_VAR = 'YARL_NO_EXTENSIONS'
82 | """Environment variable name toggle used to opt out of making C-exts."""
83 |
84 | IS_CPYTHON = _system_implementation.name == "cpython"
85 | """A flag meaning that the current interpreter implementation is CPython."""
86 |
87 | PURE_PYTHON_MODE_CLI_FALLBACK = not IS_CPYTHON
88 | """A fallback used when the ``pure-python`` config setting is not set."""
89 |
90 |
91 | def _is_truthy_setting_value(setting_value: str) -> bool:
92 | truthy_values = {'', None, 'true', '1', 'on'}
93 | return setting_value.lower() in truthy_values
94 |
95 |
96 | def _get_setting_value(
97 | config_settings: _ConfigDict | None = None,
98 | config_setting_name: str | None = None,
99 | env_var_name: str | None = None,
100 | *,
101 | default: bool = False,
102 | ) -> bool:
103 | user_provided_setting_sources = (
104 | (config_settings, config_setting_name, (KeyError, TypeError)),
105 | (os.environ, env_var_name, KeyError),
106 | )
107 | for src_mapping, src_key, lookup_errors in user_provided_setting_sources:
108 | if src_key is None:
109 | continue
110 |
111 | with suppress(lookup_errors): # type: ignore[arg-type]
112 | return _is_truthy_setting_value(src_mapping[src_key]) # type: ignore[arg-type,index]
113 |
114 | return default
115 |
116 |
117 | def _make_pure_python(config_settings: _ConfigDict | None = None) -> bool:
118 | return _get_setting_value(
119 | config_settings,
120 | PURE_PYTHON_CONFIG_SETTING,
121 | PURE_PYTHON_ENV_VAR,
122 | default=PURE_PYTHON_MODE_CLI_FALLBACK,
123 | )
124 |
125 |
126 | def _include_cython_line_tracing(
127 | config_settings: _ConfigDict | None = None,
128 | *,
129 | default: bool = False,
130 | ) -> bool:
131 | return _get_setting_value(
132 | config_settings,
133 | CYTHON_TRACING_CONFIG_SETTING,
134 | CYTHON_TRACING_ENV_VAR,
135 | default=default,
136 | )
137 |
138 |
139 | @contextmanager
140 | def patched_distutils_cmd_install() -> Iterator[None]:
141 | """Make `install_lib` of `install` cmd always use `platlib`.
142 |
143 | :yields: None
144 | """
145 | # Without this, build_lib puts stuff under `*.data/purelib/` folder
146 | orig_finalize = _distutils_install_cmd.finalize_options
147 |
148 | def new_finalize_options(self: _distutils_install_cmd) -> None:
149 | self.install_lib = self.install_platlib
150 | orig_finalize(self)
151 |
152 | _distutils_install_cmd.finalize_options = new_finalize_options # type: ignore[method-assign]
153 | try:
154 | yield
155 | finally:
156 | _distutils_install_cmd.finalize_options = orig_finalize # type: ignore[method-assign]
157 |
158 |
159 | @contextmanager
160 | def patched_dist_has_ext_modules() -> Iterator[None]:
161 | """Make `has_ext_modules` of `Distribution` always return `True`.
162 |
163 | :yields: None
164 | """
165 | # Without this, build_lib puts stuff under `*.data/platlib/` folder
166 | orig_func = _DistutilsDistribution.has_ext_modules
167 |
168 | _DistutilsDistribution.has_ext_modules = lambda *args, **kwargs: True # type: ignore[method-assign]
169 | try:
170 | yield
171 | finally:
172 | _DistutilsDistribution.has_ext_modules = orig_func # type: ignore[method-assign]
173 |
174 |
175 | @contextmanager
176 | def patched_dist_get_long_description() -> Iterator[None]:
177 | """Make `get_long_description` of `DistributionMetadata` sanitize RST roles.
178 |
179 | :yields: None
180 | """
181 | # Without this, Sphinx-specific RST roles leak into the PyPI long description
182 | _orig_func = _DistutilsDistributionMetadata.get_long_description
183 |
184 | def _get_sanitized_long_description(self: _DistutilsDistributionMetadata) -> str:
185 | assert self.long_description is not None
186 | return sanitize_rst_roles(self.long_description)
187 |
188 | _DistutilsDistributionMetadata.get_long_description = ( # type: ignore[method-assign]
189 | _get_sanitized_long_description
190 | )
191 | try:
192 | yield
193 | finally:
194 | _DistutilsDistributionMetadata.get_long_description = _orig_func # type: ignore[method-assign]
195 |
196 |
197 | def _exclude_dir_path(
198 | excluded_dir_path: Path,
199 | visited_directory: str,
200 | _visited_dir_contents: list[str],
201 | ) -> list[str]:
202 | """Prevent recursive directory traversal."""
203 | # This stops the temporary directory from being copied
204 | # into self recursively forever.
205 | # Ref: https://github.com/aio-libs/yarl/issues/992
206 | visited_directory_subdirs_to_ignore = [
207 | subdir
208 | for subdir in _visited_dir_contents
209 | if excluded_dir_path == Path(visited_directory) / subdir
210 | ]
211 | if visited_directory_subdirs_to_ignore:
212 | print(
213 | f'Preventing `{excluded_dir_path !s}` from being '
214 | 'copied into itself recursively...',
215 | file=_standard_error_stream,
216 | )
217 | return visited_directory_subdirs_to_ignore
218 |
219 |
220 | @contextmanager
221 | def _in_temporary_directory(src_dir: Path) -> Iterator[None]:
222 | with TemporaryDirectory(prefix='.tmp-yarl-pep517-') as tmp_dir:
223 | tmp_dir_path = Path(tmp_dir)
224 | root_tmp_dir_path = tmp_dir_path.parent
225 | _exclude_tmpdir_parent = partial(_exclude_dir_path, root_tmp_dir_path)
226 |
227 | with chdir_cm(tmp_dir):
228 | tmp_src_dir = tmp_dir_path / 'src'
229 | copytree(
230 | src_dir,
231 | tmp_src_dir,
232 | ignore=_exclude_tmpdir_parent,
233 | symlinks=True,
234 | )
235 | os.chdir(tmp_src_dir)
236 | yield
237 |
238 |
239 | @contextmanager
240 | def maybe_prebuild_c_extensions(
241 | line_trace_cython_when_unset: bool = False,
242 | build_inplace: bool = False,
243 | config_settings: _ConfigDict | None = None,
244 | ) -> Iterator[None]:
245 | """Pre-build C-extensions in a temporary directory, when needed.
246 |
247 | This context manager also patches metadata, setuptools and distutils.
248 |
249 | :param build_inplace: Whether to build in-place instead of in a temporary copy.
250 | :param config_settings: :pep:`517` config settings mapping.
251 |
252 | """
253 | cython_line_tracing_requested = _include_cython_line_tracing(
254 | config_settings,
255 | default=line_trace_cython_when_unset,
256 | )
257 | is_pure_python_build = _make_pure_python(config_settings)
258 |
259 | if is_pure_python_build:
260 | print("*********************", file=_standard_error_stream)
261 | print("* Pure Python build *", file=_standard_error_stream)
262 | print("*********************", file=_standard_error_stream)
263 |
264 | if cython_line_tracing_requested:
265 | _warn_that(
266 | f'The `{CYTHON_TRACING_CONFIG_SETTING !s}` setting requesting '
267 | 'Cython line tracing is set, but building C-extensions is not. '
268 | 'This option will not have any effect in the pure-python '
269 | 'build mode.',
270 | RuntimeWarning,
271 | stacklevel=999,
272 | )
273 |
274 | yield
275 | return
276 |
277 | print("**********************", file=_standard_error_stream)
278 | print("* Accelerated build *", file=_standard_error_stream)
279 | print("**********************", file=_standard_error_stream)
280 | if not IS_CPYTHON:
281 | _warn_that(
282 | 'Building C-extensions under runtimes other than CPython is '
283 | 'unsupported and will likely fail. Consider passing the '
284 | f'`{PURE_PYTHON_CONFIG_SETTING !s}` PEP 517 config setting.',
285 | RuntimeWarning,
286 | stacklevel=999,
287 | )
288 |
289 | build_dir_ctx = (
290 | nullcontext() if build_inplace
291 | else _in_temporary_directory(src_dir=Path.cwd().resolve())
292 | )
293 | with build_dir_ctx:
294 | config = _get_local_cython_config()
295 |
296 | cythonize_args = _make_cythonize_cli_args_from_config(config)
297 | with _patched_cython_env(config['env'], cython_line_tracing_requested):
298 | _cythonize_cli_cmd(cythonize_args) # type: ignore[no-untyped-call]
299 | with patched_distutils_cmd_install():
300 | with patched_dist_has_ext_modules():
301 | yield
302 |
303 |
304 | @patched_dist_get_long_description()
305 | def build_wheel(
306 | wheel_directory: str,
307 | config_settings: _ConfigDict | None = None,
308 | metadata_directory: str | None = None,
309 | ) -> str:
310 | """Produce a built wheel.
311 |
312 | This wraps the corresponding ``setuptools``' build backend hook.
313 |
314 | :param wheel_directory: Directory to put the resulting wheel in.
315 | :param config_settings: :pep:`517` config settings mapping.
316 | :param metadata_directory: :file:`.dist-info` directory path.
317 |
318 | """
319 | with maybe_prebuild_c_extensions(
320 | line_trace_cython_when_unset=False,
321 | build_inplace=False,
322 | config_settings=config_settings,
323 | ):
324 | return _setuptools_build_wheel(
325 | wheel_directory=wheel_directory,
326 | config_settings=config_settings,
327 | metadata_directory=metadata_directory,
328 | )
329 |
330 |
331 | @patched_dist_get_long_description()
332 | def build_editable(
333 | wheel_directory: str,
334 | config_settings: _ConfigDict | None = None,
335 | metadata_directory: str | None = None,
336 | ) -> str:
337 | """Produce a built wheel for editable installs.
338 |
339 | This wraps the corresponding ``setuptools``' build backend hook.
340 |
341 | :param wheel_directory: Directory to put the resulting wheel in.
342 | :param config_settings: :pep:`517` config settings mapping.
343 | :param metadata_directory: :file:`.dist-info` directory path.
344 |
345 | """
346 | with maybe_prebuild_c_extensions(
347 | line_trace_cython_when_unset=True,
348 | build_inplace=True,
349 | config_settings=config_settings,
350 | ):
351 | return _setuptools_build_editable(
352 | wheel_directory=wheel_directory,
353 | config_settings=config_settings,
354 | metadata_directory=metadata_directory,
355 | )
356 |
357 |
358 | def get_requires_for_build_wheel(
359 | config_settings: _ConfigDict | None = None,
360 | ) -> list[str]:
361 | """Determine additional requirements for building wheels.
362 |
363 | :param config_settings: :pep:`517` config settings mapping.
364 |
365 | """
366 | is_pure_python_build = _make_pure_python(config_settings)
367 |
368 | if not is_pure_python_build and not IS_CPYTHON:
369 | _warn_that(
370 | 'Building C-extensions under runtimes other than CPython is '
371 | 'unsupported and will likely fail. Consider passing the '
372 | f'`{PURE_PYTHON_CONFIG_SETTING !s}` PEP 517 config setting.',
373 | RuntimeWarning,
374 | stacklevel=999,
375 | )
376 |
377 | if is_pure_python_build:
378 | c_ext_build_deps = []
379 | elif sysconfig.get_config_var('Py_GIL_DISABLED'):
380 | c_ext_build_deps = ['Cython ~= 3.1.0a1']
381 | else:
382 | c_ext_build_deps = ['Cython >= 3.0.12']
383 |
384 | return _setuptools_get_requires_for_build_wheel(
385 | config_settings=config_settings,
386 | ) + c_ext_build_deps
387 |
388 |
389 | build_sdist = patched_dist_get_long_description()(_setuptools_build_sdist)
390 | get_requires_for_build_editable = get_requires_for_build_wheel
391 | prepare_metadata_for_build_wheel = patched_dist_get_long_description()(
392 | _setuptools_prepare_metadata_for_build_wheel,
393 | )
394 | prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel
395 |
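396 | # Usage sketch (illustrative only, not part of the backend API): the toggles
397 | # defined above are consumed as PEP 517 config settings or environment
398 | # variables, e.g.:
399 | #
400 | #     python -m build --config-setting=pure-python=true   # skip the C-exts
401 | #     YARL_NO_EXTENSIONS=1 pip install .                   # same, via env var
402 | #     python -m build --config-setting=with-cython-tracing=true
403 | #
404 | # `_get_setting_value()` consults the config-settings mapping first and then
405 | # the environment variable; '', '1', 'true' and 'on' (case-insensitively) are
406 | # treated as enabled.
407 | 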
--------------------------------------------------------------------------------
/packaging/pep517_backend/_compat.py:
--------------------------------------------------------------------------------
1 | """Cross-python stdlib shims."""
2 |
3 | import os
4 | import sys
5 | from collections.abc import Iterator
6 | from contextlib import contextmanager
7 | from pathlib import Path
8 |
9 | if sys.version_info >= (3, 11):
10 | from contextlib import chdir as chdir_cm
11 | from tomllib import loads as load_toml_from_string
12 | else:
13 | from tomli import loads as load_toml_from_string
14 |
15 | @contextmanager # type: ignore[no-redef]
16 | def chdir_cm(path: "os.PathLike[str]") -> Iterator[None]:
17 | """Temporarily change the current directory, recovering on exit."""
18 | original_wd = Path.cwd()
19 | os.chdir(path)
20 | try:
21 | yield
22 | finally:
23 | os.chdir(original_wd)
24 |
25 |
26 | __all__ = ("chdir_cm", "load_toml_from_string") # noqa: WPS410
27 |
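28 | # Usage sketch (illustrative only): on Python < 3.11 the shim above mirrors
29 | # `contextlib.chdir`, restoring the original working directory even when the
30 | # body raises:
31 | #
32 | #     with chdir_cm("packaging"):
33 | #         ...  # the current working directory is ./packaging here
34 | #     # the previous working directory is restored afterwards
35 | 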
--------------------------------------------------------------------------------
/packaging/pep517_backend/_cython_configuration.py:
--------------------------------------------------------------------------------
1 | # fmt: off
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | from collections.abc import Iterator
7 | from contextlib import contextmanager
8 | from pathlib import Path
9 | from sys import version_info as _python_version_tuple
10 | from typing import TypedDict
11 |
12 | from expandvars import expandvars
13 |
14 | from ._compat import load_toml_from_string
15 | from ._transformers import get_cli_kwargs_from_config, get_enabled_cli_flags_from_config
16 |
17 |
18 | class Config(TypedDict):
19 | env: dict[str, str]
20 | flags: dict[str, bool]
21 | kwargs: dict[str, str]
22 | src: list[str]
23 |
24 |
25 | def get_local_cython_config() -> Config:
26 | """Grab optional build dependencies from pyproject.toml config.
27 |
28 | :returns: config section from ``pyproject.toml``
29 | :rtype: dict
30 |
31 | This basically reads entries from::
32 |
33 | [tool.local.cythonize]
34 | # This attr can contain multiple globs
35 | src = ["src/**/*.pyx"]
36 |
37 | [tool.local.cythonize.env]
38 | # Env vars provisioned during cythonize call
39 | LDFLAGS = "-lssh"
40 |
41 | [tool.local.cythonize.flags]
42 | # This section can contain the following booleans:
43 | # * annotate — generate annotated HTML page for source files
44 | # * build — build extension modules using distutils
45 | # * inplace — build extension modules in place using distutils (implies -b)
46 | # * force — force recompilation
47 | # * quiet — be less verbose during compilation
48 | # * lenient — increase Python compat by ignoring some compile time errors
49 | # * keep-going — compile as much as possible, ignore compilation failures
50 | annotate = false
51 | build = false
52 | inplace = true
53 | force = true
54 | quiet = false
55 | lenient = false
56 | keep-going = false
57 |
58 | [tool.local.cythonize.kwargs]
59 | # This section can contain args that have values:
60 | # * exclude=PATTERN exclude certain file patterns from the compilation
61 | # * parallel=N run builds in N parallel jobs (default: calculated per system)
62 | exclude = "**.py"
63 | parallel = 12
64 |
65 | [tool.local.cythonize.kwargs.directives]
66 | # This section can contain compiler directives
67 | # NAME = "VALUE"
68 |
69 | [tool.local.cythonize.kwargs.compile-time-env]
70 | # This section can contain compile time env vars
71 | # NAME = "VALUE"
72 |
73 | [tool.local.cythonize.kwargs.options]
74 | # This section can contain cythonize options
75 | # NAME = "VALUE"
76 | """
77 | config_toml_txt = (Path.cwd().resolve() / 'pyproject.toml').read_text()
78 | config_mapping = load_toml_from_string(config_toml_txt)
79 | return config_mapping['tool']['local']['cythonize'] # type: ignore[no-any-return]
80 |
81 |
82 | def make_cythonize_cli_args_from_config(config: Config) -> list[str]:
83 | py_ver_arg = f'-{_python_version_tuple.major!s}'
84 |
85 | cli_flags = get_enabled_cli_flags_from_config(config['flags'])
86 | cli_kwargs = get_cli_kwargs_from_config(config['kwargs'])
87 |
88 | return cli_flags + [py_ver_arg] + cli_kwargs + ['--'] + config['src']
89 |
90 |
91 | @contextmanager
92 | def patched_env(env: dict[str, str], cython_line_tracing_requested: bool) -> Iterator[None]:
93 | """Temporarily set the given env vars.
94 |
95 | :param env: tmp env vars to set
96 | :type env: dict
97 |
98 | :yields: None
99 | """
100 | orig_env = os.environ.copy()
101 | expanded_env = {name: expandvars(var_val) for name, var_val in env.items()} # type: ignore[no-untyped-call]
102 | os.environ.update(expanded_env)
103 |
104 | if cython_line_tracing_requested:
105 | os.environ['CFLAGS'] = ' '.join((
106 | os.getenv('CFLAGS', ''),
107 | '-DCYTHON_TRACE_NOGIL=1', # Implies CYTHON_TRACE=1
108 | )).strip()
109 | try:
110 | yield
111 | finally:
112 | os.environ.clear()
113 | os.environ.update(orig_env)
114 |
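115 | # Illustrative sketch of the config-to-CLI translation performed above; the
116 | # sample values loosely mirror the `[tool.local.cythonize]` example from the
117 | # `get_local_cython_config()` docstring rather than the real project config:
118 | #
119 | #     config = {
120 | #         "env": {},
121 | #         "flags": {"inplace": True, "force": True, "annotate": False},
122 | #         "kwargs": {"exclude": "**.py"},
123 | #         "src": ["yarl/*.pyx"],
124 | #     }
125 | #     make_cythonize_cli_args_from_config(config)
126 | #     # -> ['--inplace', '--force', '-3', '--exclude=**.py', '--', 'yarl/*.pyx']
127 | 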
--------------------------------------------------------------------------------
/packaging/pep517_backend/_transformers.py:
--------------------------------------------------------------------------------
1 | """Data conversion helpers for the in-tree PEP 517 build backend."""
2 |
3 | from collections.abc import Iterable, Iterator, Mapping
4 | from itertools import chain
5 | from re import sub as _substitute_with_regexp
6 | from typing import Union
7 |
8 |
9 | def _emit_opt_pairs(opt_pair: tuple[str, Union[dict[str, str], str]]) -> Iterator[str]:
10 | flag, flag_value = opt_pair
11 | flag_opt = f"--{flag!s}"
12 | if isinstance(flag_value, dict):
13 | sub_pairs: Iterable[tuple[str, ...]] = flag_value.items()
14 | else:
15 | sub_pairs = ((flag_value,),)
16 |
17 | yield from ("=".join(map(str, (flag_opt,) + pair)) for pair in sub_pairs)
18 |
19 |
20 | def get_cli_kwargs_from_config(kwargs_map: dict[str, str]) -> list[str]:
21 | """Make a list of options with values from config."""
22 | return list(chain.from_iterable(map(_emit_opt_pairs, kwargs_map.items())))
23 |
24 |
25 | def get_enabled_cli_flags_from_config(flags_map: Mapping[str, bool]) -> list[str]:
26 | """Make a list of enabled boolean flags from config."""
27 | return [f"--{flag}" for flag, is_enabled in flags_map.items() if is_enabled]
28 |
29 |
30 | def sanitize_rst_roles(rst_source_text: str) -> str:
31 | """Replace RST roles with inline highlighting."""
32 | pep_role_regex = r"""(?x)
33 | :pep:`(?P<pep_number>\d+)`
34 | """
35 | pep_substitution_pattern = (
36 | r"`PEP \g<pep_number> <https://peps.python.org/pep-\g<pep_number>>`__"
37 | )
38 | 
39 | user_role_regex = r"""(?x)
40 | :user:`(?P<github_username>[^`]+)(?:\s+(.*))?`
41 | """
42 | user_substitution_pattern = (
43 | r"`@\g<github_username> "
44 | r"<https://github.com/sponsors/\g<github_username>>`__"
45 | )
46 | 
47 | issue_role_regex = r"""(?x)
48 | :issue:`(?P<issue_number>[^`]+)(?:\s+(.*))?`
49 | """
50 | issue_substitution_pattern = (
51 | r"`#\g<issue_number> "
52 | r"<https://github.com/aio-libs/yarl/issues/\g<issue_number>>`__"
53 | )
54 | 
55 | pr_role_regex = r"""(?x)
56 | :pr:`(?P<pr_number>[^`]+)(?:\s+(.*))?`
57 | """
58 | pr_substitution_pattern = (
59 | r"`PR #\g<pr_number> "
60 | r"<https://github.com/aio-libs/yarl/pull/\g<pr_number>>`__"
61 | )
62 | 
63 | commit_role_regex = r"""(?x)
64 | :commit:`(?P<commit_sha>[^`]+)(?:\s+(.*))?`
65 | """
66 | commit_substitution_pattern = (
67 | r"`\g<commit_sha> "
68 | r"<https://github.com/aio-libs/yarl/commit/\g<commit_sha>>`__"
69 | )
70 | 
71 | gh_role_regex = r"""(?x)
72 | :gh:`(?P<gh_slug>[^`<]+)(?:\s+([^`]*))?`
73 | """
74 | gh_substitution_pattern = r"GitHub: ``\g<gh_slug>``"
75 | 
76 | meth_role_regex = r"""(?x)
77 | (?::py)?:meth:`~?(?P<meth_name>[^`<]+)(?:\s+([^`]*))?`
78 | """
79 | meth_substitution_pattern = r"``\g<meth_name>()``"
80 | 
81 | role_regex = r"""(?x)
82 | (?::\w+)?:\w+:`(?P<rendered_text>[^`<]+)(?:\s+([^`]*))?`
83 | """
84 | substitution_pattern = r"``\g<rendered_text>``"
85 |
86 | project_substitution_regex = r"\|project\|"
87 | project_substitution_pattern = "yarl"
88 |
89 | substitutions = (
90 | (pep_role_regex, pep_substitution_pattern),
91 | (user_role_regex, user_substitution_pattern),
92 | (issue_role_regex, issue_substitution_pattern),
93 | (pr_role_regex, pr_substitution_pattern),
94 | (commit_role_regex, commit_substitution_pattern),
95 | (gh_role_regex, gh_substitution_pattern),
96 | (meth_role_regex, meth_substitution_pattern),
97 | (role_regex, substitution_pattern),
98 | (project_substitution_regex, project_substitution_pattern),
99 | )
100 |
101 | rst_source_normalized_text = rst_source_text
102 | for regex, substitution in substitutions:
103 | rst_source_normalized_text = _substitute_with_regexp(
104 | regex,
105 | substitution,
106 | rst_source_normalized_text,
107 | )
108 |
109 | return rst_source_normalized_text
110 |
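111 | # Illustrative behaviour of the helpers above (comments only; the sample
112 | # inputs are made up for demonstration, not taken from the project config):
113 | #
114 | #     get_enabled_cli_flags_from_config({"inplace": True, "quiet": False})
115 | #     # -> ['--inplace']
116 | #     get_cli_kwargs_from_config({"parallel": 12, "directive": {"linetrace": "True"}})
117 | #     # -> ['--parallel=12', '--directive=linetrace=True']
118 | #     sanitize_rst_roles("See :class:`yarl.URL` for details.")
119 | #     # -> 'See ``yarl.URL`` for details.'
120 | 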
--------------------------------------------------------------------------------
/packaging/pep517_backend/cli.py:
--------------------------------------------------------------------------------
1 | # fmt: off
2 |
3 | from __future__ import annotations
4 |
5 | import sys
6 | from collections.abc import Sequence
7 | from itertools import chain
8 | from pathlib import Path
9 |
10 | from Cython.Compiler.CmdLine import parse_command_line as _split_cython_cli_args
11 | from Cython.Compiler.Main import compile as _translate_cython_cli_cmd
12 |
13 | from ._cython_configuration import get_local_cython_config as _get_local_cython_config
14 | from ._cython_configuration import (
15 | make_cythonize_cli_args_from_config as _make_cythonize_cli_args_from_config,
16 | )
17 | from ._cython_configuration import patched_env as _patched_cython_env
18 |
19 | _PROJECT_PATH = Path(__file__).parents[2]
20 |
21 |
22 | def run_main_program(argv: Sequence[str]) -> int | str:
23 | """Invoke ``translate-cython`` or fail."""
24 | if len(argv) != 2:
25 | return 'This program only accepts one argument -- "translate-cython"'
26 |
27 | if argv[1] != 'translate-cython':
28 | return 'This program only implements the "translate-cython" subcommand'
29 |
30 | config = _get_local_cython_config()
31 | config['flags'] = {'keep-going': config['flags']['keep-going']}
32 | config['src'] = list(
33 | map(
34 | str,
35 | chain.from_iterable(
36 | map(_PROJECT_PATH.glob, config['src']),
37 | ),
38 | ),
39 | )
40 | translate_cython_cli_args = _make_cythonize_cli_args_from_config(config)
41 |
42 | cython_options, cython_sources = _split_cython_cli_args( # type: ignore[no-untyped-call]
43 | translate_cython_cli_args,
44 | )
45 |
46 | with _patched_cython_env(config['env'], cython_line_tracing_requested=True):
47 | return _translate_cython_cli_cmd( # type: ignore[no-any-return,no-untyped-call]
48 | cython_sources,
49 | cython_options,
50 | ).num_errors
51 |
52 |
53 | if __name__ == '__main__':
54 | sys.exit(run_main_program(argv=sys.argv))
55 |
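56 | # Usage sketch (illustrative only; requires Cython and expandvars to be
57 | # installed): regenerate the C sources for the globs listed under
58 | # `[tool.local.cythonize].src`, running from the repository root so that the
59 | # relative `pyproject.toml` lookup succeeds:
60 | #
61 | #     PYTHONPATH=packaging python -m pep517_backend translate-cython
62 | 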
--------------------------------------------------------------------------------
/packaging/pep517_backend/hooks.py:
--------------------------------------------------------------------------------
1 | """PEP 517 build backend for optionally pre-building Cython."""
2 |
3 | from contextlib import suppress as _suppress
4 |
5 | from setuptools.build_meta import * # Re-exporting PEP 517 hooks # pylint: disable=unused-wildcard-import,wildcard-import # noqa: F401, F403
6 |
7 | # Re-exporting PEP 517 hooks
8 | from ._backend import ( # type: ignore[assignment]
9 | build_sdist,
10 | build_wheel,
11 | get_requires_for_build_wheel,
12 | prepare_metadata_for_build_wheel,
13 | )
14 |
15 | with _suppress(ImportError): # Only succeeds w/ setuptools implementing PEP 660
16 | # Re-exporting PEP 660 hooks
17 | from ._backend import ( # type: ignore[assignment]
18 | build_editable,
19 | get_requires_for_build_editable,
20 | prepare_metadata_for_build_editable,
21 | )
22 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | # NOTE: The following build dependencies are necessary for initial
4 | # NOTE: provisioning of the in-tree build backend located under
5 | # NOTE: `packaging/pep517_backend/`.
6 | "expandvars",
7 | "setuptools >= 47", # Minimum required for `version = attr:`
8 | "tomli; python_version < '3.11'",
9 | ]
10 | backend-path = ["packaging"] # requires `pip >= 20` or `pep517 >= 0.6.0`
11 | build-backend = "pep517_backend.hooks" # wraps `setuptools.build_meta`
12 |
13 | [tool.local.cythonize]
14 | # This attr can contain multiple globs
15 | src = ["yarl/*.pyx"]
16 |
17 | [tool.local.cythonize.env]
18 | # Env vars provisioned during cythonize call
19 | #CFLAGS = "-DCYTHON_TRACE=1 ${CFLAGS}"
20 | #LDFLAGS = "${LDFLAGS}"
21 |
22 | [tool.local.cythonize.flags]
23 | # This section can contain the following booleans:
24 | # * annotate — generate annotated HTML page for source files
25 | # * build — build extension modules using distutils
26 | # * inplace — build extension modules in place using distutils (implies -b)
27 | # * force — force recompilation
28 | # * quiet — be less verbose during compilation
29 | # * lenient — increase Python compat by ignoring some compile time errors
30 | # * keep-going — compile as much as possible, ignore compilation failures
31 | annotate = false
32 | build = false
33 | inplace = true
34 | force = true
35 | quiet = false
36 | lenient = false
37 | keep-going = false
38 |
39 | [tool.local.cythonize.kwargs]
40 | # This section can contain args that have values:
41 | # * exclude=PATTERN exclude certain file patterns from the compilation
42 | # * parallel=N run builds in N parallel jobs (default: calculated per system)
43 | # exclude = "**.py"
44 | # parallel = 12
45 |
46 | [tool.local.cythonize.kwargs.directive]
47 | # This section can contain compiler directives. Ref:
48 | # https://cython.rtfd.io/en/latest/src/userguide/source_files_and_compilation.html#compiler-directives
49 | embedsignature = "True"
50 | emit_code_comments = "True"
51 | linetrace = "True" # Implies `profile=True`
52 |
53 | [tool.local.cythonize.kwargs.compile-time-env]
54 | # This section can contain compile time env vars
55 |
56 | [tool.local.cythonize.kwargs.option]
57 | # This section can contain cythonize options
58 | # Ref: https://github.com/cython/cython/blob/d6e6de9/Cython/Compiler/Options.py#L694-L730
59 | #docstrings = "True"
60 | #embed_pos_in_docstring = "True"
61 | #warning_errors = "True"
62 | #error_on_unknown_names = "True"
63 | #error_on_uninitialized = "True"
64 |
65 | [tool.cibuildwheel]
66 | build-frontend = "build"
67 | enable = ["cpython-freethreading"]
68 | before-test = [
69 | # NOTE: Attempt to have pip pre-compile PyYAML wheel with our build
70 | # NOTE: constraints unset. The hope is that pip will cache that wheel
71 | # NOTE: and the test env provisioning stage will pick up PyYAML from
72 | # NOTE: said cache rather than attempting to build it with a conflicting
73 | # NOTE: version of Cython.
74 | # Ref: https://github.com/pypa/cibuildwheel/issues/1666
75 | "PIP_CONSTRAINT= pip install PyYAML",
76 | ]
77 | test-requires = "-r requirements/test.txt"
78 | test-command = 'pytest -v -m "not hypothesis" --no-cov {project}/tests'
79 | # don't build PyPy wheels, install from source instead
80 | skip = "pp*"
81 |
82 | [tool.cibuildwheel.environment]
83 | COLOR = "yes"
84 | FORCE_COLOR = "1"
85 | MYPY_FORCE_COLOR = "1"
86 | PIP_CONSTRAINT = "requirements/cython.txt"
87 | PRE_COMMIT_COLOR = "always"
88 | PY_COLORS = "1"
89 |
90 | [tool.cibuildwheel.config-settings]
91 | pure-python = "false"
92 |
93 | [tool.cibuildwheel.windows]
94 | before-test = [] # Windows cmd has different syntax and pip chooses wheels
95 |
96 | # TODO: Remove this when there's a Cython 3.1 final release
97 | # Remove PIP_CONSTRAINT from the environment
98 | [[tool.cibuildwheel.overrides]]
99 | select = "cp313t-*"
100 |
101 | test-requires = "-r requirements/test-freethreading.txt"
102 | inherit.environment = "append"
103 | environment = {PIP_CONSTRAINT = "requirements/cython-freethreading.txt"}
104 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts =
3 | # `pytest-xdist`:
4 | --numprocesses=auto
5 |
6 | # Show 10 slowest invocations:
7 | --durations=10
8 |
9 | # Report all the things == -rxXs:
10 | -ra
11 |
12 | # Show values of the local vars in errors/tracebacks:
13 | --showlocals
14 |
15 | # Autocollect and invoke the doctests from all modules:
16 | # https://docs.pytest.org/en/stable/doctest.html
17 | --doctest-modules
18 |
19 | # Pre-load the `pytest-cov` plugin early:
20 | -p pytest_cov
21 |
22 | # `pytest-cov`:
23 | --cov
24 | --cov-config=.coveragerc
25 | --cov-context=test
26 |
27 | # Fail on config parsing warnings:
28 | # --strict-config
29 |
30 | # Fail on non-existing markers:
31 | # * Deprecated since v6.2.0 but may be reintroduced later covering a
32 | # broader scope:
33 | # --strict
34 | # * Exists since v4.5.0 (advised to be used instead of `--strict`):
35 | --strict-markers
36 |
37 | doctest_optionflags = ALLOW_UNICODE ELLIPSIS
38 |
39 | # Marks tests with an empty parameterset as xfail(run=False)
40 | empty_parameter_set_mark = xfail
41 |
42 | faulthandler_timeout = 30
43 |
44 | filterwarnings =
45 | error
46 |
47 | # FIXME: drop this once `pytest-cov` is updated.
48 | # Ref: https://github.com/pytest-dev/pytest-cov/issues/557
49 | ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning
50 |
51 | # https://github.com/pytest-dev/pytest/issues/10977 and https://github.com/pytest-dev/pytest/pull/10894
52 | ignore:ast\.(Num|NameConstant|Str) is deprecated and will be removed in Python 3\.14; use ast\.Constant instead:DeprecationWarning:_pytest
53 | ignore:Attribute s is deprecated and will be removed in Python 3\.14; use value instead:DeprecationWarning:_pytest
54 |
55 | # https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files
56 | junit_duration_report = call
57 | # xunit1 contains more metadata than xunit2 so it's better for CI UIs:
58 | junit_family = xunit1
59 | junit_logging = all
60 | junit_log_passing_tests = true
61 | junit_suite_name = yarl_test_suite
62 |
63 | # A mapping of markers to their descriptions allowed in strict mode:
64 | markers =
65 |
66 | minversion = 3.8.2
67 |
68 | # Optimize pytest's lookup by restricting potentially deep dir tree scan:
69 | norecursedirs =
70 | build
71 | dist
72 | docs
73 | requirements
74 | venv
75 | virtualenv
76 | yarl.egg-info
77 | .*
78 | *.egg
79 |
80 | testpaths = tests/
81 |
82 | xfail_strict = true
83 |
--------------------------------------------------------------------------------
/requirements/codspeed.txt:
--------------------------------------------------------------------------------
1 | -r test.txt
2 | pytest-codspeed==3.2.0
3 |
--------------------------------------------------------------------------------
/requirements/cython-freethreading.txt:
--------------------------------------------------------------------------------
1 | cython==3.1.0a1
2 |
--------------------------------------------------------------------------------
/requirements/cython.txt:
--------------------------------------------------------------------------------
1 | cython==3.0.12
2 |
--------------------------------------------------------------------------------
/requirements/dev.txt:
--------------------------------------------------------------------------------
1 | -r codspeed.txt
2 | -r towncrier.txt
3 |
--------------------------------------------------------------------------------
/requirements/doc-spelling.txt:
--------------------------------------------------------------------------------
1 | -r doc.txt
2 | sphinxcontrib-spelling==8.0.1; platform_system!="Windows" # We only use it in Azure CI
3 |
--------------------------------------------------------------------------------
/requirements/doc.txt:
--------------------------------------------------------------------------------
1 | -r towncrier.txt
2 | myst-parser >= 0.10.0
3 | sphinx==8.2.3
4 | sphinxcontrib-towncrier
5 |
--------------------------------------------------------------------------------
/requirements/lint.txt:
--------------------------------------------------------------------------------
1 | pre-commit==4.2.0
2 |
--------------------------------------------------------------------------------
/requirements/test-freethreading.txt:
--------------------------------------------------------------------------------
1 | -r cython-freethreading.txt
2 | -r test-pure.txt
3 |
--------------------------------------------------------------------------------
/requirements/test-pure.txt:
--------------------------------------------------------------------------------
1 | covdefaults
2 | hypothesis>=6.0
3 | idna==3.10
4 | multidict==6.4.4
5 | propcache==0.3.1
6 | pytest==8.4.0
7 | pytest-cov>=2.3.1
8 | pytest-xdist
9 |
--------------------------------------------------------------------------------
/requirements/test.txt:
--------------------------------------------------------------------------------
1 | -r cython.txt
2 | -r test-pure.txt
3 |
--------------------------------------------------------------------------------
/requirements/towncrier.txt:
--------------------------------------------------------------------------------
1 | towncrier==23.11.0
2 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | # wheels should be OS-specific:
3 | # their names must contain macOS/manylinux1/2010/2014/Windows identifiers
4 | universal = 0
5 |
6 | [metadata]
7 | name = yarl
8 | version = attr: yarl.__version__
9 | url = https://github.com/aio-libs/yarl
10 | project_urls =
11 | Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org
12 | Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org
13 | CI: GitHub Workflows = https://github.com/aio-libs/yarl/actions?query=branch:master
14 | Code of Conduct = https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
15 | Coverage: codecov = https://codecov.io/github/aio-libs/yarl
16 | Docs: Changelog = https://yarl.aio-libs.org/en/latest/changes/
17 | Docs: RTD = https://yarl.aio-libs.org
18 | GitHub: issues = https://github.com/aio-libs/yarl/issues
19 | GitHub: repo = https://github.com/aio-libs/yarl
20 | description = Yet another URL library
21 | long_description = file: README.rst, CHANGES.rst
22 | long_description_content_type = text/x-rst
23 | author = Andrew Svetlov
24 | author_email = andrew.svetlov@gmail.com
25 | maintainer = aiohttp team
26 | maintainer_email = team@aiohttp.org
27 | license = Apache-2.0
28 | license_files =
29 | LICENSE
30 | NOTICE
31 | classifiers =
32 | Development Status :: 5 - Production/Stable
33 |
34 | Intended Audience :: Developers
35 |
36 | License :: OSI Approved :: Apache Software License
37 |
38 | Programming Language :: Cython
39 | Programming Language :: Python
40 | Programming Language :: Python :: 3
41 | Programming Language :: Python :: 3.9
42 | Programming Language :: Python :: 3.10
43 | Programming Language :: Python :: 3.11
44 | Programming Language :: Python :: 3.12
45 | Programming Language :: Python :: 3.13
46 |
47 | Topic :: Internet :: WWW/HTTP
48 | Topic :: Software Development :: Libraries :: Python Modules
49 | keywords =
50 | cython
51 | cext
52 | yarl
53 |
54 | [options]
55 | python_requires = >=3.9
56 | # Ref:
57 | # https://setuptools.pypa.io/en/latest/userguide/declarative_config.html#using-a-src-layout
58 | # (`src/` layout)
59 | # package_dir =
60 | # = src
61 | packages =
62 | yarl
63 | # https://setuptools.pypa.io/en/latest/deprecated/zip_safe.html
64 | zip_safe = False
65 | include_package_data = True
66 |
67 | install_requires =
68 | idna >= 2.0
69 | multidict >= 4.0
70 | propcache >= 0.2.1
71 |
72 | [options.package_data]
73 | # Ref:
74 | # https://setuptools.pypa.io/en/latest/userguide/datafiles.html#package-data
75 | # (see notes for the asterisk/`*` meaning)
76 | * =
77 | *.so
78 |
79 | [options.exclude_package_data]
80 | * =
81 | *.c
82 | *.h
83 |
84 | [pep8]
85 | max-line-length=79
86 |
87 | [flake8]
88 | extend-select = B950
89 | ignore = E203,E301,E302,E501,E704,W503,W504,F811
90 | max-line-length = 88
91 |
92 | # Allow certain violations in certain files:
93 | per-file-ignores =
94 |
95 | # F401 imported but unused
96 | packaging/pep517_backend/hooks.py: F401
97 |
98 | [isort]
99 | profile=black
100 |
--------------------------------------------------------------------------------
/tests/test_cache.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import yarl
4 |
5 | # Don't check the actual behavior but make sure that calls are allowed
6 |
7 |
8 | def teardown_module() -> None:
9 | yarl.cache_configure()
10 |
11 |
12 | def test_cache_clear() -> None:
13 | yarl.cache_clear()
14 |
15 |
16 | def test_cache_info() -> None:
17 | info = yarl.cache_info()
18 | assert info.keys() == {
19 | "idna_encode",
20 | "idna_decode",
21 | "ip_address",
22 | "host_validate",
23 | "encode_host",
24 | }
25 |
26 |
27 | def test_cache_configure_default() -> None:
28 | yarl.cache_configure()
29 |
30 |
31 | def test_cache_configure_None() -> None:
32 | yarl.cache_configure(
33 | idna_decode_size=None,
34 | idna_encode_size=None,
35 | encode_host_size=None,
36 | )
37 |
38 |
39 | def test_cache_configure_None_including_deprecated() -> None:
40 | msg = (
41 | r"cache_configure\(\) no longer accepts the ip_address_size "
42 | r"or host_validate_size arguments, they are used to set the "
43 | r"encode_host_size instead and will be removed in the future"
44 | )
45 | with pytest.warns(DeprecationWarning, match=msg):
46 | yarl.cache_configure(
47 | idna_decode_size=None,
48 | idna_encode_size=None,
49 | encode_host_size=None,
50 | ip_address_size=None,
51 | host_validate_size=None,
52 | )
53 | assert yarl.cache_info()["idna_decode"].maxsize is None
54 | assert yarl.cache_info()["idna_encode"].maxsize is None
55 | assert yarl.cache_info()["encode_host"].maxsize is None
56 |
57 |
58 | def test_cache_configure_None_only_deprecated() -> None:
59 | msg = (
60 | r"cache_configure\(\) no longer accepts the ip_address_size "
61 | r"or host_validate_size arguments, they are used to set the "
62 | r"encode_host_size instead and will be removed in the future"
63 | )
64 | with pytest.warns(DeprecationWarning, match=msg):
65 | yarl.cache_configure(
66 | ip_address_size=None,
67 | host_validate_size=None,
68 | )
69 | assert yarl.cache_info()["encode_host"].maxsize is None
70 |
71 |
72 | def test_cache_configure_explicit() -> None:
73 | yarl.cache_configure(
74 | idna_decode_size=128,
75 | idna_encode_size=128,
76 | encode_host_size=128,
77 | )
78 | assert yarl.cache_info()["idna_decode"].maxsize == 128
79 | assert yarl.cache_info()["idna_encode"].maxsize == 128
80 | assert yarl.cache_info()["encode_host"].maxsize == 128
81 |
82 |
83 | def test_cache_configure_warning() -> None:
84 | msg = (
85 | r"cache_configure\(\) no longer accepts the ip_address_size "
86 | r"or host_validate_size arguments, they are used to set the "
87 | r"encode_host_size instead and will be removed in the future"
88 | )
89 | with pytest.warns(DeprecationWarning, match=msg):
90 | yarl.cache_configure(
91 | idna_encode_size=1024,
92 | idna_decode_size=1024,
93 | ip_address_size=1024,
94 | host_validate_size=1024,
95 | )
96 |
97 | assert yarl.cache_info()["encode_host"].maxsize == 1024
98 | with pytest.warns(DeprecationWarning, match=msg):
99 | yarl.cache_configure(host_validate_size=None)
100 |
101 | assert yarl.cache_info()["encode_host"].maxsize is None
102 |
--------------------------------------------------------------------------------
/tests/test_cached_property.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from yarl._url import cached_property # type: ignore[attr-defined]
4 |
5 |
6 | class A:
7 | def __init__(self) -> None:
8 | self._cache: dict[str, int] = {}
9 |
10 | @cached_property
11 | def prop(self) -> int:
12 | """Docstring."""
13 | return 1
14 |
15 |
16 | def test_reify() -> None:
17 | a = A()
18 | assert 1 == a.prop
19 |
20 |
21 | def test_reify_class() -> None:
22 | assert isinstance(A.prop, cached_property)
23 | assert "Docstring." == A.prop.__doc__
24 |
25 |
26 | def test_reify_assignment() -> None:
27 | a = A()
28 |
29 | with pytest.raises(AttributeError):
30 | a.prop = 123
31 |
--------------------------------------------------------------------------------
/tests/test_normalize_path.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from yarl._path import normalize_path
4 |
5 | PATHS = [
6 | # No dots
7 | ("", ""),
8 | ("/", "/"),
9 | ("//", "//"),
10 | ("///", "///"),
11 | ("path", "path"),
12 | # Single-dot
13 | ("path/to", "path/to"),
14 | ("././path/to", "path/to"),
15 | ("path/./to", "path/to"),
16 | ("path/././to", "path/to"),
17 | ("path/to/.", "path/to/"),
18 | ("path/to/./.", "path/to/"),
19 | ("/path/to/.", "/path/to/"),
20 | # Double-dots
21 | ("../path/to", "path/to"),
22 | ("path/../to", "to"),
23 | ("path/../../to", "to"),
24 | # absolute path root / is maintained; tests based on two
25 | # tests from web-platform-tests project's urltestdata.json
26 | ("/foo/../../../ton", "/ton"),
27 | ("/foo/../../../..bar", "/..bar"),
28 | # Non-ASCII characters
29 | ("μονοπάτι/../../να/ᴜɴɪ/ᴄᴏᴅᴇ", "να/ᴜɴɪ/ᴄᴏᴅᴇ"),
30 | ("μονοπάτι/../../να/𝕦𝕟𝕚/𝕔𝕠𝕕𝕖/.", "να/𝕦𝕟𝕚/𝕔𝕠𝕕𝕖/"),
31 | ]
32 |
33 |
34 | @pytest.mark.parametrize("original,expected", PATHS)
35 | def test_normalize_path(original: str, expected: str) -> None:
36 | assert normalize_path(original) == expected
37 |
--------------------------------------------------------------------------------
/tests/test_pickle.py:
--------------------------------------------------------------------------------
1 | import pickle
2 |
3 | from yarl import URL
4 |
5 | # serialize
6 |
7 |
8 | def test_pickle() -> None:
9 | u1 = URL("picklepickle")
10 | hash(u1)
11 | v = pickle.dumps(u1)
12 | u2 = pickle.loads(v)
13 | assert u1._cache
14 | assert not u2._cache
15 | assert hash(u1) == hash(u2)
16 |
17 |
18 | def test_default_style_state() -> None:
19 | u = object.__new__(URL)
20 | val = ("set_state", "set_state", "set_state", "set_state", "set_state")
21 | u.__setstate__((None, {"_val": val}))
22 | assert u._val == val
23 | assert hash(u) != 1
24 |
25 |
26 | def test_empty_url_is_not_cached() -> None:
27 | u = URL.__new__(URL)
28 | val = ("set_state", "set_state", "set_state", "set_state", "set_state")
29 | u.__setstate__((None, {"_val": val}))
30 | assert u._val == val
31 | assert hash(u) != 1
32 |
33 |
34 | def test_pickle_does_not_pollute_cache() -> None:
35 | """Verify the unpickling does not pollute the cache.
36 |
37 | Since unpickling will call URL.__new__ with default
38 | args, we need to make sure that default args never
39 | end up in the pre_encoded_url or encode_url cache.
40 | """
41 | u1 = URL.__new__(URL)
42 | u1._scheme = "this"
43 | u1._netloc = "never.appears.any.where.else.in.tests"
44 | u1._path = ""
45 | u1._query = ""
46 | u1._fragment = ""
47 | hash(u1)
48 | v = pickle.dumps(u1)
49 | u2: URL = pickle.loads(v)
50 | assert u1._cache
51 | assert hash(u1) == hash(u2)
52 | assert u2._scheme == "this"
53 | assert u2._netloc == "never.appears.any.where.else.in.tests"
54 | assert u2._path == ""
55 | assert u2._query == ""
56 | assert u2._fragment == ""
57 | # Verify unpickling did not cache the wrong scheme
58 | # for empty args.
59 | assert URL().scheme == ""
60 | assert URL("").scheme == ""
61 |
--------------------------------------------------------------------------------
/tests/test_quoting_benchmarks.py:
--------------------------------------------------------------------------------
1 | """codspeed benchmark for yarl._quoting module."""
2 |
3 | import pytest
4 |
5 | try:
6 | from pytest_codspeed import BenchmarkFixture
7 | except ImportError: # pragma: no branch # only hit in cibuildwheel
8 | pytestmark = pytest.mark.skip("pytest-codspeed needs to be installed")
9 |
10 | from yarl._quoting import _Quoter, _Unquoter
11 |
12 | QUOTER_SLASH_SAFE = _Quoter(safe="/")
13 | QUOTER = _Quoter()
14 | UNQUOTER = _Unquoter()
15 | QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False)
16 | PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False)
17 |
18 | LONG_PATH = "/path/to" * 100
19 | LONG_QUERY = "a=1&b=2&c=3&d=4&e=5&f=6&g=7&h=8&i=9&j=0" * 25
20 | LONG_QUERY_WITH_PCT = LONG_QUERY + "&d=%25%2F%3F%3A%40%26%3B%3D%2B"
21 |
22 |
23 | def test_quote_query_string(benchmark: "BenchmarkFixture") -> None:
24 | @benchmark
25 | def _run() -> None:
26 | for _ in range(100):
27 | QUERY_QUOTER("a=1&b=2&c=3&d=4&e=5&f=6&g=7&h=8&i=9&j=0")
28 |
29 |
30 | def test_quoter_ascii(benchmark: "BenchmarkFixture") -> None:
31 | @benchmark
32 | def _run() -> None:
33 | for _ in range(100):
34 | QUOTER_SLASH_SAFE("/path/to")
35 |
36 |
37 | def test_quote_long_path(benchmark: "BenchmarkFixture") -> None:
38 | @benchmark
39 | def _run() -> None:
40 | for _ in range(100):
41 | PATH_QUOTER(LONG_PATH)
42 |
43 |
44 | def test_quoter_pct(benchmark: "BenchmarkFixture") -> None:
45 | @benchmark
46 | def _run() -> None:
47 | for _ in range(100):
48 | QUOTER("abc%0a")
49 |
50 |
51 | def test_long_query(benchmark: "BenchmarkFixture") -> None:
52 | @benchmark
53 | def _run() -> None:
54 | for _ in range(100):
55 | QUERY_QUOTER(LONG_QUERY)
56 |
57 |
58 | def test_long_query_with_pct(benchmark: "BenchmarkFixture") -> None:
59 | @benchmark
60 | def _run() -> None:
61 | for _ in range(100):
62 | QUERY_QUOTER(LONG_QUERY_WITH_PCT)
63 |
64 |
65 | def test_quoter_quote_utf8(benchmark: "BenchmarkFixture") -> None:
66 | @benchmark
67 | def _run() -> None:
68 | for _ in range(100):
69 | PATH_QUOTER("/шлях/файл")
70 |
71 |
72 | def test_unquoter_short(benchmark: "BenchmarkFixture") -> None:
73 | @benchmark
74 | def _run() -> None:
75 | for _ in range(100):
76 | UNQUOTER("/path/to")
77 |
78 |
79 | def test_unquoter_long_ascii(benchmark: "BenchmarkFixture") -> None:
80 | @benchmark
81 | def _run() -> None:
82 | for _ in range(100):
83 | UNQUOTER(LONG_QUERY)
84 |
85 |
86 | def test_unquoter_long_pct(benchmark: "BenchmarkFixture") -> None:
87 | @benchmark
88 | def _run() -> None:
89 | for _ in range(100):
90 | UNQUOTER(LONG_QUERY_WITH_PCT)
91 |
--------------------------------------------------------------------------------
/tests/test_url_build.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from yarl import URL
4 |
5 | # build classmethod
6 |
7 |
8 | def test_build_without_arguments() -> None:
9 | u = URL.build()
10 | assert str(u) == ""
11 |
12 |
13 | def test_build_simple() -> None:
14 | u = URL.build(scheme="http", host="127.0.0.1")
15 | assert str(u) == "http://127.0.0.1"
16 |
17 |
18 | def test_url_build_ipv6() -> None:
19 | u = URL.build(scheme="http", host="::1")
20 | assert str(u) == "http://[::1]"
21 |
22 |
23 | def test_url_build_ipv6_brackets_encoded() -> None:
24 | u = URL.build(scheme="http", host="[::1]", encoded=True)
25 | assert str(u) == "http://[::1]"
26 |
27 |
28 | def test_url_build_ipv6_brackets_not_encoded() -> None:
29 | u = URL.build(scheme="http", host="::1", encoded=False)
30 | assert str(u) == "http://[::1]"
31 |
32 |
33 | def test_url_ipv4_in_ipv6() -> None:
34 | u = URL.build(scheme="http", host="2001:db8:122:344::192.0.2.33")
35 | assert str(u) == "http://[2001:db8:122:344::c000:221]"
36 |
37 |
38 | def test_build_with_scheme() -> None:
39 | u = URL.build(scheme="blob", path="path")
40 | assert str(u) == "blob:path"
41 |
42 |
43 | def test_build_with_host() -> None:
44 | u = URL.build(host="127.0.0.1")
45 | assert str(u) == "//127.0.0.1"
46 | assert u == URL("//127.0.0.1")
47 |
48 |
49 | def test_build_with_scheme_and_host() -> None:
50 | u = URL.build(scheme="http", host="127.0.0.1")
51 | assert str(u) == "http://127.0.0.1"
52 | assert u == URL("http://127.0.0.1")
53 |
54 |
55 | @pytest.mark.parametrize(
56 | ("port", "exc", "match"),
57 | [
58 | pytest.param(
59 | 8000,
60 | ValueError,
61 | r"""(?x)
62 | ^
63 | Can't\ build\ URL\ with\ "port"\ but\ without\ "host"\.
64 | $
65 | """,
66 | id="port-only",
67 | ),
68 | pytest.param(
69 | "", TypeError, r"^The port is required to be int, got .*\.$", id="port-str"
70 | ),
71 | ],
72 | )
73 | def test_build_with_port(port: int, exc: type[Exception], match: str) -> None:
74 | with pytest.raises(exc, match=match):
75 | URL.build(port=port)
76 |
77 |
78 | def test_build_with_user() -> None:
79 | u = URL.build(scheme="http", host="127.0.0.1", user="foo")
80 | assert str(u) == "http://foo@127.0.0.1"
81 |
82 |
83 | def test_build_with_user_password() -> None:
84 | u = URL.build(scheme="http", host="127.0.0.1", user="foo", password="bar")
85 | assert str(u) == "http://foo:bar@127.0.0.1"
86 |
87 |
88 | def test_build_with_query_and_query_string() -> None:
89 | with pytest.raises(ValueError):
90 | URL.build(
91 | scheme="http",
92 | host="127.0.0.1",
93 | user="foo",
94 | password="bar",
95 | port=8000,
96 | path="/index.html",
97 | query=dict(arg="value1"),
98 | query_string="arg=value1",
99 | fragment="top",
100 | )
101 |
102 |
103 | def test_build_with_all() -> None:
104 | u = URL.build(
105 | scheme="http",
106 | host="127.0.0.1",
107 | user="foo",
108 | password="bar",
109 | port=8000,
110 | path="/index.html",
111 | query_string="arg=value1",
112 | fragment="top",
113 | )
114 | assert str(u) == "http://foo:bar@127.0.0.1:8000/index.html?arg=value1#top"
115 |
116 |
117 | def test_build_with_authority_and_host() -> None:
118 | with pytest.raises(ValueError):
119 | URL.build(authority="host.com", host="example.com")
120 |
121 |
122 | @pytest.mark.parametrize(
123 | ("host", "is_authority"),
124 | [
125 | ("user:pass@host.com", True),
126 | ("user@host.com", True),
127 | ("host:com", False),
128 | ("not_percent_encoded%Zf", False),
129 | ("still_not_percent_encoded%fZ", False),
130 | *(("other_gen_delim_" + c, False) for c in "/?#[]"),
131 | ],
132 | )
133 | def test_build_with_invalid_host(host: str, is_authority: bool) -> None:
134 | match = r"Host '[^']+' cannot contain '[^']+' \(at position \d+\)"
135 | if is_authority:
136 | match += ", if .* use 'authority' instead of 'host'"
137 | with pytest.raises(ValueError, match=f"{match}$"):
138 | URL.build(host=host)
139 |
140 |
141 | def test_build_with_authority() -> None:
142 | url = URL.build(scheme="http", authority="степан:bar@host.com:8000", path="/path")
143 | assert (
144 | str(url) == "http://%D1%81%D1%82%D0%B5%D0%BF%D0%B0%D0%BD:bar@host.com:8000/path"
145 | )
146 |
147 |
148 | def test_build_with_authority_no_leading_slash() -> None:
149 | msg = r"Path in a URL with authority should start with a slash \('/'\) if set"
150 | with pytest.raises(ValueError, match=msg):
151 | URL.build(scheme="http", authority="степан:bar@host.com:8000", path="path")
152 |
153 |
154 | def test_build_with_authority_without_encoding() -> None:
155 | url = URL.build(
156 | scheme="http", authority="foo:bar@host.com:8000", path="path", encoded=True
157 | )
158 | assert str(url) == "http://foo:bar@host.com:8000/path"
159 |
160 |
161 | def test_build_with_authority_empty_host_no_scheme() -> None:
162 | url = URL.build(authority="", path="path")
163 | assert str(url) == "path"
164 |
165 |
166 | def test_build_with_authority_and_only_user() -> None:
167 | url = URL.build(scheme="https", authority="user:@foo.com", path="/path")
168 | assert str(url) == "https://user:@foo.com/path"
169 |
170 |
171 | def test_build_with_authority_with_port() -> None:
172 | url = URL.build(scheme="https", authority="foo.com:8080", path="/path")
173 | assert str(url) == "https://foo.com:8080/path"
174 |
175 |
176 | def test_build_with_authority_with_ipv6() -> None:
177 | url = URL.build(scheme="https", authority="[::1]", path="/path")
178 | assert str(url) == "https://[::1]/path"
179 |
180 |
181 | def test_build_with_authority_with_ipv6_and_port() -> None:
182 | url = URL.build(scheme="https", authority="[::1]:81", path="/path")
183 | assert str(url) == "https://[::1]:81/path"
184 |
185 |
186 | def test_query_str() -> None:
187 | u = URL.build(scheme="http", host="127.0.0.1", path="/", query_string="arg=value1")
188 | assert str(u) == "http://127.0.0.1/?arg=value1"
189 |
190 |
191 | def test_query_dict() -> None:
192 | u = URL.build(scheme="http", host="127.0.0.1", path="/", query=dict(arg="value1"))
193 |
194 | assert str(u) == "http://127.0.0.1/?arg=value1"
195 |
196 |
197 | def test_build_path_quoting() -> None:
198 | u = URL.build(
199 | scheme="http",
200 | host="127.0.0.1",
201 | path="/фотографія.jpg",
202 | query=dict(arg="Привіт"),
203 | )
204 |
205 | assert u == URL("http://127.0.0.1/фотографія.jpg?arg=Привіт")
206 | assert str(u) == (
207 | "http://127.0.0.1/"
208 | "%D1%84%D0%BE%D1%82%D0%BE%D0%B3%D1%80%D0%B0%D1%84%D1%96%D1%8F.jpg?"
209 | "arg=%D0%9F%D1%80%D0%B8%D0%B2%D1%96%D1%82"
210 | )
211 |
212 |
213 | def test_build_query_quoting() -> None:
214 | u = URL.build(
215 | scheme="http",
216 | host="127.0.0.1",
217 | path="/фотографія.jpg",
218 | query="arg=Привіт",
219 | )
220 |
221 | assert u == URL("http://127.0.0.1/фотографія.jpg?arg=Привіт")
222 | assert str(u) == (
223 | "http://127.0.0.1/"
224 | "%D1%84%D0%BE%D1%82%D0%BE%D0%B3%D1%80%D0%B0%D1%84%D1%96%D1%8F.jpg?"
225 | "arg=%D0%9F%D1%80%D0%B8%D0%B2%D1%96%D1%82"
226 | )
227 |
228 |
229 | def test_build_query_only() -> None:
230 | u = URL.build(query={"key": "value"})
231 |
232 | assert str(u) == "?key=value"
233 |
234 |
235 | def test_build_drop_dots() -> None:
236 | u = URL.build(scheme="http", host="example.com", path="/path/../to")
237 | assert str(u) == "http://example.com/to"
238 |
239 |
240 | def test_build_encode() -> None:
241 | u = URL.build(
242 | scheme="http",
243 | host="оун-упа.укр",
244 | path="/шлях/криївка",
245 | query_string="ключ=знач",
246 | fragment="фраг",
247 | )
248 | expected = (
249 | "http://xn----8sb1bdhvc.xn--j1amh"
250 | "/%D1%88%D0%BB%D1%8F%D1%85/%D0%BA%D1%80%D0%B8%D1%97%D0%B2%D0%BA%D0%B0"
251 | "?%D0%BA%D0%BB%D1%8E%D1%87=%D0%B7%D0%BD%D0%B0%D1%87"
252 | "#%D1%84%D1%80%D0%B0%D0%B3"
253 | )
254 | assert str(u) == expected
255 |
256 |
257 | def test_build_already_encoded() -> None:
258 | # resulting URL is invalid but not encoded
259 | u = URL.build(
260 | scheme="http",
261 | host="оун-упа.укр",
262 | path="/шлях/криївка",
263 | query_string="ключ=знач",
264 | fragment="фраг",
265 | encoded=True,
266 | )
267 | assert str(u) == "http://оун-упа.укр/шлях/криївка?ключ=знач#фраг"
268 |
269 |
270 | def test_build_already_encoded_username_password() -> None:
271 | u = URL.build(
272 | scheme="http",
273 | host="x.org",
274 | path="/x/y/z",
275 | query_string="x=z",
276 | fragment="any",
277 | user="u",
278 | password="p",
279 | encoded=True,
280 | )
281 | assert str(u) == "http://u:p@x.org/x/y/z?x=z#any"
282 | assert u.host_subcomponent == "x.org"
283 |
284 |
285 | def test_build_already_encoded_empty_host() -> None:
286 | u = URL.build(
287 | host="",
288 | path="/x/y/z",
289 | query_string="x=z",
290 | fragment="any",
291 | encoded=True,
292 | )
293 | assert str(u) == "/x/y/z?x=z#any"
294 | assert u.host_subcomponent is None
295 |
296 |
297 | def test_build_percent_encoded() -> None:
298 | u = URL.build(
299 | scheme="http",
300 | host="%2d.org",
301 | user="u%2d",
302 | password="p%2d",
303 | path="/%2d",
304 | query_string="k%2d=v%2d",
305 | fragment="f%2d",
306 | )
307 | assert str(u) == "http://u%252d:p%252d@%2d.org/%252d?k%252d=v%252d#f%252d"
308 | assert u.raw_host == "%2d.org"
309 | assert u.host == "%2d.org"
310 | assert u.raw_user == "u%252d"
311 | assert u.user == "u%2d"
312 | assert u.raw_password == "p%252d"
313 | assert u.password == "p%2d"
314 | assert u.raw_authority == "u%252d:p%252d@%2d.org"
315 | assert u.authority == "u%2d:p%2d@%2d.org:80"
316 | assert u.raw_path == "/%252d"
317 | assert u.path == "/%2d"
318 | assert u.query == {"k%2d": "v%2d"}
319 | assert u.raw_query_string == "k%252d=v%252d"
320 | assert u.query_string == "k%2d=v%2d"
321 | assert u.raw_fragment == "f%252d"
322 | assert u.fragment == "f%2d"
323 |
324 |
325 | def test_build_with_authority_percent_encoded() -> None:
326 | u = URL.build(scheme="http", authority="u%2d:p%2d@%2d.org")
327 | assert str(u) == "http://u%252d:p%252d@%2d.org"
328 | assert u.raw_host == "%2d.org"
329 | assert u.host == "%2d.org"
330 | assert u.raw_user == "u%252d"
331 | assert u.user == "u%2d"
332 | assert u.raw_password == "p%252d"
333 | assert u.password == "p%2d"
334 | assert u.raw_authority == "u%252d:p%252d@%2d.org"
335 | assert u.authority == "u%2d:p%2d@%2d.org:80"
336 |
337 |
338 | def test_build_with_authority_percent_encoded_already_encoded() -> None:
339 | u = URL.build(scheme="http", authority="u%2d:p%2d@%2d.org", encoded=True)
340 | assert str(u) == "http://u%2d:p%2d@%2d.org"
341 | assert u.raw_host == "%2d.org"
342 | assert u.host == "%2d.org"
343 | assert u.user == "u-"
344 | assert u.raw_user == "u%2d"
345 | assert u.password == "p-"
346 | assert u.raw_password == "p%2d"
347 | assert u.authority == "u-:p-@%2d.org:80"
348 | assert u.raw_authority == "u%2d:p%2d@%2d.org"
349 |
350 |
351 | def test_build_with_authority_with_path_with_leading_slash() -> None:
352 | u = URL.build(scheme="http", host="example.com", path="/path_with_leading_slash")
353 | assert str(u) == "http://example.com/path_with_leading_slash"
354 |
355 |
356 | def test_build_with_authority_with_empty_path() -> None:
357 | u = URL.build(scheme="http", host="example.com", path="")
358 | assert str(u) == "http://example.com"
359 |
360 |
361 | def test_build_with_authority_with_path_without_leading_slash() -> None:
362 | with pytest.raises(ValueError):
363 | URL.build(scheme="http", host="example.com", path="path_without_leading_slash")
364 |
365 |
366 | def test_build_with_none_host() -> None:
367 | with pytest.raises(TypeError, match="NoneType is illegal for.*host"):
368 | URL.build(scheme="http", host=None) # type: ignore[arg-type]
369 |
370 |
371 | def test_build_with_none_path() -> None:
372 | with pytest.raises(TypeError):
373 | URL.build(scheme="http", host="example.com", path=None) # type: ignore[arg-type]
374 |
375 |
376 | def test_build_with_none_query_string() -> None:
377 | with pytest.raises(TypeError):
378 | URL.build(scheme="http", host="example.com", query_string=None) # type: ignore[arg-type]
379 |
380 |
381 | def test_build_with_none_fragment() -> None:
382 | with pytest.raises(TypeError):
383 | URL.build(scheme="http", host="example.com", fragment=None) # type: ignore[arg-type]
384 |
385 |
386 | def test_build_uppercase_host() -> None:
387 | u = URL.build(
388 | host="UPPER.case",
389 | encoded=False,
390 | )
391 | assert u.host == "upper.case"
392 |
--------------------------------------------------------------------------------
/tests/test_url_cmp_and_hash.py:
--------------------------------------------------------------------------------
1 | from yarl import URL
2 |
3 | # comparison and hashing
4 |
5 |
6 | def test_ne_str() -> None:
7 | url = URL("http://example.com/")
8 | assert url != "http://example.com/"
9 |
10 |
11 | def test_eq() -> None:
12 | url = URL("http://example.com/")
13 | assert url == URL("http://example.com/")
14 |
15 |
16 | def test_hash() -> None:
17 | assert hash(URL("http://example.com/")) == hash(URL("http://example.com/"))
18 |
19 |
20 | def test_hash_double_call() -> None:
21 | url = URL("http://example.com/")
22 | assert hash(url) == hash(url)
23 |
24 |
25 | def test_le_less() -> None:
26 | url1 = URL("http://example1.com/")
27 | url2 = URL("http://example2.com/")
28 |
29 | assert url1 <= url2
30 |
31 |
32 | def test_le_eq() -> None:
33 | url1 = URL("http://example.com/")
34 | url2 = URL("http://example.com/")
35 |
36 | assert url1 <= url2
37 |
38 |
39 | def test_le_not_implemented() -> None:
40 | url = URL("http://example1.com/")
41 |
42 | assert url.__le__(123) is NotImplemented
43 |
44 |
45 | def test_lt() -> None:
46 | url1 = URL("http://example1.com/")
47 | url2 = URL("http://example2.com/")
48 |
49 | assert url1 < url2
50 |
51 |
52 | def test_lt_not_implemented() -> None:
53 | url = URL("http://example1.com/")
54 |
55 | assert url.__lt__(123) is NotImplemented
56 |
57 |
58 | def test_ge_more() -> None:
59 | url1 = URL("http://example1.com/")
60 | url2 = URL("http://example2.com/")
61 |
62 | assert url2 >= url1
63 |
64 |
65 | def test_ge_eq() -> None:
66 | url1 = URL("http://example.com/")
67 | url2 = URL("http://example.com/")
68 |
69 | assert url2 >= url1
70 |
71 |
72 | def test_ge_not_implemented() -> None:
73 | url = URL("http://example1.com/")
74 |
75 | assert url.__ge__(123) is NotImplemented
76 |
77 |
78 | def test_gt() -> None:
79 | url1 = URL("http://example1.com/")
80 | url2 = URL("http://example2.com/")
81 |
82 | assert url2 > url1
83 |
84 |
85 | def test_gt_not_implemented() -> None:
86 | url = URL("http://example1.com/")
87 |
88 | assert url.__gt__(123) is NotImplemented
89 |
--------------------------------------------------------------------------------
/tests/test_url_query.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Sequence
2 | from urllib.parse import parse_qs, urlencode
3 |
4 | import pytest
5 | from multidict import MultiDict, MultiDictProxy
6 |
7 | from yarl import URL
8 |
9 | # ========================================
10 | # Basic chars in query values
11 | # ========================================
12 |
13 | URLS_WITH_BASIC_QUERY_VALUES: list[tuple[URL, MultiDict[str]]] = [
14 | # Empty strings, keys and values
15 | (
16 | URL("http://example.com"),
17 | MultiDict(),
18 | ),
19 | (
20 | URL("http://example.com?a="),
21 | MultiDict([("a", "")]),
22 | ),
23 | # ASCII chars
24 | (
25 | URL("http://example.com?a+b=c+d"),
26 | MultiDict({"a b": "c d"}),
27 | ),
28 | (
29 | URL("http://example.com?a=1&b=2"),
30 | MultiDict([("a", "1"), ("b", "2")]),
31 | ),
32 | (
33 | URL("http://example.com?a=1&b=2&a=3"),
34 | MultiDict([("a", "1"), ("b", "2"), ("a", "3")]),
35 | ),
36 |     # Non-ASCII BMP chars
37 | (
38 | URL("http://example.com?ключ=знач"),
39 | MultiDict({"ключ": "знач"}),
40 | ),
41 | (
42 | URL("http://example.com?foo=ᴜɴɪᴄᴏᴅᴇ"),
43 | MultiDict({"foo": "ᴜɴɪᴄᴏᴅᴇ"}),
44 | ),
45 | # Non-BMP chars
46 | (
47 | URL("http://example.com?bar=𝕦𝕟𝕚𝕔𝕠𝕕𝕖"),
48 | MultiDict({"bar": "𝕦𝕟𝕚𝕔𝕠𝕕𝕖"}),
49 | ),
50 | ]
51 |
52 |
53 | @pytest.mark.parametrize(
54 | "original_url, expected_query",
55 | URLS_WITH_BASIC_QUERY_VALUES,
56 | )
57 | def test_query_basic_parsing(original_url: URL, expected_query: MultiDict[str]) -> None:
58 | assert isinstance(original_url.query, MultiDictProxy)
59 | assert original_url.query == expected_query
60 |
61 |
62 | @pytest.mark.parametrize(
63 | "original_url, expected_query",
64 | URLS_WITH_BASIC_QUERY_VALUES,
65 | )
66 | def test_query_basic_update_query(
67 | original_url: URL, expected_query: MultiDict[str]
68 | ) -> None:
69 | new_url = original_url.update_query({})
70 | assert new_url == original_url
71 |
72 |
73 | def test_query_dont_unquote_twice() -> None:
74 | sample_url = "http://base.place?" + urlencode({"a": "/////"})
75 | query = urlencode({"url": sample_url})
76 | full_url = "http://test_url.aha?" + query
77 |
78 | url = URL(full_url)
79 | assert url.query["url"] == sample_url
80 |
81 |
82 | # ========================================
83 | # Reserved chars in query values
84 | # ========================================
85 |
86 | # See https://github.com/python/cpython/issues/87133, which introduced a new
87 | # `separator` keyword argument to `urllib.parse.parse_qs` (among others).
88 | # If the name doesn't exist as a variable in the function bytecode, the
89 | # test is expected to fail.
90 | _SEMICOLON_XFAIL = pytest.mark.xfail(
91 | condition="separator" not in parse_qs.__code__.co_varnames,
92 | reason=(
93 | "Python versions < 3.9.2 lack a fix for "
94 | 'CVE-2021-23336 dropping ";" as a valid query parameter separator, '
95 | "making this test fail."
96 | ),
97 | strict=True,
98 | )
99 |
100 |
101 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES = [
102 | # Ampersand
103 | (URL("http://127.0.0.1/?a=10&b=20"), 2, "10"),
104 | (URL("http://127.0.0.1/?a=10%26b=20"), 1, "10&b=20"),
105 | (URL("http://127.0.0.1/?a=10%3Bb=20"), 1, "10;b=20"),
106 | # Semicolon, which is *not* a query parameter separator as of RFC3986
107 | (URL("http://127.0.0.1/?a=10;b=20"), 1, "10;b=20"),
108 | (URL("http://127.0.0.1/?a=10%26b=20"), 1, "10&b=20"),
109 | (URL("http://127.0.0.1/?a=10%3Bb=20"), 1, "10;b=20"),
110 | ]
111 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL = [
112 | # Ampersand
113 | *URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[:3],
114 | # Semicolon, which is *not* a query parameter separator as of RFC3986
115 | # Mark the first of these as expecting to fail on old Python patch releases.
116 | pytest.param(*URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[3], marks=_SEMICOLON_XFAIL),
117 | *URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES[4:],
118 | ]
119 |
120 |
121 | @pytest.mark.parametrize(
122 | "original_url, expected_query_len, expected_value_a",
123 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL,
124 | )
125 | def test_query_separators_from_parsing(
126 | original_url: URL,
127 | expected_query_len: int,
128 | expected_value_a: str,
129 | ) -> None:
130 | assert len(original_url.query) == expected_query_len
131 | assert original_url.query["a"] == expected_value_a
132 |
133 |
134 | @pytest.mark.parametrize(
135 | "original_url, expected_query_len, expected_value_a",
136 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES_W_XFAIL,
137 | )
138 | def test_query_separators_from_update_query(
139 | original_url: URL,
140 | expected_query_len: int,
141 | expected_value_a: str,
142 | ) -> None:
143 | new_url = original_url.update_query({"c": expected_value_a})
144 | assert new_url.query["a"] == expected_value_a
145 | assert new_url.query["c"] == expected_value_a
146 |
147 |
148 | @pytest.mark.parametrize(
149 | "original_url, expected_query_len, expected_value_a",
150 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES,
151 | )
152 | def test_query_separators_from_with_query(
153 | original_url: URL,
154 | expected_query_len: int,
155 | expected_value_a: str,
156 | ) -> None:
157 | new_url = original_url.with_query({"c": expected_value_a})
158 | assert new_url.query["c"] == expected_value_a
159 |
160 |
161 | @pytest.mark.parametrize(
162 | "original_url, expected_query_len, expected_value_a",
163 | URLS_WITH_RESERVED_CHARS_IN_QUERY_VALUES,
164 | )
165 | def test_query_from_empty_update_query(
166 | original_url: URL,
167 | expected_query_len: int,
168 | expected_value_a: str,
169 | ) -> None:
170 | new_url = original_url.update_query({})
171 |
172 | assert new_url.query["a"] == original_url.query["a"]
173 |
174 | if "b" in original_url.query:
175 | assert new_url.query["b"] == original_url.query["b"]
176 |
177 |
178 | @pytest.mark.parametrize(
179 | ("original_query_string", "keys_to_drop", "expected_query_string"),
180 | [
181 | ("a=10&b=M%C3%B9a+xu%C3%A2n&u%E1%BB%91ng=cafe", ["a"], "b=Mùa xuân&uống=cafe"),
182 | ("a=10&b=M%C3%B9a+xu%C3%A2n", ["b"], "a=10"),
183 | ("a=10&b=M%C3%B9a+xu%C3%A2n&c=30", ["b"], "a=10&c=30"),
184 | (
185 | "a=10&b=M%C3%B9a+xu%C3%A2n&u%E1%BB%91ng=cafe",
186 | ["uống"],
187 | "a=10&b=Mùa xuân",
188 | ),
189 | ("a=10&b=M%C3%B9a+xu%C3%A2n", ["a", "b"], ""),
190 | ],
191 | )
192 | def test_without_query_params(
193 | original_query_string: str, keys_to_drop: Sequence[str], expected_query_string: str
194 | ) -> None:
195 | url = URL(f"http://example.com?{original_query_string}")
196 | new_url = url.without_query_params(*keys_to_drop)
197 | assert new_url.query_string == expected_query_string
198 | assert new_url is not url
199 |
200 |
201 | @pytest.mark.parametrize(
202 | ("original_query_string", "keys_to_drop"),
203 | [
204 | ("a=10&b=M%C3%B9a+xu%C3%A2n&c=30", ["invalid_key"]),
205 | ("a=10&b=M%C3%B9a+xu%C3%A2n", []),
206 | ],
207 | )
208 | def test_skip_dropping_query_params(
209 | original_query_string: str, keys_to_drop: Sequence[str]
210 | ) -> None:
211 | url = URL(f"http://example.com?{original_query_string}")
212 | new_url = url.without_query_params(*keys_to_drop)
213 | assert new_url is url
214 |
215 |
216 | def test_update_query_rejects_bytes() -> None:
217 | url = URL("http://example.com")
218 | with pytest.raises(TypeError):
219 | url.update_query(b"foo=bar") # type: ignore[arg-type]
220 |
221 |
222 | def test_update_query_rejects_bytearray() -> None:
223 | url = URL("http://example.com")
224 | with pytest.raises(TypeError):
225 | url.update_query(bytearray(b"foo=bar")) # type: ignore[arg-type]
226 |
227 |
228 | def test_update_query_rejects_memoryview() -> None:
229 | url = URL("http://example.com")
230 | with pytest.raises(TypeError):
231 | url.update_query(memoryview(b"foo=bar"))
232 |
233 |
234 | def test_update_query_rejects_invalid_type() -> None:
235 | url = URL("http://example.com")
236 | with pytest.raises(TypeError):
237 | url.update_query(42) # type: ignore[call-overload]
238 |
239 |
240 | def test_update_query_with_sequence_of_pairs() -> None:
241 | url = URL("http://example.com")
242 | new_url = url.update_query([("a", "1"), ("b", "2")])
243 | assert new_url.query == MultiDict([("a", "1"), ("b", "2")])
244 | assert new_url.query_string == "a=1&b=2"
245 |
--------------------------------------------------------------------------------
/tests/test_url_update_netloc.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from yarl import URL
4 |
5 | # with_*
6 |
7 |
8 | def test_with_scheme() -> None:
9 | url = URL("http://example.com")
10 | assert str(url.with_scheme("https")) == "https://example.com"
11 |
12 |
13 | def test_with_scheme_uppercased() -> None:
14 | url = URL("http://example.com")
15 | assert str(url.with_scheme("HTTPS")) == "https://example.com"
16 |
17 |
18 | @pytest.mark.parametrize(
19 | ("scheme"),
20 | [
21 | ("http"),
22 | ("https"),
23 | ("HTTP"),
24 | ],
25 | )
26 | def test_with_scheme_for_relative_url(scheme: str) -> None:
27 | """Test scheme can be set for relative URL."""
28 | lower_scheme = scheme.lower()
29 | msg = (
30 | "scheme replacement is not allowed for "
31 | f"relative URLs for the {lower_scheme} scheme"
32 | )
33 | with pytest.raises(ValueError, match=msg):
34 | assert URL("path/to").with_scheme(scheme)
35 |
36 |
37 | def test_with_scheme_for_relative_file_url() -> None:
38 | """Test scheme can be set for relative file URL."""
39 | expected = URL("file:///absolute/path")
40 | assert expected.with_scheme("file") == expected
41 |
42 |
43 | def test_with_scheme_invalid_type() -> None:
44 | url = URL("http://example.com")
45 | with pytest.raises(TypeError):
46 | assert str(url.with_scheme(123)) # type: ignore[arg-type]
47 |
48 |
49 | def test_with_user() -> None:
50 | url = URL("http://example.com")
51 | assert str(url.with_user("john")) == "http://john@example.com"
52 |
53 |
54 | def test_with_user_non_ascii() -> None:
55 | url = URL("http://example.com")
56 | url2 = url.with_user("бажан")
57 | assert url2.raw_user == "%D0%B1%D0%B0%D0%B6%D0%B0%D0%BD"
58 | assert url2.user == "бажан"
59 | assert url2.raw_authority == "%D0%B1%D0%B0%D0%B6%D0%B0%D0%BD@example.com"
60 | assert url2.authority == "бажан@example.com:80"
61 |
62 |
63 | def test_with_user_percent_encoded() -> None:
64 | url = URL("http://example.com")
65 | url2 = url.with_user("%cf%80")
66 | assert url2.raw_user == "%25cf%2580"
67 | assert url2.user == "%cf%80"
68 | assert url2.raw_authority == "%25cf%2580@example.com"
69 | assert url2.authority == "%cf%80@example.com:80"
70 |
71 |
72 | def test_with_user_for_relative_url() -> None:
73 | with pytest.raises(ValueError):
74 | URL("path/to").with_user("user")
75 |
76 |
77 | def test_with_user_invalid_type() -> None:
78 | url = URL("http://example.com:123")
79 | with pytest.raises(TypeError):
80 | url.with_user(123) # type: ignore[arg-type]
81 |
82 |
83 | def test_with_user_None() -> None:
84 | url = URL("http://john@example.com")
85 | assert str(url.with_user(None)) == "http://example.com"
86 |
87 |
88 | def test_with_user_ipv6() -> None:
89 | url = URL("http://john:pass@[::1]:8080/")
90 | assert str(url.with_user(None)) == "http://[::1]:8080/"
91 |
92 |
93 | def test_with_user_None_when_password_present() -> None:
94 | url = URL("http://john:pass@example.com")
95 | assert str(url.with_user(None)) == "http://example.com"
96 |
97 |
98 | def test_with_password() -> None:
99 | url = URL("http://john@example.com")
100 | assert str(url.with_password("pass")) == "http://john:pass@example.com"
101 |
102 |
103 | def test_with_password_ipv6() -> None:
104 | url = URL("http://john:pass@[::1]:8080/")
105 | assert str(url.with_password(None)) == "http://john@[::1]:8080/"
106 |
107 |
108 | def test_with_password_non_ascii() -> None:
109 | url = URL("http://john@example.com")
110 | url2 = url.with_password("пароль")
111 | assert url2.raw_password == "%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C"
112 | assert url2.password == "пароль"
113 | assert url2.raw_authority == "john:%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C@example.com"
114 | assert url2.authority == "john:пароль@example.com:80"
115 |
116 |
117 | def test_with_password_percent_encoded() -> None:
118 | url = URL("http://john@example.com")
119 | url2 = url.with_password("%cf%80")
120 | assert url2.raw_password == "%25cf%2580"
121 | assert url2.password == "%cf%80"
122 | assert url2.raw_authority == "john:%25cf%2580@example.com"
123 | assert url2.authority == "john:%cf%80@example.com:80"
124 |
125 |
126 | def test_with_password_non_ascii_with_colon() -> None:
127 | url = URL("http://john@example.com")
128 | url2 = url.with_password("п:а")
129 | assert url2.raw_password == "%D0%BF%3A%D0%B0"
130 | assert url2.password == "п:а"
131 |
132 |
133 | def test_with_password_for_relative_url() -> None:
134 | with pytest.raises(ValueError):
135 | URL("path/to").with_password("pass")
136 |
137 |
138 | def test_with_password_None() -> None:
139 | url = URL("http://john:pass@example.com")
140 | assert str(url.with_password(None)) == "http://john@example.com"
141 |
142 |
143 | def test_with_password_invalid_type() -> None:
144 | url = URL("http://example.com:123")
145 | with pytest.raises(TypeError):
146 | url.with_password(123) # type: ignore[arg-type]
147 |
148 |
149 | def test_with_password_and_empty_user() -> None:
150 | url = URL("http://example.com")
151 | url2 = url.with_password("pass")
152 | assert url2.password == "pass"
153 | assert url2.user is None
154 | assert str(url2) == "http://:pass@example.com"
155 |
156 |
157 | def test_from_str_with_host_ipv4() -> None:
158 | url = URL("http://host:80")
159 | url = url.with_host("192.168.1.1")
160 | assert url.raw_host == "192.168.1.1"
161 |
162 |
163 | def test_from_str_with_host_ipv6() -> None:
164 | url = URL("http://host:80")
165 | url = url.with_host("::1")
166 | assert url.raw_host == "::1"
167 |
168 |
169 | def test_with_host() -> None:
170 | url = URL("http://example.com:123")
171 | assert str(url.with_host("example.org")) == "http://example.org:123"
172 |
173 |
174 | def test_with_host_empty() -> None:
175 | url = URL("http://example.com:123")
176 | with pytest.raises(ValueError):
177 | url.with_host("")
178 |
179 |
180 | def test_with_host_non_ascii() -> None:
181 | url = URL("http://example.com:123")
182 | url2 = url.with_host("оун-упа.укр")
183 | assert url2.raw_host == "xn----8sb1bdhvc.xn--j1amh"
184 | assert url2.host == "оун-упа.укр"
185 | assert url2.raw_authority == "xn----8sb1bdhvc.xn--j1amh:123"
186 | assert url2.authority == "оун-упа.укр:123"
187 |
188 |
189 | @pytest.mark.parametrize(
190 | ("host", "is_authority"),
191 | [
192 | ("user:pass@host.com", True),
193 | ("user@host.com", True),
194 | ("host:com", False),
195 | ("not_percent_encoded%Zf", False),
196 | ("still_not_percent_encoded%fZ", False),
197 | *(("other_gen_delim_" + c, False) for c in "/?#[]"),
198 | ],
199 | )
200 | def test_with_invalid_host(host: str, is_authority: bool) -> None:
201 | url = URL("http://example.com:123")
202 | match = r"Host '[^']+' cannot contain '[^']+' \(at position \d+\)"
203 | if is_authority:
204 | match += ", if .* use 'authority' instead of 'host'"
205 | with pytest.raises(ValueError, match=f"{match}$"):
206 | url.with_host(host=host)
207 |
208 |
209 | def test_with_host_percent_encoded() -> None:
210 | url = URL("http://%25cf%2580%cf%80:%25cf%2580%cf%80@example.com:123")
211 | url2 = url.with_host("%cf%80.org")
212 | assert url2.raw_host == "%cf%80.org"
213 | assert url2.host == "%cf%80.org"
214 | assert url2.raw_authority == "%25cf%2580%CF%80:%25cf%2580%CF%80@%cf%80.org:123"
215 | assert url2.authority == "%cf%80π:%cf%80π@%cf%80.org:123"
216 |
217 |
218 | def test_with_host_for_relative_url() -> None:
219 | with pytest.raises(ValueError):
220 | URL("path/to").with_host("example.com")
221 |
222 |
223 | def test_with_host_invalid_type() -> None:
224 | url = URL("http://example.com:123")
225 | with pytest.raises(TypeError):
226 | url.with_host(None) # type: ignore[arg-type]
227 |
228 |
229 | def test_with_port() -> None:
230 | url = URL("http://example.com")
231 | assert str(url.with_port(8888)) == "http://example.com:8888"
232 |
233 |
234 | def test_with_default_port_normalization() -> None:
235 | url = URL("http://example.com")
236 | assert str(url.with_scheme("https")) == "https://example.com"
237 | assert str(url.with_scheme("https").with_port(443)) == "https://example.com"
238 | assert str(url.with_port(443).with_scheme("https")) == "https://example.com"
239 |
240 |
241 | def test_with_custom_port_normalization() -> None:
242 | url = URL("http://example.com")
243 | u88 = url.with_port(88)
244 | assert str(u88) == "http://example.com:88"
245 | assert str(u88.with_port(80)) == "http://example.com"
246 | assert str(u88.with_scheme("https")) == "https://example.com:88"
247 |
248 |
249 | def test_with_explicit_port_normalization() -> None:
250 | url = URL("http://example.com")
251 | u80 = url.with_port(80)
252 | assert str(u80) == "http://example.com"
253 | assert str(u80.with_port(81)) == "http://example.com:81"
254 | assert str(u80.with_scheme("https")) == "https://example.com:80"
255 |
256 |
257 | def test_with_port_with_no_port() -> None:
258 | url = URL("http://example.com")
259 | assert str(url.with_port(None)) == "http://example.com"
260 |
261 |
262 | def test_with_port_ipv6() -> None:
263 | url = URL("http://[::1]:8080/")
264 | assert str(url.with_port(81)) == "http://[::1]:81/"
265 |
266 |
267 | def test_with_port_keeps_query_and_fragment() -> None:
268 | url = URL("http://example.com/?a=1#frag")
269 | assert str(url.with_port(8888)) == "http://example.com:8888/?a=1#frag"
270 |
271 |
272 | def test_with_port_percent_encoded() -> None:
273 | url = URL("http://user%name:pass%word@example.com/")
274 | assert str(url.with_port(808)) == "http://user%25name:pass%25word@example.com:808/"
275 |
276 |
277 | def test_with_port_for_relative_url() -> None:
278 | with pytest.raises(ValueError):
279 | URL("path/to").with_port(1234)
280 |
281 |
282 | def test_with_port_invalid_type() -> None:
283 | with pytest.raises(TypeError):
284 | URL("http://example.com").with_port("123") # type: ignore[arg-type]
285 | with pytest.raises(TypeError):
286 | URL("http://example.com").with_port(True)
287 |
288 |
289 | def test_with_port_invalid_range() -> None:
290 | with pytest.raises(ValueError):
291 | URL("http://example.com").with_port(-1)
292 |
--------------------------------------------------------------------------------
/towncrier.toml:
--------------------------------------------------------------------------------
1 | [tool.towncrier]
2 | package = "yarl"
3 | filename = "CHANGES.rst"
4 | directory = "CHANGES/"
5 | title_format = "v{version}"
6 | template = "CHANGES/.TEMPLATE.rst"
7 | issue_format = "{issue}"
8 |
9 | # NOTE: The types are declared because:
10 | # NOTE: - there is no mechanism to override just the value of
11 | # NOTE: `tool.towncrier.type.misc.showcontent`;
12 | # NOTE: - and, we want to declare extra non-default types for
13 | # NOTE: clarity and flexibility.
14 |
15 | [[tool.towncrier.section]]
16 | path = ""
17 |
18 | [[tool.towncrier.type]]
19 | # Something we deemed an improper, undesired behavior that got corrected
20 | # in the release to match pre-agreed expectations.
21 | directory = "bugfix"
22 | name = "Bug fixes"
23 | showcontent = true
24 |
25 | [[tool.towncrier.type]]
26 | # New behaviors, public APIs. That sort of stuff.
27 | directory = "feature"
28 | name = "Features"
29 | showcontent = true
30 |
31 | [[tool.towncrier.type]]
32 | # Declarations of future API removals and breaking changes in behavior.
33 | directory = "deprecation"
34 | name = "Deprecations (removal in next major release)"
35 | showcontent = true
36 |
37 | [[tool.towncrier.type]]
38 | # When something public gets removed in a breaking way. Could be
39 | # deprecated in an earlier release.
40 | directory = "breaking"
41 | name = "Removals and backward incompatible breaking changes"
42 | showcontent = true
43 |
44 | [[tool.towncrier.type]]
45 | # Notable updates to the documentation structure or build process.
46 | directory = "doc"
47 | name = "Improved documentation"
48 | showcontent = true
49 |
50 | [[tool.towncrier.type]]
51 | # Notes for downstreams about unobvious side effects and tooling. Changes
52 | # in the test invocation considerations and runtime assumptions.
53 | directory = "packaging"
54 | name = "Packaging updates and notes for downstreams"
55 | showcontent = true
56 |
57 | [[tool.towncrier.type]]
58 | # Stuff that affects the contributor experience, e.g. running tests,
59 | # building the docs, setting up the development environment.
60 | directory = "contrib"
61 | name = "Contributor-facing changes"
62 | showcontent = true
63 |
64 | [[tool.towncrier.type]]
65 | # Changes that are hard to assign to any of the above categories.
66 | directory = "misc"
67 | name = "Miscellaneous internal changes"
68 | showcontent = true
69 |
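# Illustrative note (added for exposition; the issue number below is
# hypothetical): with the configuration above, a bug-fix change note would be
# contributed as a small file such as `CHANGES/1234.bugfix.rst`, i.e.
# `<issue>.<type>` placed in the configured `directory`.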
--------------------------------------------------------------------------------
/url_benchmark.py:
--------------------------------------------------------------------------------
1 | import timeit
2 |
3 | from yarl import URL
4 |
5 | MANY_HOSTS = [f"www.domain{i}.tld" for i in range(10000)]
6 | MANY_URLS = [f"https://www.domain{i}.tld" for i in range(10000)]
7 | BASE_URL = URL("http://www.domain.tld")
8 | QUERY_URL = URL("http://www.domain.tld?query=1&query=2&query=3&query=4&query=5")
9 | URL_WITH_PATH = URL("http://www.domain.tld/req")
10 |
11 | print(
12 | "Build URL with host and path and port: {:.3f} sec".format(
13 | timeit.timeit(
14 | "URL.build(host='www.domain.tld', path='/req', port=1234)",
15 | globals={"URL": URL},
16 | number=100000,
17 | )
18 | )
19 | )
20 |
21 | print(
22 | "Build encoded URL with host and path and port: {:.3f} sec".format(
23 | timeit.timeit(
24 | "URL.build(host='www.domain.tld', path='/req', port=1234, encoded=True)",
25 | globals={"URL": URL},
26 | number=100000,
27 | )
28 | )
29 | )
30 |
31 | print(
32 | "Build URL with host: {:.3f} sec".format(
33 | timeit.timeit("URL.build(host='domain')", globals={"URL": URL}, number=100000)
34 | )
35 | )
36 |
37 | print(
38 | "Build URL with different hosts: {:.3f} sec".format(
39 | timeit.timeit(
40 | "for host in hosts: URL.build(host=host)",
41 | globals={"URL": URL, "hosts": MANY_HOSTS},
42 | number=10,
43 | )
44 | )
45 | )
46 |
47 | print(
48 | "Build URL with host and port: {:.3f} sec".format(
49 | timeit.timeit(
50 | "URL.build(host='www.domain.tld', port=1234)",
51 | globals={"URL": URL},
52 | number=100000,
53 | )
54 | )
55 | )
56 |
57 | print(
58 | "Make URL with host and path and port: {:.3f} sec".format(
59 | timeit.timeit(
60 | "URL('http://www.domain.tld:1234/req')", globals={"URL": URL}, number=100000
61 | )
62 | )
63 | )
64 |
65 | print(
66 | "Make encoded URL with host and path and port: {:.3f} sec".format(
67 | timeit.timeit(
68 | "URL('http://www.domain.tld:1234/req', encoded=True)",
69 | globals={"URL": URL},
70 | number=100000,
71 | )
72 | )
73 | )
74 |
75 | print(
76 | "Make URL with host and path: {:.3f} sec".format(
77 | timeit.timeit(
78 | "URL('http://www.domain.tld/req')", globals={"URL": URL}, number=100000
79 | )
80 | )
81 | )
82 |
83 | print(
84 | "Make URL with many hosts: {:.3f} sec".format(
85 | timeit.timeit(
86 | "for url in urls: URL(url)",
87 | globals={"URL": URL, "urls": MANY_URLS},
88 | number=10,
89 | )
90 | )
91 | )
92 |
93 |
94 | print(
95 | "Make URL with IPv4 Address and path and port: {:.3f} sec".format(
96 | timeit.timeit(
97 | "URL('http://127.0.0.1:1234/req')", globals={"URL": URL}, number=100000
98 | )
99 | )
100 | )
101 |
102 |
103 | print(
104 | "Make URL with IPv4 Address and path: {:.3f} sec".format(
105 | timeit.timeit(
106 | "URL('http://127.0.0.1/req')", globals={"URL": URL}, number=100000
107 | )
108 | )
109 | )
110 |
111 |
112 | print(
113 | "Make URL with IPv6 Address and path and port: {:.3f} sec".format(
114 | timeit.timeit(
115 | "URL('http://[::1]:1234/req')", globals={"URL": URL}, number=100000
116 | )
117 | )
118 | )
119 |
120 |
121 | print(
122 | "Make URL with IPv6 Address and path: {:.3f} sec".format(
123 | timeit.timeit("URL('http://[::1]/req')", globals={"URL": URL}, number=100000)
124 | )
125 | )
126 |
127 |
128 | print(
129 | "Make URL with query mapping: {:.3f} sec".format(
130 | timeit.timeit(
131 | "base_url.with_query("
132 | "{'a':'1','b':'2','c':'3','d':'4','e':'5'"
133 | ",'f':'6','g':'7','h':'8','i':'9','j':'10'}"
134 | ")",
135 | globals={"base_url": BASE_URL, "URL": URL},
136 | number=100000,
137 | )
138 | )
139 | )
140 |
141 |
142 | print(
143 | "Make URL with query sequence mapping: {:.3f} sec".format(
144 | timeit.timeit(
145 | "".join(
146 | [
147 | "base_url.with_query({",
148 | *[
149 | f"'{i}':('1','2','3','4','5','6','7','8','9','10'),"
150 | for i in range(10)
151 | ],
152 | "})",
153 | ]
154 | ),
155 | globals={"base_url": BASE_URL, "URL": URL},
156 | number=100000,
157 | )
158 | )
159 | )
160 |
161 |
162 | print(
163 | "Convert URL to string: {:.3f} sec".format(
164 | timeit.timeit(
165 | "str(base_url)",
166 | globals={"base_url": BASE_URL, "URL": URL},
167 | number=100000,
168 | )
169 | )
170 | )
171 |
172 |
173 | print(
174 | "Convert URL with path to string: {:.3f} sec".format(
175 | timeit.timeit(
176 | "str(url_with_path)",
177 | globals={"url_with_path": URL_WITH_PATH, "URL": URL},
178 | number=100000,
179 | )
180 | )
181 | )
182 |
183 |
184 | print(
185 | "Convert URL with query to string: {:.3f} sec".format(
186 | timeit.timeit(
187 | "str(query_url)",
188 | globals={"query_url": QUERY_URL, "URL": URL},
189 | number=100000,
190 | )
191 | )
192 | )
193 |
--------------------------------------------------------------------------------
/yarl/__init__.py:
--------------------------------------------------------------------------------
1 | from ._query import Query, QueryVariable, SimpleQuery
2 | from ._url import URL, cache_clear, cache_configure, cache_info
3 |
4 | __version__ = "1.20.1.dev0"
5 |
6 | __all__ = (
7 | "URL",
8 | "SimpleQuery",
9 | "QueryVariable",
10 | "Query",
11 | "cache_clear",
12 | "cache_configure",
13 | "cache_info",
14 | )
15 |
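# Illustrative usage sketch (added for exposition; not part of the original
# module). URL is the main entry point; the cache_* helpers tune yarl's
# internal encoding caches.
_example_url = URL("http://example.com/path?a=1")
assert str(_example_url.with_scheme("https")) == "https://example.com/path?a=1"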
--------------------------------------------------------------------------------
/yarl/_parse.py:
--------------------------------------------------------------------------------
1 | """URL parsing utilities."""
2 |
3 | import re
4 | import unicodedata
5 | from functools import lru_cache
6 | from typing import Union
7 | from urllib.parse import scheme_chars, uses_netloc
8 |
9 | from ._quoters import QUOTER, UNQUOTER_PLUS
10 |
11 | # Leading and trailing C0 control and space to be stripped per WHATWG spec.
12 | # == "".join([chr(i) for i in range(0, 0x20 + 1)])
13 | WHATWG_C0_CONTROL_OR_SPACE = (
14 | "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10"
15 | "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f "
16 | )
17 |
18 | # Unsafe bytes to be removed per WHATWG spec
19 | UNSAFE_URL_BYTES_TO_REMOVE = ["\t", "\r", "\n"]
20 | USES_AUTHORITY = frozenset(uses_netloc)
21 |
22 | SplitURLType = tuple[str, str, str, str, str]
23 |
24 |
25 | def split_url(url: str) -> SplitURLType:
26 | """Split URL into parts."""
27 | # Adapted from urllib.parse.urlsplit
28 | # Only lstrip url as some applications rely on preserving trailing space.
29 | # (https://url.spec.whatwg.org/#concept-basic-url-parser would strip both)
30 | url = url.lstrip(WHATWG_C0_CONTROL_OR_SPACE)
31 | for b in UNSAFE_URL_BYTES_TO_REMOVE:
32 | if b in url:
33 | url = url.replace(b, "")
34 |
35 | scheme = netloc = query = fragment = ""
36 | i = url.find(":")
37 | if i > 0 and url[0] in scheme_chars:
38 | for c in url[1:i]:
39 | if c not in scheme_chars:
40 | break
41 | else:
42 | scheme, url = url[:i].lower(), url[i + 1 :]
43 | has_hash = "#" in url
44 | has_question_mark = "?" in url
45 | if url[:2] == "//":
46 | delim = len(url) # position of end of domain part of url, default is end
47 | if has_hash and has_question_mark:
48 | delim_chars = "/?#"
49 | elif has_question_mark:
50 | delim_chars = "/?"
51 | elif has_hash:
52 | delim_chars = "/#"
53 | else:
54 | delim_chars = "/"
55 | for c in delim_chars: # look for delimiters; the order is NOT important
56 | wdelim = url.find(c, 2) # find first of this delim
57 | if wdelim >= 0 and wdelim < delim: # if found
58 | delim = wdelim # use earliest delim position
59 | netloc = url[2:delim]
60 | url = url[delim:]
61 | has_left_bracket = "[" in netloc
62 | has_right_bracket = "]" in netloc
63 | if (has_left_bracket and not has_right_bracket) or (
64 | has_right_bracket and not has_left_bracket
65 | ):
66 | raise ValueError("Invalid IPv6 URL")
67 | if has_left_bracket:
68 | bracketed_host = netloc.partition("[")[2].partition("]")[0]
69 | # Valid bracketed hosts are defined in
70 | # https://www.rfc-editor.org/rfc/rfc3986#page-49
71 | # https://url.spec.whatwg.org/
72 | if bracketed_host[0] == "v":
73 | if not re.match(r"\Av[a-fA-F0-9]+\..+\Z", bracketed_host):
74 | raise ValueError("IPvFuture address is invalid")
75 | elif ":" not in bracketed_host:
76 | raise ValueError("An IPv4 address cannot be in brackets")
77 | if has_hash:
78 | url, _, fragment = url.partition("#")
79 | if has_question_mark:
80 | url, _, query = url.partition("?")
81 | if netloc and not netloc.isascii():
82 | _check_netloc(netloc)
83 | return scheme, netloc, url, query, fragment
84 |
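# Illustrative sketch (added for exposition; not part of the original module):
# split_url() returns (scheme, netloc, path, query, fragment), lower-casing
# the scheme and leaving the parts otherwise untouched.
assert split_url("HTTP://user:pass@example.com:8080/path?q=1#frag") == (
    "http",
    "user:pass@example.com:8080",
    "/path",
    "q=1",
    "frag",
)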
85 |
86 | def _check_netloc(netloc: str) -> None:
87 | # Adapted from urllib.parse._checknetloc
88 | # looking for characters like \u2100 that expand to 'a/c'
89 | # IDNA uses NFKC equivalence, so normalize for this check
90 |
91 | # ignore characters already included
92 | # but not the surrounding text
93 | n = netloc.replace("@", "").replace(":", "").replace("#", "").replace("?", "")
94 | normalized_netloc = unicodedata.normalize("NFKC", n)
95 | if n == normalized_netloc:
96 | return
97 |     # Note that there are no unicode decompositions for the character '@', so
98 |     # it's currently impossible to have test coverage for this branch; however,
99 |     # if one is added in the future, we want to make sure it's still checked.
100 | for c in "/?#@:": # pragma: no branch
101 | if c in normalized_netloc:
102 | raise ValueError(
103 | f"netloc '{netloc}' contains invalid "
104 | "characters under NFKC normalization"
105 | )
106 |
107 |
108 | @lru_cache # match the same size as urlsplit
109 | def split_netloc(
110 | netloc: str,
111 | ) -> tuple[Union[str, None], Union[str, None], Union[str, None], Union[int, None]]:
112 | """Split netloc into username, password, host and port."""
113 | if "@" not in netloc:
114 | username: Union[str, None] = None
115 | password: Union[str, None] = None
116 | hostinfo = netloc
117 | else:
118 | userinfo, _, hostinfo = netloc.rpartition("@")
119 | username, have_password, password = userinfo.partition(":")
120 | if not have_password:
121 | password = None
122 |
123 | if "[" in hostinfo:
124 | _, _, bracketed = hostinfo.partition("[")
125 | hostname, _, port_str = bracketed.partition("]")
126 | _, _, port_str = port_str.partition(":")
127 | else:
128 | hostname, _, port_str = hostinfo.partition(":")
129 |
130 | if not port_str:
131 | return username or None, password, hostname or None, None
132 |
133 | try:
134 | port = int(port_str)
135 | except ValueError:
136 | raise ValueError("Invalid URL: port can't be converted to integer")
137 | if not (0 <= port <= 65535):
138 | raise ValueError("Port out of range 0-65535")
139 | return username or None, password, hostname or None, port
140 |
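# Illustrative sketch (added for exposition; not part of the original module):
# split_netloc() separates userinfo, host and port, reporting missing pieces
# as None.
assert split_netloc("user:pass@example.com:8080") == (
    "user",
    "pass",
    "example.com",
    8080,
)
assert split_netloc("example.com") == (None, None, "example.com", None)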
141 |
142 | def unsplit_result(
143 | scheme: str, netloc: str, url: str, query: str, fragment: str
144 | ) -> str:
145 | """Unsplit a URL without any normalization."""
146 | if netloc or (scheme and scheme in USES_AUTHORITY) or url[:2] == "//":
147 | if url and url[:1] != "/":
148 | url = f"{scheme}://{netloc}/{url}" if scheme else f"{scheme}:{url}"
149 | else:
150 | url = f"{scheme}://{netloc}{url}" if scheme else f"//{netloc}{url}"
151 | elif scheme:
152 | url = f"{scheme}:{url}"
153 | if query:
154 | url = f"{url}?{query}"
155 | return f"{url}#{fragment}" if fragment else url
156 |
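# Illustrative sketch (added for exposition; not part of the original module):
# unsplit_result() reassembles already-encoded parts without normalization.
assert (
    unsplit_result("https", "example.com", "/path", "a=1", "top")
    == "https://example.com/path?a=1#top"
)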
157 |
158 | @lru_cache # match the same size as urlsplit
159 | def make_netloc(
160 | user: Union[str, None],
161 | password: Union[str, None],
162 | host: Union[str, None],
163 | port: Union[int, None],
164 | encode: bool = False,
165 | ) -> str:
166 | """Make netloc from parts.
167 |
168 | The user and password are encoded if encode is True.
169 |
170 | The host must already be encoded with _encode_host.
171 | """
172 | if host is None:
173 | return ""
174 | ret = host
175 | if port is not None:
176 | ret = f"{ret}:{port}"
177 | if user is None and password is None:
178 | return ret
179 | if password is not None:
180 | if not user:
181 | user = ""
182 | elif encode:
183 | user = QUOTER(user)
184 | if encode:
185 | password = QUOTER(password)
186 | user = f"{user}:{password}"
187 | elif user and encode:
188 | user = QUOTER(user)
189 | return f"{user}@{ret}" if user else ret
190 |
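# Illustrative sketch (added for exposition; not part of the original module):
# make_netloc() rebuilds the authority component from its parts.
assert (
    make_netloc("john", "secret", "example.com", 8080)
    == "john:secret@example.com:8080"
)
assert make_netloc(None, None, "example.com", None) == "example.com"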
191 |
192 | def query_to_pairs(query_string: str) -> list[tuple[str, str]]:
193 | """Parse a query given as a string argument.
194 |
195 |     Works like urllib.parse.parse_qsl but keeps empty values.
196 | """
197 | pairs: list[tuple[str, str]] = []
198 | if not query_string:
199 | return pairs
200 | for k_v in query_string.split("&"):
201 | k, _, v = k_v.partition("=")
202 | pairs.append((UNQUOTER_PLUS(k), UNQUOTER_PLUS(v)))
203 | return pairs
204 |
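# Illustrative sketch (added for exposition; not part of the original module):
# query_to_pairs() keeps empty values and decodes "+" as a space.
assert query_to_pairs("a=1&b=hello+world&c") == [
    ("a", "1"),
    ("b", "hello world"),
    ("c", ""),
]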
--------------------------------------------------------------------------------
/yarl/_path.py:
--------------------------------------------------------------------------------
1 | """Utilities for working with paths."""
2 |
3 | from collections.abc import Sequence
4 | from contextlib import suppress
5 |
6 |
7 | def normalize_path_segments(segments: Sequence[str]) -> list[str]:
8 | """Drop '.' and '..' from a sequence of str segments"""
9 |
10 | resolved_path: list[str] = []
11 |
12 | for seg in segments:
13 | if seg == "..":
14 |             # ignore any ".." segments that would otherwise cause an
15 |             # IndexError when popped from resolved_path while
16 |             # resolving per RFC 3986
17 | with suppress(IndexError):
18 | resolved_path.pop()
19 | elif seg != ".":
20 | resolved_path.append(seg)
21 |
22 | if segments and segments[-1] in (".", ".."):
23 | # do some post-processing here.
24 | # if the last segment was a relative dir,
25 | # then we need to append the trailing '/'
26 | resolved_path.append("")
27 |
28 | return resolved_path
29 |
30 |
31 | def normalize_path(path: str) -> str:
32 | # Drop '.' and '..' from str path
33 | prefix = ""
34 | if path and path[0] == "/":
35 | # preserve the "/" root element of absolute paths, copying it to the
36 | # normalised output as per sections 5.2.4 and 6.2.2.3 of rfc3986.
37 | prefix = "/"
38 | path = path[1:]
39 |
40 | segments = path.split("/")
41 | return prefix + "/".join(normalize_path_segments(segments))
42 |
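# Illustrative sketch (added for exposition; not part of the original module):
# normalize_path() resolves "." and ".." while preserving the root, and a
# trailing "/" is kept when the last segment is "." or "..".
assert normalize_path("/a/b/../c/./d") == "/a/c/d"
assert normalize_path("/a/b/..") == "/a/"
assert normalize_path_segments(["a", "..", "b"]) == ["b"]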
--------------------------------------------------------------------------------
/yarl/_query.py:
--------------------------------------------------------------------------------
1 | """Query string handling."""
2 |
3 | import math
4 | from collections.abc import Iterable, Mapping, Sequence
5 | from typing import Any, SupportsInt, Union
6 |
7 | from multidict import istr
8 |
9 | from ._quoters import QUERY_PART_QUOTER, QUERY_QUOTER
10 |
11 | SimpleQuery = Union[str, SupportsInt, float]
12 | QueryVariable = Union[SimpleQuery, Sequence[SimpleQuery]]
13 | Query = Union[
14 | None, str, Mapping[str, QueryVariable], Sequence[tuple[str, QueryVariable]]
15 | ]
16 |
17 |
18 | def query_var(v: SimpleQuery) -> str:
19 | """Convert a query variable to a string."""
20 | cls = type(v)
21 | if cls is int: # Fast path for non-subclassed int
22 | return str(v)
23 | if isinstance(v, str):
24 | return v
25 | if isinstance(v, float):
26 | if math.isinf(v):
27 | raise ValueError("float('inf') is not supported")
28 | if math.isnan(v):
29 | raise ValueError("float('nan') is not supported")
30 | return str(float(v))
31 | if cls is not bool and isinstance(v, SupportsInt):
32 | return str(int(v))
33 | raise TypeError(
34 | "Invalid variable type: value "
35 | "should be str, int or float, got {!r} "
36 | "of type {}".format(v, cls)
37 | )
38 |
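# Illustrative sketch (added for exposition; not part of the original module):
# query_var() stringifies supported scalars; bool raises TypeError because it
# is deliberately treated as neither int nor float here.
assert query_var(1) == "1"
assert query_var(1.5) == "1.5"
assert query_var("x") == "x"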
39 |
40 | def get_str_query_from_sequence_iterable(
41 | items: Iterable[tuple[Union[str, istr], QueryVariable]],
42 | ) -> str:
43 | """Return a query string from a sequence of (key, value) pairs.
44 |
45 | value is a single value or a sequence of values for the key
46 |
47 | The sequence of values must be a list or tuple.
48 | """
49 | quoter = QUERY_PART_QUOTER
50 | pairs = [
51 | f"{quoter(k)}={quoter(v if type(v) is str else query_var(v))}"
52 | for k, val in items
53 | for v in (
54 | val if type(val) is not str and isinstance(val, (list, tuple)) else (val,)
55 | )
56 | ]
57 | return "&".join(pairs)
58 |
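# Illustrative sketch (added for exposition; not part of the original module):
# a list or tuple value expands into one key=value pair per element.
assert (
    get_str_query_from_sequence_iterable([("a", "1"), ("b", ("2", "3"))])
    == "a=1&b=2&b=3"
)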
59 |
60 | def get_str_query_from_iterable(
61 | items: Iterable[tuple[Union[str, istr], SimpleQuery]],
62 | ) -> str:
63 | """Return a query string from an iterable.
64 |
65 | The iterable must contain (key, value) pairs.
66 |
67 |     The values are not allowed to be sequences; only single values are
68 |     allowed. For sequences, use `get_str_query_from_sequence_iterable`.
69 | """
70 | quoter = QUERY_PART_QUOTER
71 | # A listcomp is used since listcomps are inlined on CPython 3.12+ and
72 | # they are a bit faster than a generator expression.
73 | pairs = [
74 | f"{quoter(k)}={quoter(v if type(v) is str else query_var(v))}" for k, v in items
75 | ]
76 | return "&".join(pairs)
77 |
78 |
79 | def get_str_query(*args: Any, **kwargs: Any) -> Union[str, None]:
80 | """Return a query string from supported args."""
81 | query: Union[str, Mapping[str, QueryVariable], None]
82 | if kwargs:
83 | if args:
84 | msg = "Either kwargs or single query parameter must be present"
85 | raise ValueError(msg)
86 | query = kwargs
87 | elif len(args) == 1:
88 | query = args[0]
89 | else:
90 | raise ValueError("Either kwargs or single query parameter must be present")
91 |
92 | if query is None:
93 | return None
94 | if not query:
95 | return ""
96 | if type(query) is dict:
97 | return get_str_query_from_sequence_iterable(query.items())
98 | if type(query) is str or isinstance(query, str):
99 | return QUERY_QUOTER(query)
100 | if isinstance(query, Mapping):
101 | return get_str_query_from_sequence_iterable(query.items())
102 | if isinstance(query, (bytes, bytearray, memoryview)): # type: ignore[unreachable]
103 | msg = "Invalid query type: bytes, bytearray and memoryview are forbidden"
104 | raise TypeError(msg)
105 | if isinstance(query, Sequence):
106 |         # We don't expect sequence values if we're given a list of pairs
107 |         # already; only mappings like the builtin `dict`, which can't have
108 |         # the same key pointing to multiple values, are allowed to use
109 |         # `get_str_query_from_sequence_iterable`.
110 | return get_str_query_from_iterable(query)
111 | raise TypeError(
112 | "Invalid query type: only str, mapping or "
113 | "sequence of (key, value) pairs is allowed"
114 | )
115 |
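# Illustrative sketch (added for exposition; not part of the original module):
# get_str_query() accepts a raw string, a mapping, a sequence of pairs or
# keyword arguments, and returns None only for an explicit None query.
assert get_str_query("a=1&b=2") == "a=1&b=2"
assert get_str_query({"a": "1", "b": "2"}) == "a=1&b=2"
assert get_str_query(a="1", b="2") == "a=1&b=2"
assert get_str_query(None) is None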
--------------------------------------------------------------------------------
/yarl/_quoters.py:
--------------------------------------------------------------------------------
1 | """Quoting and unquoting utilities for URL parts."""
2 |
3 | from typing import Union
4 | from urllib.parse import quote
5 |
6 | from ._quoting import _Quoter, _Unquoter
7 |
8 | QUOTER = _Quoter(requote=False)
9 | REQUOTER = _Quoter()
10 | PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False)
11 | PATH_REQUOTER = _Quoter(safe="@:", protected="/+")
12 | QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False)
13 | QUERY_REQUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True)
14 | QUERY_PART_QUOTER = _Quoter(safe="?/:@", qs=True, requote=False)
15 | FRAGMENT_QUOTER = _Quoter(safe="?/:@", requote=False)
16 | FRAGMENT_REQUOTER = _Quoter(safe="?/:@")
17 |
18 | UNQUOTER = _Unquoter()
19 | PATH_UNQUOTER = _Unquoter(unsafe="+")
20 | PATH_SAFE_UNQUOTER = _Unquoter(ignore="/%", unsafe="+")
21 | QS_UNQUOTER = _Unquoter(qs=True)
22 | UNQUOTER_PLUS = _Unquoter(plus=True) # to match urllib.parse.unquote_plus
23 |
24 |
25 | def human_quote(s: Union[str, None], unsafe: str) -> Union[str, None]:
26 | if not s:
27 | return s
28 | for c in "%" + unsafe:
29 | if c in s:
30 | s = s.replace(c, f"%{ord(c):02X}")
31 | if s.isprintable():
32 | return s
33 | return "".join(c if c.isprintable() else quote(c) for c in s)
34 |
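# Illustrative sketch (added for exposition; not part of the original module):
# the module-level quoters percent-encode values on the way in, the unquoters
# decode them on the way out, and human_quote() keeps a value readable while
# escaping only the characters given as unsafe.
assert PATH_QUOTER("/a b") == "/a%20b"
assert UNQUOTER("a%20b") == "a b"
assert human_quote("a b#c", "#") == "a b%23c"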
--------------------------------------------------------------------------------
/yarl/_quoting.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from typing import TYPE_CHECKING
4 |
5 | __all__ = ("_Quoter", "_Unquoter")
6 |
7 |
8 | NO_EXTENSIONS = bool(os.environ.get("YARL_NO_EXTENSIONS")) # type: bool
9 | if sys.implementation.name != "cpython":
10 | NO_EXTENSIONS = True
11 |
12 |
13 | if TYPE_CHECKING or NO_EXTENSIONS:
14 | from ._quoting_py import _Quoter, _Unquoter
15 | else:
16 | try:
17 | from ._quoting_c import _Quoter, _Unquoter
18 | except ImportError: # pragma: no cover
19 | from ._quoting_py import _Quoter, _Unquoter # type: ignore[assignment]
20 |
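# Illustrative note (added for exposition; not part of the original module):
# setting the environment variable before yarl is first imported opts out of
# the C extension and selects the pure-Python quoting implementation, e.g.:
#
#     import os
#     os.environ["YARL_NO_EXTENSIONS"] = "1"  # must be set before importing yarl
#     import yarl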
--------------------------------------------------------------------------------
/yarl/_quoting_py.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import re
3 | from string import ascii_letters, ascii_lowercase, digits
4 | from typing import Union, cast, overload
5 |
6 | BASCII_LOWERCASE = ascii_lowercase.encode("ascii")
7 | BPCT_ALLOWED = {f"%{i:02X}".encode("ascii") for i in range(256)}
8 | GEN_DELIMS = ":/?#[]@"
9 | SUB_DELIMS_WITHOUT_QS = "!$'()*,"
10 | SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + "+&=;"
11 | RESERVED = GEN_DELIMS + SUB_DELIMS
12 | UNRESERVED = ascii_letters + digits + "-._~"
13 | ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS
14 |
15 |
16 | _IS_HEX = re.compile(b"[A-Z0-9][A-Z0-9]")
17 | _IS_HEX_STR = re.compile("[A-Fa-f0-9][A-Fa-f0-9]")
18 |
19 | utf8_decoder = codecs.getincrementaldecoder("utf-8")
20 |
21 |
22 | class _Quoter:
23 | def __init__(
24 | self,
25 | *,
26 | safe: str = "",
27 | protected: str = "",
28 | qs: bool = False,
29 | requote: bool = True,
30 | ) -> None:
31 | self._safe = safe
32 | self._protected = protected
33 | self._qs = qs
34 | self._requote = requote
35 |
36 | @overload
37 | def __call__(self, val: str) -> str: ...
38 | @overload
39 | def __call__(self, val: None) -> None: ...
40 | def __call__(self, val: Union[str, None]) -> Union[str, None]:
41 | if val is None:
42 | return None
43 | if not isinstance(val, str):
44 | raise TypeError("Argument should be str")
45 | if not val:
46 | return ""
47 | bval = val.encode("utf8", errors="ignore")
48 | ret = bytearray()
49 | pct = bytearray()
50 | safe = self._safe
51 | safe += ALLOWED
52 | if not self._qs:
53 | safe += "+&=;"
54 | safe += self._protected
55 | bsafe = safe.encode("ascii")
56 | idx = 0
57 | while idx < len(bval):
58 | ch = bval[idx]
59 | idx += 1
60 |
61 | if pct:
62 | if ch in BASCII_LOWERCASE:
63 | ch = ch - 32 # convert to uppercase
64 | pct.append(ch)
65 | if len(pct) == 3: # pragma: no branch # peephole optimizer
66 | buf = pct[1:]
67 | if not _IS_HEX.match(buf):
68 | ret.extend(b"%25")
69 | pct.clear()
70 | idx -= 2
71 | continue
72 | try:
73 | unquoted = chr(int(pct[1:].decode("ascii"), base=16))
74 | except ValueError:
75 | ret.extend(b"%25")
76 | pct.clear()
77 | idx -= 2
78 | continue
79 |
80 | if unquoted in self._protected:
81 | ret.extend(pct)
82 | elif unquoted in safe:
83 | ret.append(ord(unquoted))
84 | else:
85 | ret.extend(pct)
86 | pct.clear()
87 |
88 | # special case, if we have only one char after "%"
89 | elif len(pct) == 2 and idx == len(bval):
90 | ret.extend(b"%25")
91 | pct.clear()
92 | idx -= 1
93 |
94 | continue
95 |
96 | elif ch == ord("%") and self._requote:
97 | pct.clear()
98 | pct.append(ch)
99 |
100 | # special case if "%" is last char
101 | if idx == len(bval):
102 | ret.extend(b"%25")
103 |
104 | continue
105 |
106 | if self._qs and ch == ord(" "):
107 | ret.append(ord("+"))
108 | continue
109 | if ch in bsafe:
110 | ret.append(ch)
111 | continue
112 |
113 | ret.extend((f"%{ch:02X}").encode("ascii"))
114 |
115 | ret2 = ret.decode("ascii")
116 | if ret2 == val:
117 | return val
118 | return ret2
119 |
120 |
121 | class _Unquoter:
122 | def __init__(
123 | self,
124 | *,
125 | ignore: str = "",
126 | unsafe: str = "",
127 | qs: bool = False,
128 | plus: bool = False,
129 | ) -> None:
130 | self._ignore = ignore
131 | self._unsafe = unsafe
132 | self._qs = qs
133 | self._plus = plus # to match urllib.parse.unquote_plus
134 | self._quoter = _Quoter()
135 | self._qs_quoter = _Quoter(qs=True)
136 |
137 | @overload
138 | def __call__(self, val: str) -> str: ...
139 | @overload
140 | def __call__(self, val: None) -> None: ...
141 | def __call__(self, val: Union[str, None]) -> Union[str, None]:
142 | if val is None:
143 | return None
144 | if not isinstance(val, str):
145 | raise TypeError("Argument should be str")
146 | if not val:
147 | return ""
148 | decoder = cast(codecs.BufferedIncrementalDecoder, utf8_decoder())
149 | ret = []
150 | idx = 0
151 | while idx < len(val):
152 | ch = val[idx]
153 | idx += 1
154 | if ch == "%" and idx <= len(val) - 2:
155 | pct = val[idx : idx + 2]
156 | if _IS_HEX_STR.fullmatch(pct):
157 | b = bytes([int(pct, base=16)])
158 | idx += 2
159 | try:
160 | unquoted = decoder.decode(b)
161 | except UnicodeDecodeError:
162 | start_pct = idx - 3 - len(decoder.buffer) * 3
163 | ret.append(val[start_pct : idx - 3])
164 | decoder.reset()
165 | try:
166 | unquoted = decoder.decode(b)
167 | except UnicodeDecodeError:
168 | ret.append(val[idx - 3 : idx])
169 | continue
170 | if not unquoted:
171 | continue
172 | if self._qs and unquoted in "+=&;":
173 | to_add = self._qs_quoter(unquoted)
174 | if to_add is None: # pragma: no cover
175 | raise RuntimeError("Cannot quote None")
176 | ret.append(to_add)
177 | elif unquoted in self._unsafe or unquoted in self._ignore:
178 | to_add = self._quoter(unquoted)
179 | if to_add is None: # pragma: no cover
180 | raise RuntimeError("Cannot quote None")
181 | ret.append(to_add)
182 | else:
183 | ret.append(unquoted)
184 | continue
185 |
186 | if decoder.buffer:
187 | start_pct = idx - 1 - len(decoder.buffer) * 3
188 | ret.append(val[start_pct : idx - 1])
189 | decoder.reset()
190 |
191 | if ch == "+":
192 | if (not self._qs and not self._plus) or ch in self._unsafe:
193 | ret.append("+")
194 | else:
195 | ret.append(" ")
196 | continue
197 |
198 | if ch in self._unsafe:
199 | ret.append("%")
200 | h = hex(ord(ch)).upper()[2:]
201 | for ch in h:
202 | ret.append(ch)
203 | continue
204 |
205 | ret.append(ch)
206 |
207 | if decoder.buffer:
208 | ret.append(val[-len(decoder.buffer) * 3 :])
209 |
210 | ret2 = "".join(ret)
211 | if ret2 == val:
212 | return val
213 | return ret2
214 |
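# Illustrative sketch (added for exposition; not part of the original module):
# a query-string quoter turns spaces into "+" and escapes separators; the
# query-string unquoter deliberately keeps separators escaped, while a plain
# unquoter with plus=True fully decodes them.
assert _Quoter(qs=True)("a b&c") == "a+b%26c"
assert _Unquoter(qs=True)("a+b%26c") == "a b%26c"
assert _Unquoter(plus=True)("a+b%26c") == "a b&c"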
--------------------------------------------------------------------------------
/yarl/py.typed:
--------------------------------------------------------------------------------
1 | # Placeholder
2 |
--------------------------------------------------------------------------------