├── .git_archival.txt
├── .gitattributes
├── .github
├── actions
│ └── cache-keys
│ │ └── action.yml
├── reusables
│ └── tox-dev
│ │ └── workflow
│ │ └── reusable-tox
│ │ └── hooks
│ │ ├── post-src-checkout
│ │ └── action.yml
│ │ ├── post-tox-run
│ │ └── action.yml
│ │ └── prepare-for-tox-run
│ │ └── action.yml
└── workflows
│ ├── ci-cd.yml
│ └── scheduled-runs.yml
├── .gitignore
├── .python-version
├── LICENSE
├── Makefile
├── README.rst
├── examples
├── abstract_namespace.py
├── docker-info.py
└── simple-http.py
├── pyproject.toml
├── pytest.ini
├── requests_unixsocket
├── __init__.py
├── adapters.py
├── tests
│ └── test_requests_unixsocket.py
└── testutils.py
├── tox.ini
└── toxfile.py
/.git_archival.txt:
--------------------------------------------------------------------------------
1 | node: 94f5fcfeffc9f1d3e05a18835c41863974f10c4b
2 | node-date: 2025-05-05T01:28:05+02:00
3 | describe-name: v0.4.1-7-g94f5fcf
4 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | .git_archival.txt export-subst
2 |
--------------------------------------------------------------------------------
/.github/actions/cache-keys/action.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | outputs:
4 | cache-key-for-dep-files:
5 | description: >-
6 | A cache key string derived from the dependency declaration files.
7 | value: ${{ steps.calc-cache-key-files.outputs.files-hash-key }}
8 |
9 | runs:
10 | using: composite
11 | steps:
12 | - name: >-
13 | Calculate dependency files' combined hash value
14 | for use in the cache key
15 | id: calc-cache-key-files
16 | run: |
17 | from os import environ
18 | from pathlib import Path
19 |
20 | FILE_APPEND_MODE = 'a'
21 |
22 | files_derived_hash = '${{
23 | hashFiles(
24 | 'tox.ini',
25 | 'pyproject.toml',
26 | '.pre-commit-config.yaml',
27 | 'pytest.ini',
28 | 'dependencies/**/*'
29 | )
30 | }}'
31 |
32 | print(f'Computed file-derived hash is {files_derived_hash}.')
33 |
34 | with Path(environ['GITHUB_OUTPUT']).open(
35 | mode=FILE_APPEND_MODE,
36 | ) as outputs_file:
37 | print(
38 | f'files-hash-key={files_derived_hash}',
39 | file=outputs_file,
40 | )
41 | shell: python
42 |
43 | ...
44 |
--------------------------------------------------------------------------------
/.github/reusables/tox-dev/workflow/reusable-tox/hooks/post-src-checkout/action.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | inputs:
4 | calling-job-context:
5 | description: A JSON with the calling job inputs
6 | type: string
7 | job-dependencies-context:
8 | default: >-
9 | {}
10 | description: >-
11 | The `$ {{ needs }}` context passed from the calling workflow
12 | encoded as a JSON string. The caller is expected to form this
13 | input as follows:
14 | `job-dependencies-context: $ {{ toJSON(needs) }}`.
15 | required: false
16 | type: string
17 |
18 | runs:
19 | using: composite
20 | steps:
21 | - name: Log setting up pre-commit cache
22 | if: fromJSON(inputs.calling-job-context).toxenv == 'pre-commit'
23 | run: >-
24 | >&2 echo Caching ~/.cache/pre-commit based on
25 | the contents of '.pre-commit-config.yaml'...
26 | shell: bash
27 | - name: Cache pre-commit.com virtualenvs
28 | if: fromJSON(inputs.calling-job-context).toxenv == 'pre-commit'
29 | uses: actions/cache@v4
30 | with:
31 | path: ~/.cache/pre-commit
32 | key: >-
33 | ${{
34 | runner.os
35 | }}-pre-commit-${{
36 | hashFiles('.pre-commit-config.yaml')
37 | }}
38 |
39 | ...
40 |
--------------------------------------------------------------------------------
/.github/reusables/tox-dev/workflow/reusable-tox/hooks/post-tox-run/action.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | inputs:
4 | calling-job-context:
5 | description: A JSON with the calling job inputs
6 | type: string
7 | job-dependencies-context:
8 | default: >-
9 | {}
10 | description: >-
11 | The `$ {{ needs }}` context passed from the calling workflow
12 | encoded as a JSON string. The caller is expected to form this
13 | input as follows:
14 | `job-dependencies-context: $ {{ toJSON(needs) }}`.
15 | required: false
16 | type: string
17 |
18 | runs:
19 | using: composite
20 | steps:
21 | - name: Verify that the artifacts with expected names got created
22 | if: fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
23 | run: >
24 | # Verify that the artifacts with expected names got created
25 |
26 |
27 | ls -1
28 | 'dist/${{
29 | fromJSON(
30 | inputs.job-dependencies-context
31 | ).pre-setup.outputs.sdist-artifact-name
32 | }}'
33 | 'dist/${{
34 | fromJSON(
35 | inputs.job-dependencies-context
36 | ).pre-setup.outputs.wheel-artifact-name
37 | }}'
38 | shell: bash
39 | - name: Store the distribution packages
40 | if: fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
41 | uses: actions/upload-artifact@v4
42 | with:
43 | name: >-
44 | ${{
45 | fromJSON(
46 | inputs.job-dependencies-context
47 | ).pre-setup.outputs.dists-artifact-name
48 | }}
49 | # NOTE: Exact expected file names are specified here
50 | # NOTE: as a safety measure — if anything weird ends
51 | # NOTE: up being in this dir or not all dists are
52 | # NOTE: produced, this will fail the workflow.
53 | path: |
54 | dist/${{
55 | fromJSON(
56 | inputs.job-dependencies-context
57 | ).pre-setup.outputs.sdist-artifact-name
58 | }}
59 | dist/${{
60 | fromJSON(
61 | inputs.job-dependencies-context
62 | ).pre-setup.outputs.wheel-artifact-name
63 | }}
64 | retention-days: >-
65 | ${{
66 | fromJSON(
67 | fromJSON(
68 | inputs.job-dependencies-context
69 | ).pre-setup.outputs.release-requested
70 | )
71 | && 90
72 | || 30
73 | }}
74 |
75 | ...
76 |
--------------------------------------------------------------------------------
/.github/reusables/tox-dev/workflow/reusable-tox/hooks/prepare-for-tox-run/action.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | inputs:
4 | calling-job-context:
5 | description: A JSON with the calling job inputs
6 | type: string
7 | job-dependencies-context:
8 | default: >-
9 | {}
10 | description: >-
11 | The `$ {{ needs }}` context passed from the calling workflow
12 | encoded as a JSON string. The caller is expected to form this
13 | input as follows:
14 | `job-dependencies-context: $ {{ toJSON(needs) }}`.
15 | required: false
16 | type: string
17 |
18 | runs:
19 | using: composite
20 | steps:
21 | - name: Drop Git tags from HEAD for non-tag-create events
22 | if: >-
23 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
24 | && !fromJSON(
25 | fromJSON(
26 | inputs.job-dependencies-context
27 | ).pre-setup.outputs.release-requested
28 | )
29 | run: >-
30 | git tag --points-at HEAD
31 | |
32 | xargs git tag --delete
33 | shell: bash
34 |
35 | - name: Setup git user as [bot]
36 | if: >-
37 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
38 | && (
39 | fromJSON(
40 | fromJSON(
41 | inputs.job-dependencies-context
42 | ).pre-setup.outputs.release-requested
43 | )
44 | || fromJSON(
45 | fromJSON(
46 | inputs.job-dependencies-context
47 | ).pre-setup.outputs.is-untagged-devel
48 | )
49 | )
50 | uses: fregante/setup-git-user@v2
51 | - name: >-
52 | Tag the release in the local Git repo
53 | as ${{
54 | fromJSON(
55 | inputs.job-dependencies-context
56 | ).pre-setup.outputs.git-tag
57 | }}
58 | for setuptools-scm to set the desired version
59 | if: >-
60 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
61 | && fromJSON(
62 | fromJSON(
63 | inputs.job-dependencies-context
64 | ).pre-setup.outputs.release-requested
65 | )
66 | run: >-
67 | git tag
68 | -m '${{
69 | fromJSON(
70 | inputs.job-dependencies-context
71 | ).pre-setup.outputs.git-tag
72 | }}'
73 | '${{
74 | fromJSON(
75 | inputs.job-dependencies-context
76 | ).pre-setup.outputs.git-tag
77 | }}'
78 | --
79 | ${{ fromJSON(inputs.calling-job-context).checkout-src-git-committish }}
80 | shell: bash
81 |
82 | - name: Install tomlkit Python distribution package
83 | if: >-
84 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
85 | && fromJSON(
86 | fromJSON(
87 | inputs.job-dependencies-context
88 | ).pre-setup.outputs.is-untagged-devel
89 | )
90 | run: >-
91 | python -m pip install --user tomlkit
92 | shell: bash
93 | - name: Instruct setuptools-scm not to add a local version part
94 | if: >-
95 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
96 | && fromJSON(
97 | fromJSON(
98 | inputs.job-dependencies-context
99 | ).pre-setup.outputs.is-untagged-devel
100 | )
101 | run: |
102 | from pathlib import Path
103 |
104 | import tomlkit
105 |
106 | pyproject_toml_path = Path.cwd() / 'pyproject.toml'
107 | pyproject_toml_txt = pyproject_toml_path.read_text()
108 | pyproject_toml = tomlkit.loads(pyproject_toml_txt)
109 | setuptools_scm_section = pyproject_toml['tool']['setuptools_scm']
110 | setuptools_scm_section['local_scheme'] = 'no-local-version'
111 | patched_pyproject_toml_txt = tomlkit.dumps(pyproject_toml)
112 | pyproject_toml_path.write_text(patched_pyproject_toml_txt)
113 | shell: python
114 | - name: Pretend that pyproject.toml is unchanged
115 | if: >-
116 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists'
117 | && fromJSON(
118 | fromJSON(
119 | inputs.job-dependencies-context
120 | ).pre-setup.outputs.is-untagged-devel
121 | )
122 | run: |
123 | git diff --color=always
124 | git update-index --assume-unchanged pyproject.toml
125 | shell: bash
126 |
127 | ...
128 |
--------------------------------------------------------------------------------
/.github/workflows/ci-cd.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: 🧪
4 |
5 | on:
6 | merge_group:
7 | push: # publishes to TestPyPI on pushes to the main branch
8 | branches-ignore:
9 | - dependabot/** # Dependabot always creates PRs
10 | - gh-readonly-queue/** # Temporary merge queue-related GH-made branches
11 | - maintenance/pip-tools-constraint-lockfiles # Lock files through PRs
12 | - maintenance/pip-tools-constraint-lockfiles-** # Lock files through PRs
13 | - patchback/backports/** # Patchback always creates PRs
14 | - pre-commit-ci-update-config # pre-commit.ci always creates a PR
15 | pull_request:
16 | paths-ignore: # changes to the cron workflow are triggered through it
17 | - .github/workflows/scheduled-runs.yml
18 | types:
19 | - opened # default
20 | - synchronize # default
21 | - reopened # default
22 | - ready_for_review # used in PRs created from GitHub Actions workflows
23 | workflow_call: # a way to embed the main tests
24 | workflow_dispatch:
25 | inputs:
26 | release-version:
27 | # github.event_name == 'workflow_dispatch'
28 | # && github.event.inputs.release-version
29 | description: >-
30 | Target PEP440-compliant version to release.
31 | Please, don't prepend `v`.
32 | required: true
33 | type: string
34 | release-committish:
35 | # github.event_name == 'workflow_dispatch'
36 | # && github.event.inputs.release-committish
37 | default: ''
38 | description: >-
39 | The commit to be released to PyPI and tagged
40 | in Git as `release-version`. Normally, you
41 | should keep this empty.
42 | type: string
43 | YOLO:
44 | default: false
45 | description: >-
46 | Set this flag to disregard the outcome of the
47 | test stage. The test results will block the
48 | release otherwise. Only use this under
49 | extraordinary circumstances to ignore the test
50 | failures and cut the release regardless.
51 | type: boolean
52 |
53 | concurrency:
54 | group: >-
55 | ${{
56 | github.workflow
57 | }}-${{
58 | github.ref_type
59 | }}-${{
60 | github.event.pull_request.number || github.sha
61 | }}
62 | cancel-in-progress: true
63 |
64 | env:
65 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it
66 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement
67 | PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message
68 | PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message
69 | PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message
70 | PRE_COMMIT_COLOR: always
71 | PROJECT_NAME: requests-unixsocket
72 | PUBLISHING_TO_TESTPYPI_ENABLED: true
73 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest`
74 | PYTHONIOENCODING: utf-8
75 | PYTHONUTF8: 1
76 | TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation
77 | TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests
78 | FORCE_COLOR
79 | MYPY_FORCE_COLOR
80 | NO_COLOR
81 | PIP_DISABLE_PIP_VERSION_CHECK
82 | PIP_NO_PYTHON_VERSION_WARNING
83 | PIP_NO_WARN_SCRIPT_LOCATION
84 | PRE_COMMIT_COLOR
85 | PY_COLORS
86 | PYTEST_THEME
87 | PYTEST_THEME_MODE
88 | PYTHONIOENCODING
89 | PYTHONLEGACYWINDOWSSTDIO
90 | PYTHONUTF8
91 | UPSTREAM_REPOSITORY_ID: >-
92 | 27133968
93 |
94 | run-name: >-
95 | ${{
96 | github.event_name == 'workflow_dispatch'
97 | && format('📦 Releasing v{0}...', github.event.inputs.release-version)
98 | || ''
99 | }}
100 | ${{
101 | github.event.pull_request.number && '🔀 PR' || ''
102 | }}${{
103 | !github.event.pull_request.number && '🌱 Commit' || ''
104 | }}
105 | ${{ github.event.pull_request.number || github.sha }}
106 | triggered by: ${{ github.event_name }} of ${{
107 | github.ref
108 | }} ${{
109 | github.ref_type
110 | }}
111 | (workflow run ID: ${{
112 | github.run_id
113 | }}; number: ${{
114 | github.run_number
115 | }}; attempt: ${{
116 | github.run_attempt
117 | }})
118 |
119 | jobs:
120 | pre-setup:
121 | name: ⚙️ Pre-set global build settings
122 |
123 | runs-on: ubuntu-latest
124 |
125 | timeout-minutes: 1
126 |
127 | defaults:
128 | run:
129 | shell: python
130 |
131 | outputs:
132 | # NOTE: These aren't env vars because the `${{ env }}` context is
133 | # NOTE: inaccessible when passing inputs to reusable workflows.
134 | dists-artifact-name: python-package-distributions
135 | dist-version: >-
136 | ${{
137 | steps.request-check.outputs.release-requested == 'true'
138 | && github.event.inputs.release-version
139 | || steps.scm-version.outputs.dist-version
140 | }}
141 | is-untagged-devel: >-
142 | ${{ steps.untagged-check.outputs.is-untagged-devel || false }}
143 | release-requested: >-
144 | ${{
145 | steps.request-check.outputs.release-requested || false
146 | }}
147 | is-yolo-mode: >-
148 | ${{
149 | (
150 | steps.request-check.outputs.release-requested == 'true'
151 | && github.event.inputs.YOLO
152 | )
153 | && true || false
154 | }}
155 | cache-key-for-dep-files: >-
156 | ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }}
157 | git-tag: ${{ steps.git-tag.outputs.tag }}
158 | sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }}
159 | wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }}
160 | upstream-repository-id: ${{ env.UPSTREAM_REPOSITORY_ID }}
161 | publishing-to-testpypi-enabled: ${{ env.PUBLISHING_TO_TESTPYPI_ENABLED }}
162 | is-debug-mode: ${{ toJSON(runner.debug == '1') }}
163 | steps:
164 | - name: Switch to using Python 3.13 by default
165 | uses: actions/setup-python@v5
166 | with:
167 | python-version: 3.13
168 | - name: >-
169 | Mark the build as untagged '${{
170 | github.event.repository.default_branch
171 | }}' branch build
172 | id: untagged-check
173 | if: >-
174 | github.event_name == 'push' &&
175 | github.ref == format(
176 | 'refs/heads/{0}', github.event.repository.default_branch
177 | )
178 | run: |
179 | from os import environ
180 | from pathlib import Path
181 |
182 | FILE_APPEND_MODE = 'a'
183 |
184 | with Path(environ['GITHUB_OUTPUT']).open(
185 | mode=FILE_APPEND_MODE,
186 | ) as outputs_file:
187 | print('is-untagged-devel=true', file=outputs_file)
188 | - name: Mark the build as "release request"
189 | id: request-check
190 | if: github.event_name == 'workflow_dispatch'
191 | run: |
192 | from os import environ
193 | from pathlib import Path
194 |
195 | FILE_APPEND_MODE = 'a'
196 |
197 | with Path(environ['GITHUB_OUTPUT']).open(
198 | mode=FILE_APPEND_MODE,
199 | ) as outputs_file:
200 | print('release-requested=true', file=outputs_file)
201 | - name: Check out src from Git
202 | if: >-
203 | steps.request-check.outputs.release-requested != 'true'
204 | uses: actions/checkout@v4
205 | with:
206 | fetch-depth: 0
207 | ref: ${{ github.event.inputs.release-committish }}
208 | - name: >-
209 | Calculate dependency files' combined hash value
210 | for use in the cache key
211 | if: >-
212 | steps.request-check.outputs.release-requested != 'true'
213 | id: calc-cache-key-files
214 | uses: ./.github/actions/cache-keys
215 | - name: Set up pip cache
216 | if: >-
217 | steps.request-check.outputs.release-requested != 'true'
218 | uses: re-actors/cache-python-deps@release/v1
219 | with:
220 | cache-key-for-dependency-files: >-
221 | ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }}
222 | - name: Drop Git tags from HEAD for non-release requests
223 | if: >-
224 | steps.request-check.outputs.release-requested != 'true'
225 | run: >-
226 | git tag --points-at HEAD
227 | |
228 | xargs git tag --delete
229 | shell: bash
230 | - name: Set up versioning prerequisites
231 | if: >-
232 | steps.request-check.outputs.release-requested != 'true'
233 | run: >-
234 | python -m
235 | pip install
236 | --user
237 | setuptools-scm
238 | shell: bash
239 | - name: Set the current dist version from Git
240 | if: steps.request-check.outputs.release-requested != 'true'
241 | id: scm-version
242 | run: |
243 | from os import environ
244 | from pathlib import Path
245 |
246 | import setuptools_scm
247 |
248 | FILE_APPEND_MODE = 'a'
249 |
250 | ver = setuptools_scm.get_version(
251 | ${{
252 | steps.untagged-check.outputs.is-untagged-devel == 'true'
253 | && 'local_scheme="no-local-version"' || ''
254 | }}
255 | )
256 | with Path(environ['GITHUB_OUTPUT']).open(
257 | mode=FILE_APPEND_MODE,
258 | ) as outputs_file:
259 | print(f'dist-version={ver}', file=outputs_file)
260 | print(
261 | f'dist-version-for-filenames={ver.replace("+", "-")}',
262 | file=outputs_file,
263 | )
264 | - name: Set the target Git tag
265 | id: git-tag
266 | run: |
267 | from os import environ
268 | from pathlib import Path
269 |
270 | FILE_APPEND_MODE = 'a'
271 |
272 | with Path(environ['GITHUB_OUTPUT']).open(
273 | mode=FILE_APPEND_MODE,
274 | ) as outputs_file:
275 | print(
276 | "tag=v${{
277 | steps.request-check.outputs.release-requested == 'true'
278 | && github.event.inputs.release-version
279 | || steps.scm-version.outputs.dist-version
280 | }}",
281 | file=outputs_file,
282 | )
283 | - name: Set the expected dist artifact names
284 | id: artifact-name
285 | run: |
286 | from os import environ
287 | from pathlib import Path
288 |
289 | FILE_APPEND_MODE = 'a'
290 |
291 | whl_file_prj_base_name = '${{ env.PROJECT_NAME }}'.replace('-', '_')
292 | sdist_file_prj_base_name = whl_file_prj_base_name.replace('.', '_')
293 |
294 | with Path(environ['GITHUB_OUTPUT']).open(
295 | mode=FILE_APPEND_MODE,
296 | ) as outputs_file:
297 | print(
298 | f"sdist={sdist_file_prj_base_name !s}-${{
299 | steps.request-check.outputs.release-requested == 'true'
300 | && github.event.inputs.release-version
301 | || steps.scm-version.outputs.dist-version
302 | }}.tar.gz",
303 | file=outputs_file,
304 | )
305 | print(
306 | f"wheel={whl_file_prj_base_name !s}-${{
307 | steps.request-check.outputs.release-requested == 'true'
308 | && github.event.inputs.release-version
309 | || steps.scm-version.outputs.dist-version
310 | }}-py3-none-any.whl",
311 | file=outputs_file,
312 | )
313 |
314 | build:
315 | name: >-
316 | 📦 ${{ needs.pre-setup.outputs.git-tag }}
317 | [mode: ${{
318 | fromJSON(needs.pre-setup.outputs.is-untagged-devel)
319 | && 'test' || ''
320 | }}${{
321 | fromJSON(needs.pre-setup.outputs.release-requested)
322 | && 'release' || ''
323 | }}${{
324 | (
325 | !fromJSON(needs.pre-setup.outputs.is-untagged-devel)
326 | && !fromJSON(needs.pre-setup.outputs.release-requested)
327 | ) && 'nightly' || ''
328 | }}]
329 | needs:
330 | - pre-setup
331 |
332 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length
333 | with:
334 | cache-key-for-dependency-files: >-
335 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }}
336 | check-name: Build dists under 🐍3.13
337 | checkout-src-git-committish: >-
338 | ${{ github.event.inputs.release-committish }}
339 | checkout-src-git-fetch-depth: >-
340 | ${{
341 | fromJSON(needs.pre-setup.outputs.release-requested)
342 | && 1
343 | || 0
344 | }}
345 | job-dependencies-context: >- # context for hooks
346 | ${{ toJSON(needs) }}
347 | python-version: 3.13
348 | runner-vm-os: ubuntu-latest
349 | timeout-minutes: 2
350 | toxenv: build-dists
351 | xfail: false
352 |
353 | lint:
354 | name: 🧹 Linters${{ '' }} # nest jobs under the same sidebar category
355 | needs:
356 | - build
357 | - pre-setup # transitive, for accessing settings
358 |
359 | strategy:
360 | matrix:
361 | runner-vm-os:
362 | - ubuntu-latest
363 | python-version:
364 | - 3.13
365 | toxenv:
366 | # - pre-commit
367 | - metadata-validation
368 | # - build-docs
369 | # - coverage-docs
370 | # - doctest-docs
371 | # - linkcheck-docs
372 | # - spellcheck-docs
373 | xfail:
374 | - false
375 | fail-fast: false
376 |
377 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length
378 | with:
379 | built-wheel-names: >-
380 | ${{
381 | matrix.toxenv == 'metadata-validation'
382 | && needs.pre-setup.outputs.wheel-artifact-name
383 | || ''
384 | }}
385 | cache-key-for-dependency-files: >-
386 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }}
387 | checkout-src-git-committish: >-
388 | ${{ github.event.inputs.release-committish }}
389 | checkout-src-git-fetch-depth: >-
390 | ${{
391 | fromJSON(needs.pre-setup.outputs.release-requested)
392 | && 1
393 | || 0
394 | }}
395 | dists-artifact-name: >-
396 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
397 | post-toxenv-preparation-command: >-
398 | ${{
399 | matrix.toxenv == 'pre-commit'
400 | && 'python -Im pre_commit install-hooks'
401 | || ''
402 | }}
403 | python-version: >-
404 | ${{ matrix.python-version }}
405 | require-successful-codecov-uploads: >-
406 | ${{
407 | toJSON(
408 | needs.pre-setup.outputs.upstream-repository-id
409 | == github.repository_id
410 | )
411 | }}
412 | runner-vm-os: >-
413 | ${{ matrix.runner-vm-os }}
414 | # NOTE: `pre-commit --show-diff-on-failure` and
415 | # NOTE: `sphinxcontrib-spellcheck` with Git authors allowlist enabled
416 | # NOTE: both depend on the presence of a Git repository.
417 | source-tarball-name: >-
418 | ${{
419 | !contains(
420 | fromJSON('["pre-commit", "spellcheck-docs"]'),
421 | matrix.toxenv
422 | )
423 | && needs.pre-setup.outputs.sdist-artifact-name
424 | || ''
425 | }}
426 | timeout-minutes: 3
427 | toxenv: >-
428 | ${{ matrix.toxenv }}
429 | xfail: >-
430 | ${{
431 | fromJSON(needs.pre-setup.outputs.is-yolo-mode)
432 | || fromJSON(matrix.xfail)
433 | }}
434 | secrets:
435 | codecov-token: ${{ secrets.CODECOV_TOKEN }}
436 |
437 | tests:
438 | name: 🧪 Tests${{ '' }} # nest jobs under the same sidebar category
439 |
440 | needs:
441 | - build
442 | - pre-setup # transitive, for accessing settings
443 |
444 | strategy:
445 | fail-fast: >- # ${{ runner.debug }} is unavailable in this context
446 | ${{
447 | fromJSON(needs.pre-setup.outputs.is-debug-mode)
448 | && false
449 | || true
450 | }}
451 | matrix:
452 | python-version:
453 | # NOTE: The latest and the lowest supported Pythons are prioritized
454 | # NOTE: to improve the responsiveness. It's nice to see the most
455 | # NOTE: important results first.
456 | - 3.13
457 | - 3.9
458 | - pypy-3.11
459 | - >- # str
460 | 3.10
461 | - 3.12
462 | - 3.11
463 | runner-vm-os:
464 | - ubuntu-24.04-arm
465 | - ubuntu-24.04
466 | - macos-15
467 | - macos-13
468 | toxenv:
469 | - py
470 | xfail:
471 | - false
472 |
473 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length
474 | with:
475 | built-wheel-names: >-
476 | ${{ needs.pre-setup.outputs.wheel-artifact-name }}
477 | cache-key-for-dependency-files: >-
478 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }}
479 | check-name: >-
480 | 🧪 🐍${{
481 | matrix.python-version
482 | }} @ ${{
483 | matrix.runner-vm-os
484 | }}
485 | dists-artifact-name: >-
486 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
487 | job-dependencies-context: >- # context for hooks
488 | ${{ toJSON(needs) }}
489 | python-version: >-
490 | ${{ matrix.python-version }}
491 | require-successful-codecov-uploads: >-
492 | ${{
493 | toJSON(
494 | needs.pre-setup.outputs.upstream-repository-id
495 | == github.repository_id
496 | )
497 | }}
498 | runner-vm-os: >-
499 | ${{ matrix.runner-vm-os }}
500 | source-tarball-name: >-
501 | ${{ needs.pre-setup.outputs.sdist-artifact-name }}
502 | timeout-minutes: 5
503 | toxenv: >-
504 | ${{ matrix.toxenv }}
505 | # tox-run-posargs: >-
506 | # --cov-report=xml:.tox/.tmp/.test-results/pytest-${{
507 | # matrix.python-version
508 | # }}/cobertura.xml
509 | # --junitxml=.tox/.tmp/.test-results/pytest-${{
510 | # matrix.python-version
511 | # }}/test.xml
512 | tox-run-posargs: >-
513 | --junitxml=.tox/.tmp/.test-results/pytest-${{
514 | matrix.python-version
515 | }}/test.xml
516 | # tox-rerun-posargs: >-
517 | # -rA
518 | # -vvvvv
519 | # --lf
520 | # --no-cov
521 | # --no-fold-skipped
522 | tox-rerun-posargs: >-
523 | -rA
524 | -vvvvv
525 | --lf
526 | --no-fold-skipped
527 | xfail: >-
528 | ${{
529 | fromJSON(needs.pre-setup.outputs.is-yolo-mode)
530 | || fromJSON(matrix.xfail)
531 | }}
532 | secrets:
533 | codecov-token: ${{ secrets.CODECOV_TOKEN }}
534 |
535 | check: # This job does nothing and is only used for the branch protection
536 | if: always()
537 |
538 | needs:
539 | - lint
540 | - pre-setup # transitive, for accessing settings
541 | - tests
542 |
543 | runs-on: ubuntu-latest
544 |
545 | timeout-minutes: 1
546 |
547 | steps:
548 | - name: Decide whether the needed jobs succeeded or failed
549 | uses: re-actors/alls-green@release/v1
550 | with:
551 | allowed-failures: >-
552 | ${{
553 | fromJSON(needs.pre-setup.outputs.is-yolo-mode)
554 | && 'lint, tests'
555 | || ''
556 | }}
557 | jobs: ${{ toJSON(needs) }}
558 |
559 | publish-pypi:
560 | name: >-
561 | 📦
562 | Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI
563 | needs:
564 | - check
565 | - pre-setup # transitive, for accessing settings
566 | if: >-
567 | always()
568 | && needs.check.result == 'success'
569 | && fromJSON(needs.pre-setup.outputs.release-requested)
570 | && needs.pre-setup.outputs.upstream-repository-id == github.repository_id
571 |
572 | runs-on: ubuntu-latest
573 |
574 | timeout-minutes: 2 # docker+network are slow sometimes
575 |
576 | environment:
577 | name: pypi
578 | url: >-
579 | https://pypi.org/project/${{ env.PROJECT_NAME }}/${{
580 | needs.pre-setup.outputs.dist-version
581 | }}
582 |
583 | permissions:
584 | contents: read # This job doesn't need to `git push` anything
585 | id-token: write # PyPI Trusted Publishing (OIDC)
586 |
587 | steps:
588 | - name: Download all the dists
589 | uses: actions/download-artifact@v4
590 | with:
591 | name: >-
592 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
593 | path: dist/
594 | - name: >-
595 | 📦
596 | Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI
597 | 🔏
598 | uses: pypa/gh-action-pypi-publish@release/v1
599 |
600 | publish-testpypi:
601 | name: >-
602 | 📦
603 | Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI
604 | needs:
605 | - check
606 | - pre-setup # transitive, for accessing settings
607 | if: >-
608 | always()
609 | && needs.check.result == 'success'
610 | && (
611 | fromJSON(needs.pre-setup.outputs.is-untagged-devel)
612 | || fromJSON(needs.pre-setup.outputs.release-requested)
613 | )
614 | && needs.pre-setup.outputs.upstream-repository-id == github.repository_id
615 | && fromJSON(needs.pre-setup.outputs.publishing-to-testpypi-enabled)
616 |
617 | runs-on: ubuntu-latest
618 |
619 | timeout-minutes: 2 # docker+network are slow sometimes
620 |
621 | environment:
622 | name: testpypi
623 | url: >-
624 | https://test.pypi.org/project/${{ env.PROJECT_NAME }}/${{
625 | needs.pre-setup.outputs.dist-version
626 | }}
627 |
628 | permissions:
629 | contents: read # This job doesn't need to `git push` anything
630 | id-token: write # PyPI Trusted Publishing (OIDC)
631 |
632 | steps:
633 | - name: Download all the dists
634 | uses: actions/download-artifact@v4
635 | with:
636 | name: >-
637 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
638 | path: dist/
639 | - name: >-
640 | 📦
641 | Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI
642 | 🔏
643 | uses: pypa/gh-action-pypi-publish@release/v1
644 | with:
645 | repository-url: https://test.pypi.org/legacy/
646 |
647 | post-release-repo-update:
648 | name: >-
649 | 🏷️
650 | Publish post-release Git tag
651 | for ${{ needs.pre-setup.outputs.git-tag }}
652 | needs:
653 | - publish-pypi
654 | - pre-setup # transitive, for accessing settings
655 | if: >-
656 | always()
657 | && needs.publish-pypi.result == 'success'
658 |
659 | runs-on: ubuntu-latest
660 |
661 | timeout-minutes: 1
662 |
663 | permissions:
664 | contents: write # Mandatory for `git push` to work
665 | pull-requests: write
666 |
667 | steps:
668 | - name: Fetch the src snapshot # IMPORTANT: Must be before the tag check
669 | uses: actions/checkout@v4
670 | with:
671 | fetch-depth: 2
672 | ref: ${{ github.event.inputs.release-committish }}
673 | - name: >-
674 | Check if the requested tag ${{ needs.pre-setup.outputs.git-tag }}
675 | is present and is pointing at the required commit ${{
676 | github.event.inputs.release-committish
677 | }}
678 | id: existing-remote-tag-check
679 | run: |
680 | set -eEuo pipefail
681 |
682 | REMOTE_TAGGED_COMMIT_SHA="$(
683 | git ls-remote --tags --refs $(git remote get-url origin) '${{
684 | needs.pre-setup.outputs.git-tag
685 | }}' | awk '{print $1}'
686 | )"
687 |
688 | if [[ "${REMOTE_TAGGED_COMMIT_SHA}" == '' ]]
689 | then
690 | LAST_HUMAN_COMMIT_SHA=
691 | else
692 | LAST_HUMAN_COMMIT_SHA=$(git rev-parse "${REMOTE_TAGGED_COMMIT_SHA}"^)
693 | fi
694 |
695 | RELEASE_REQUEST_COMMIT_SHA=$(git rev-parse '${{
696 | github.event.inputs.release-committish || 'HEAD'
697 | }}')
698 |
699 | if [[ "${LAST_HUMAN_COMMIT_SHA}" == "${RELEASE_REQUEST_COMMIT_SHA}" ]]
700 | then
701 | echo "already-exists=true" >> "${GITHUB_OUTPUT}"
702 | fi
703 |
704 | - name: Setup git user as [bot]
705 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true'
706 | # Refs:
707 | # * https://github.community/t/github-actions-bot-email-address/17204/6
708 | # * https://github.com/actions/checkout/issues/13#issuecomment-724415212
709 | uses: fregante/setup-git-user@v2
710 |
711 | - name: >-
712 | 🏷️
713 | Tag the release in the local Git repo
714 | as ${{ needs.pre-setup.outputs.git-tag }}
715 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true'
716 | run: >-
717 | git tag
718 | -m '${{ needs.pre-setup.outputs.git-tag }}'
719 | -m 'Published at https://pypi.org/project/${{
720 | env.PROJECT_NAME
721 | }}/${{
722 | needs.pre-setup.outputs.dist-version
723 | }}'
724 | -m 'This release has been produced by the following workflow run: ${{
725 | github.server_url
726 | }}/${{
727 | github.repository
728 | }}/actions/runs/${{
729 | github.run_id
730 | }}'
731 | '${{ needs.pre-setup.outputs.git-tag }}'
732 | --
733 | ${{ github.event.inputs.release-committish }}
734 |
735 | - name: >-
736 | 🏷️
737 | Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding
738 | to the just published release back to GitHub
739 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true'
740 | run: >-
741 | git push --atomic origin
742 | '${{ needs.pre-setup.outputs.git-tag }}'
743 |
744 | slsa-provenance:
745 | name: >-
746 | 🔏
747 | Save in-toto SLSA provenance as a GitHub workflow artifact for
748 | ${{ needs.pre-setup.outputs.git-tag }}
749 | needs:
750 | - build
751 | - post-release-repo-update
752 | - pre-setup # transitive, for accessing settings
753 |
754 | permissions:
755 | actions: read
756 | id-token: write
757 | contents: write
758 |
759 | # Can't pin with hash due to how this workflow works.
760 | uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0 # yamllint disable-line rule:line-length
761 | with:
762 | base64-subjects: >-
763 | ${{
764 | fromJSON(
765 | needs.build.outputs.steps
766 | ).tox-run.outputs.combined-dists-base64-encoded-sha256-hash
767 | }}
768 |
769 | publish-github-attestations:
770 | name: >-
771 | 🔏
772 | Produce a GitHub-native Attestations for
773 | ${{ needs.pre-setup.outputs.git-tag }}
774 | needs:
775 | - post-release-repo-update
776 | - pre-setup # transitive, for accessing settings
777 | if: >-
778 | always()
779 | && needs.post-release-repo-update.result == 'success'
780 | runs-on: ubuntu-latest
781 |
782 | timeout-minutes: 3
783 |
784 | permissions:
785 | attestations: write # IMPORTANT: needed to persist attestations
786 | contents: read
787 | id-token: write # IMPORTANT: mandatory for Sigstore signing
788 |
789 | steps:
790 | - name: Download all the dists
791 | uses: actions/download-artifact@v4
792 | with:
793 | name: >-
794 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
795 | path: dist/
796 |
797 | - name: >-
798 | 🔏
799 | Generate provenance attestations for the dists
800 | uses: actions/attest-build-provenance@v1
801 | with:
802 | subject-path: |
803 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}
804 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}
805 |
806 | publish-github-release:
807 | name: >-
808 | 🏷️
809 | Publish a GitHub Release for
810 | ${{ needs.pre-setup.outputs.git-tag }}
811 | needs:
812 | - post-release-repo-update
813 | - pre-setup # transitive, for accessing settings
814 | - publish-github-attestations
815 | - slsa-provenance
816 | if: >-
817 | always()
818 | && needs.post-release-repo-update.result == 'success'
819 | runs-on: ubuntu-latest
820 |
821 | timeout-minutes: 3
822 |
823 | permissions:
824 | contents: write
825 | discussions: write
826 | id-token: write # IMPORTANT: mandatory for Sigstore signing
827 |
828 | steps:
829 | - name: Download all the dists
830 | uses: actions/download-artifact@v4
831 | with:
832 | name: >-
833 | ${{ needs.pre-setup.outputs.dists-artifact-name }}
834 | path: dist/
835 | - name: Download SLSA provenance in-toto files
836 | uses: actions/download-artifact@v4
837 | with:
838 | name: >-
839 | ${{ needs.slsa-provenance.outputs.provenance-name }}
840 | path: >-
841 | ${{ needs.slsa-provenance.outputs.provenance-name }}
842 |
843 | - name: Figure out if the current version is a pre-release
844 | id: release-maturity-check
845 | run: |
846 | from os import environ
847 | from pathlib import Path
848 |
849 | release_version = '${{
850 | needs.pre-setup.outputs.dist-version
851 | }}'
852 |
853 | FILE_APPEND_MODE = 'a'
854 |
855 | is_pre_release = any(
856 | hint_char in release_version
857 | for hint_char in {'a', 'b', 'd', 'r'}
858 | )
859 |
860 | with Path(environ['GITHUB_OUTPUT']).open(
861 | mode=FILE_APPEND_MODE,
862 | ) as outputs_file:
863 | print(
864 | f'is-pre-release={is_pre_release !s}'.lower(),
865 | file=outputs_file,
866 | )
867 | shell: python
868 | - name: Prepare the release notes file for the GitHub Releases
869 | run: |
870 | echo '## 📝 Release notes' | tee -a release-notes.md
871 | echo | tee -a release-notes.md
872 | echo | tee -a release-notes.md
873 | echo '📦 PyPI page: https://pypi.org/project/${{
874 | env.PROJECT_NAME
875 | }}/${{
876 | needs.pre-setup.outputs.dist-version
877 | }}' | tee -a release-notes.md
878 | echo | tee -a release-notes.md
879 | echo | tee -a release-notes.md
880 | echo '${{
881 | steps.release-maturity-check.outputs.is-pre-release == 'true'
882 | && format(
883 | '🚧 {0} is marked as a pre-release.',
884 | needs.pre-setup.outputs.git-tag
885 | )
886 | || format(
887 | '🌱 {0} is marked as a stable release.',
888 | needs.pre-setup.outputs.git-tag
889 | )
890 | }}' | tee -a release-notes.md
891 | echo | tee -a release-notes.md
892 | echo | tee -a release-notes.md
893 | echo '🔗 This release has been produced by ' \
894 | 'the following workflow run: ${{
895 | github.server_url
896 | }}/${{
897 | github.repository
898 | }}/actions/runs/${{
899 | github.run_id
900 | }}' | tee -a release-notes.md
901 | echo | tee -a release-notes.md
902 | echo | tee -a release-notes.md
903 | shell: bash
904 |
905 | - name: Sign the dists with Sigstore
906 | uses: sigstore/gh-action-sigstore-python@v3.0.0
907 | with:
908 | inputs: >-
909 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}
910 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}
911 |
912 | - name: >-
913 | Publish a GitHub Release for
914 | ${{ needs.pre-setup.outputs.git-tag }}
915 | with Sigstore-signed artifacts
916 | uses: ncipollo/release-action@v1
917 | with:
918 | allowUpdates: false
919 | artifactErrorsFailBuild: false
920 | artifacts: |
921 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}
922 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}.sigstore.json
923 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}
924 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}.sigstore.json
925 | ${{ needs.slsa-provenance.outputs.provenance-name }}/*
926 | artifactContentType: raw # Because whl and tgz are of different types
927 | bodyFile: release-notes.md
928 | # discussionCategory: Announcements # FIXME: uncomment post #84
929 | draft: false
930 | name: ${{ needs.pre-setup.outputs.git-tag }}
931 | omitBodyDuringUpdate: true
932 | omitName: false
933 | omitNameDuringUpdate: true
934 | omitPrereleaseDuringUpdate: true
935 | prerelease: ${{ steps.release-maturity-check.outputs.is-pre-release }}
936 | removeArtifacts: false
937 | replacesArtifacts: false
938 | tag: ${{ needs.pre-setup.outputs.git-tag }}
939 | token: ${{ secrets.GITHUB_TOKEN }}
940 |
941 | ...
942 |
--------------------------------------------------------------------------------
/.github/workflows/scheduled-runs.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: ⏳
4 |
5 | on:
6 | pull_request:
7 | paths: # only changes to this workflow itself trigger PR testing
8 | - .github/workflows/scheduled-runs.yml
9 | schedule:
10 | - cron: 7 6 * * * # run daily at 6:07 UTC
11 | workflow_dispatch: # manual trigger
12 |
13 | run-name: >-
14 | 🌃
15 | Nightly run of
16 | ${{
17 | github.event.pull_request.number && 'PR' || ''
18 | }}${{
19 | !github.event.pull_request.number && 'Commit' || ''
20 | }}
21 | ${{ github.event.pull_request.number || github.sha }}
22 | triggered by: ${{ github.event_name }} of ${{
23 | github.ref
24 | }} ${{
25 | github.ref_type
26 | }}
27 | (workflow run ID: ${{
28 | github.run_id
29 | }}; number: ${{
30 | github.run_number
31 | }}; attempt: ${{
32 | github.run_attempt
33 | }})
34 |
35 | jobs:
36 | main-ci-cd-pipeline:
37 | name: 🧪 Main CI/CD pipeline
38 | uses: ./.github/workflows/ci-cd.yml
39 | secrets: inherit
40 |
41 | ...
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 |
26 | # PyInstaller
27 | # Usually these files are written by a python script from a template
28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest
30 | *.spec
31 |
32 | # Installer logs
33 | pip-log.txt
34 | pip-delete-this-directory.txt
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .coverage
40 | .cache
41 | nosetests.xml
42 | coverage.xml
43 |
44 | # Translations
45 | *.mo
46 | *.pot
47 |
48 | # Django stuff:
49 | *.log
50 |
51 | # Sphinx documentation
52 | docs/_build/
53 |
54 | # PyBuilder
55 | target/
56 |
57 | # pbr
58 | # These are auto-generated by pbr (http://docs.openstack.org/developer/pbr/#authors-and-changelog)
59 | AUTHORS
60 | ChangeLog
61 |
62 | # vagrant
63 | .vagrant
64 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.7.12
2 | 3.8.1
3 | 3.9.9
4 | 3.10.1
5 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 |
clean:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -rf {} +

# Run the suite on the oldest supported interpreter. pyproject.toml
# declares requires-python >= 3.9, so the former `tox -e py27` target
# could never work; py39 matches the supported floor.
test:
	tox -e py39

test-tox:
	tox
13 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | requests-unixsocket
2 | ===================
3 |
4 | .. image:: https://badge.fury.io/py/requests-unixsocket.svg
5 | :target: https://badge.fury.io/py/requests-unixsocket
6 | :alt: Latest Version on PyPI
7 |
8 | .. image:: https://github.com/msabramo/requests-unixsocket/actions/workflows/ci-cd.yml/badge.svg?event=push
9 | :target: https://github.com/msabramo/requests-unixsocket/actions/workflows/ci-cd.yml
10 |
11 | Use `requests <https://requests.readthedocs.io/>`_ to talk HTTP via a UNIX domain socket
12 |
13 | Usage
14 | -----
15 |
16 | Explicit
17 | ++++++++
18 |
19 | You can use it by instantiating a special ``Session`` object:
20 |
21 | .. code-block:: python
22 |
23 | import json
24 |
25 | import requests_unixsocket
26 |
27 | session = requests_unixsocket.Session()
28 |
29 | r = session.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info')
30 | registry_config = r.json()['RegistryConfig']
31 | print(json.dumps(registry_config, indent=4))
32 |
33 |
34 | Implicit (monkeypatching)
35 | +++++++++++++++++++++++++
36 |
37 | Monkeypatching allows you to use the functionality in this module, while making
38 | minimal changes to your code. Note that in the above example we had to
39 | instantiate a special ``requests_unixsocket.Session`` object and call the
40 | ``get`` method on that object. Calling ``requests.get(url)`` (the easiest way
41 | to use requests and probably very common), would not work. But we can make it
42 | work by doing monkeypatching.
43 |
44 | You can monkeypatch globally:
45 |
46 | .. code-block:: python
47 |
48 | import requests_unixsocket
49 |
50 | requests_unixsocket.monkeypatch()
51 |
52 | r = requests.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info')
53 | assert r.status_code == 200
54 |
55 | or you can do it temporarily using a context manager:
56 |
57 | .. code-block:: python
58 |
59 | import requests_unixsocket
60 |
61 | with requests_unixsocket.monkeypatch():
62 | r = requests.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/info')
63 | assert r.status_code == 200
64 |
65 |
66 | Abstract namespace sockets
67 | ++++++++++++++++++++++++++
68 |
69 | To connect to an `abstract namespace
70 | socket <https://utcc.utoronto.ca/~cks/space/blog/python/AbstractUnixSocketsAndPeercred>`_
71 | (Linux only), prefix the name with a NULL byte (i.e.: ``\0``) - e.g.:
72 |
73 | .. code-block:: python
74 |
75 | import requests_unixsocket
76 |
77 | session = requests_unixsocket.Session()
78 | res = session.get('http+unix://\0test_socket/get')
79 | print(res.text)
80 |
81 | For an example program that illustrates this, see
82 | ``examples/abstract_namespace.py`` in the git repo. Since abstract namespace
83 | sockets are specific to Linux, the program will only work on Linux.
84 |
85 |
86 | See also
87 | --------
88 |
89 | - https://github.com/httpie/httpie-unixsocket - a plugin for `HTTPie <https://httpie.io/>`_ that allows you to interact with UNIX domain sockets
90 |
--------------------------------------------------------------------------------
/examples/abstract_namespace.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # Example of interacting with a [abstract namespace
4 | # socket](https://utcc.utoronto.ca/~cks/space/blog/python/AbstractUnixSocketsAndPeercred)
5 | #
6 | # Since abstract namespace sockets are specific to Linux, this program will
7 | # only work on Linux.
8 |
9 | import os
10 | import socket
11 |
12 | import requests_unixsocket
13 |
14 |
def handle_response():
    """Serve exactly one hard-coded HTTP response on an abstract socket.

    Binds the abstract-namespace socket ``\\0test_socket`` (Linux only),
    accepts a single connection, reads the request bytes and writes back
    a minimal HTTP/1.0 "Hello world!" reply.
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sock.bind('\0test_socket')
        sock.listen(1)
        client_sock, addr = sock.accept()
        try:
            client_sock.recv(1024)
            client_sock.sendall(b'HTTP/1.0 200 OK\r\n')
            client_sock.sendall(b'Content-Type: text/plain\r\n\r\n')
            client_sock.sendall(b'Hello world!')
        finally:
            # Bug fix: the original leaked both sockets and relied on
            # process exit. Closing the client socket also signals EOF
            # to the peer, which HTTP/1.0 uses to delimit the body.
            client_sock.close()
    finally:
        sock.close()
25 |
26 |
pid = os.fork()
if pid:  # parent: issue the request against the child's socket
    try:
        res = requests_unixsocket.Session().get('http+unix://\0test_socket/get')
        print(res.text)
    finally:
        # Reap the child whether or not the request succeeded.
        os.wait()
else:  # child: serve a single canned HTTP response, then exit
    handle_response()
36 |
--------------------------------------------------------------------------------
/examples/docker-info.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 |
5 | import requests_unixsocket
6 |
# Query the local Docker daemon over its UNIX socket and pretty-print
# the registry-configuration section of the `docker info` payload.
response = requests_unixsocket.Session().get(
    'http+unix://%2Fvar%2Frun%2Fdocker.sock/info')
print(json.dumps(response.json()['RegistryConfig'], indent=4))
12 |
--------------------------------------------------------------------------------
/examples/simple-http.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 |
5 | import requests_unixsocket
6 |
# Fetch the URL given on the command line (an http+unix:// URL with a
# percent-encoded socket path) and print the response body.
if len(sys.argv) != 2:
    # Bug fix: a missing argument used to die with a raw IndexError
    # traceback; exit with a usage hint instead.
    sys.exit('usage: %s <http+unix URL>' % sys.argv[0])

session = requests_unixsocket.Session()

url = sys.argv[1]
res = session.get(url)
print(res.text)
12 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools >= 64",
4 | "setuptools_scm >= 8",
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
8 | [dependency-groups]
9 | building = [
10 | "build",
11 | ]
12 | testing = [
13 | "pep8",
14 | "pytest",
15 | "pytest-cache",
16 | "pytest-pep8",
17 | "waitress",
18 | ]
19 | upstreaming = [
20 | "setuptools-scm",
21 | "twine",
22 | ]
23 |
24 | [project]
25 | name = "requests-unixsocket"
26 | description = "Use requests to talk HTTP via a UNIX domain socket"
27 | requires-python = ">= 3.9"
28 | dependencies = [
29 | "requests >= 1.1",
30 | ]
31 | classifiers = [
32 | "Development Status :: 3 - Alpha",
33 |
34 | "Intended Audience :: Developers",
35 | "Intended Audience :: Information Technology",
36 |
37 | "Operating System :: OS Independent",
38 |
39 | "Programming Language :: Python",
40 | "Programming Language :: Python :: 3",
41 | "Programming Language :: Python :: 3 :: Only",
42 | "Programming Language :: Python :: 3.9",
43 | "Programming Language :: Python :: 3.10",
44 | "Programming Language :: Python :: 3.11",
45 | "Programming Language :: Python :: 3.12",
46 | "Programming Language :: Python :: 3.13",
47 | "Programming Language :: Python :: Implementation",
48 | "Programming Language :: Python :: Implementation :: CPython",
49 | "Programming Language :: Python :: Implementation :: Jython",
50 | "Programming Language :: Python :: Implementation :: PyPy",
51 | ]
52 | dynamic = [
53 | "version",
54 | ]
55 |
56 | [[project.authors]]
57 | name = "Marc Abramowitz"
58 | email = "marc@marc-abramowitz.com"
59 |
60 | [project.readme]
61 | file = "README.rst"
62 | content-type = "text/x-rst"
63 |
64 | [project.urls]
65 | Homepage = "https://github.com/msabramo/requests-unixsocket"
66 |
67 | [tool.setuptools_scm]
68 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --tb=short
3 |
--------------------------------------------------------------------------------
/requests_unixsocket/__init__.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 |
4 | from .adapters import UnixAdapter
5 |
6 | DEFAULT_SCHEME = 'http+unix://'
7 |
8 |
class Session(requests.Session):
    """A ``requests.Session`` that also understands ``http+unix://`` URLs.

    :param url_scheme: URL prefix to route through the UNIX adapter
        (defaults to ``DEFAULT_SCHEME``).
    """

    def __init__(self, url_scheme=DEFAULT_SCHEME, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Any URL starting with ``url_scheme`` is handled by UnixAdapter.
        self.mount(url_scheme, UnixAdapter())
13 |
14 |
class monkeypatch(object):
    """Patch the top-level ``requests`` functions so that plain
    ``requests.get('http+unix://...')`` works; usable as a context
    manager to restore the originals on exit.

    :param url_scheme: URL scheme to route through the UNIX adapter.
    """

    def __init__(self, url_scheme=DEFAULT_SCHEME):
        # Bug fix: ``url_scheme`` was accepted but ignored — the session
        # was always built with the default scheme.
        self.session = Session(url_scheme)
        requests = self._get_global_requests_module()

        # Methods to replace
        self.methods = ('request', 'get', 'head', 'post',
                        'patch', 'put', 'delete', 'options')
        # Store the original methods so __exit__ can restore them
        self.orig_methods = dict(
            (m, requests.__dict__[m]) for m in self.methods)
        # Monkey patch: install this module's functions in their place.
        # NOTE(review): the module-level replacements construct their own
        # Session with DEFAULT_SCHEME, so a non-default ``url_scheme``
        # only affects ``self.session`` — confirm whether that's intended.
        g = globals()
        for m in self.methods:
            requests.__dict__[m] = g[m]

    def _get_global_requests_module(self):
        # Resolved via sys.modules so we patch the canonical module object.
        return sys.modules['requests']

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Restore the original requests functions on context exit.
        requests = self._get_global_requests_module()
        for m in self.methods:
            requests.__dict__[m] = self.orig_methods[m]
41 |
42 |
43 | # These are the same methods defined for the global requests object
def request(method, url, **kwargs):
    """Send a one-off request through a fresh UNIX-socket-aware Session."""
    return Session().request(method=method, url=url, **kwargs)
47 |
48 |
def get(url, **kwargs):
    """GET ``url``; follows redirects by default, like ``requests.get``."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, **kwargs)
52 |
53 |
def head(url, **kwargs):
    """HEAD ``url``; redirects are NOT followed by default."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
57 |
58 |
def post(url, data=None, json=None, **kwargs):
    """POST ``data`` and/or ``json`` to ``url``."""
    return request('post', url, data=data, json=json, **kwargs)
61 |
62 |
def patch(url, data=None, **kwargs):
    """PATCH ``url`` with ``data``."""
    return request('patch', url, data=data, **kwargs)
65 |
66 |
def put(url, data=None, **kwargs):
    """PUT ``data`` to ``url``."""
    return request('put', url, data=data, **kwargs)
69 |
70 |
def delete(url, **kwargs):
    """DELETE ``url``."""
    return request('delete', url, **kwargs)
73 |
74 |
def options(url, **kwargs):
    """OPTIONS ``url``; follows redirects by default."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)
78 |
--------------------------------------------------------------------------------
/requests_unixsocket/adapters.py:
--------------------------------------------------------------------------------
1 | import socket
2 |
3 | from requests.adapters import HTTPAdapter
4 | from requests.compat import urlparse, unquote
5 |
6 | try:
7 | from requests.packages import urllib3
8 | except ImportError:
9 | import urllib3
10 |
11 |
12 | # The following was adapted from some code from docker-py
13 | # https://github.com/docker/docker-py/blob/master/docker/transport/unixconn.py
class UnixHTTPConnection(urllib3.connection.HTTPConnection, object):

    def __init__(self, unix_socket_url, timeout=60):
        """Create an HTTP connection to a unix domain socket

        :param unix_socket_url: A URL with a scheme of 'http+unix' and the
        netloc is a percent-encoded path to a unix domain socket. E.g.:
        'http+unix://%2Ftmp%2Fprofilesvc.sock/status/pid'
        """
        super(UnixHTTPConnection, self).__init__('localhost', timeout=timeout)
        self.unix_socket_url = unix_socket_url
        self.timeout = timeout
        self.sock = None

    def __del__(self):  # base class does not have d'tor
        if self.sock:
            self.sock.close()

    def connect(self):
        """Connect to the percent-decoded socket path in the URL's netloc."""
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.settimeout(self.timeout)
        socket_path = unquote(urlparse(self.unix_socket_url).netloc)
        try:
            sock.connect(socket_path)
        except OSError:
            # Bug fix: a failed connect used to leak the socket — it was
            # never assigned to self.sock, so __del__ could not close it.
            sock.close()
            raise
        self.sock = sock
38 |
39 |
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
    """Connection pool whose connections speak HTTP over a UNIX socket.

    :param socket_path: URL carrying the percent-encoded socket path in
        its netloc (passed through to ``UnixHTTPConnection``).
    :param timeout: per-connection timeout in seconds.
    """

    def __init__(self, socket_path, timeout=60):
        super(UnixHTTPConnectionPool, self).__init__(
            'localhost', timeout=timeout)
        self.socket_path = socket_path
        self.timeout = timeout

    def _new_conn(self):
        # Called by urllib3 whenever the pool needs a fresh connection.
        return UnixHTTPConnection(self.socket_path, self.timeout)
50 |
51 |
class UnixAdapter(HTTPAdapter):
    """Requests transport adapter speaking HTTP over unix domain sockets.

    Mount on a session for the ``http+unix://`` scheme; one connection
    pool is kept per socket, up to ``pool_connections`` sockets.
    """

    def __init__(self, timeout=60, pool_connections=25, *args, **kwargs):
        """Initialize the adapter.

        :param timeout: Socket timeout in seconds for pooled connections.
        :param pool_connections: Maximum number of per-socket pools kept;
            the least recently used pool is closed when the cap is hit.
        """
        super(UnixAdapter, self).__init__(*args, **kwargs)
        self.timeout = timeout
        self.pools = urllib3._collections.RecentlyUsedContainer(
            pool_connections, dispose_func=lambda p: p.close()
        )

    # Fix for requests 2.32.2+: https://github.com/psf/requests/pull/6710
    def get_connection_with_tls_context(
            self, request, verify, proxies=None, cert=None):
        return self.get_connection(request.url, proxies)

    def get_connection(self, url, proxies=None):
        """Return (creating if needed) the pool for *url*'s socket.

        :raises ValueError: if a proxy is configured for the URL scheme;
            unix domain sockets cannot be proxied.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(url.lower()).scheme)

        if proxy:
            raise ValueError('%s does not support specifying proxies'
                             % self.__class__.__name__)

        # BUG FIX: key the pool cache on the socket address (scheme +
        # netloc) rather than on the full request URL. Keying on the
        # full URL created one pool per distinct path/query, defeating
        # connection reuse and prematurely evicting pools from the LRU
        # container above.
        parsed = urlparse(url)
        pool_key = '%s://%s' % (parsed.scheme, parsed.netloc)

        with self.pools.lock:
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            pool = UnixHTTPConnectionPool(pool_key, self.timeout)
            self.pools[pool_key] = pool

        return pool

    def request_url(self, request, proxies):
        # The socket lives in the netloc, so only the path portion is
        # sent on the wire.
        return request.path_url

    def close(self):
        # Disposes every cached pool via the container's dispose_func.
        self.pools.clear()
88 |
--------------------------------------------------------------------------------
/requests_unixsocket/tests/test_requests_unixsocket.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """Tests for requests_unixsocket"""
5 |
6 | import logging
7 |
8 | import pytest
9 | import requests
10 |
11 | import requests_unixsocket
12 | from requests_unixsocket.testutils import UnixSocketServerThread
13 |
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
def test_use_UnixAdapter_directly():
    """Test using UnixAdapter directly, because
    https://github.com/httpie/httpie-unixsocket does this
    """
    docker_url = 'http+unix://%2Fvar%2Frun%2Fdocker.sock/info'
    prepared = requests.Request(method='GET', url=docker_url).prepare()
    adapter = requests_unixsocket.UnixAdapter()
    assert adapter.request_url(request=prepared, proxies=None) == '/info'
29 |
30 |
def test_unix_domain_adapter_ok():
    """A Session mounted for http+unix:// serves every HTTP verb."""
    with UnixSocketServerThread() as server_thread:
        session = requests_unixsocket.Session('http+unix://')
        quoted_sock = requests.compat.quote_plus(server_thread.usock)
        url = 'http+unix://%s/path/to/page' % quoted_sock

        for verb in ('get', 'post', 'head', 'patch', 'put', 'delete',
                     'options'):
            logger.debug('Calling session.%s(%r) ...', verb, url)
            response = getattr(session, verb)(url)
            logger.debug(
                'Received response: %r with text: %r and headers: %r',
                response, response.text, response.headers)
            assert response.status_code == 200
            assert response.headers['server'] == 'waitress'
            assert response.headers['X-Transport'] == 'unix domain socket'
            assert response.headers['X-Requested-Path'] == '/path/to/page'
            assert response.headers['X-Socket-Path'] == server_thread.usock
            assert isinstance(response.connection,
                              requests_unixsocket.UnixAdapter)
            assert response.url.lower() == url.lower()
            expected_body = '' if verb == 'head' else 'Hello world!'
            assert response.text == expected_body
55 |
56 |
def test_unix_domain_adapter_url_with_query_params():
    """Query strings survive the unix-socket transport unchanged."""
    with UnixSocketServerThread() as server_thread:
        session = requests_unixsocket.Session('http+unix://')
        quoted_sock = requests.compat.quote_plus(server_thread.usock)
        url = ('http+unix://%s'
               '/containers/nginx/logs?timestamp=true' % quoted_sock)

        for verb in ('get', 'post', 'head', 'patch', 'put', 'delete',
                     'options'):
            logger.debug('Calling session.%s(%r) ...', verb, url)
            response = getattr(session, verb)(url)
            logger.debug(
                'Received response: %r with text: %r and headers: %r',
                response, response.text, response.headers)
            assert response.status_code == 200
            assert response.headers['server'] == 'waitress'
            assert response.headers['X-Transport'] == 'unix domain socket'
            assert (response.headers['X-Requested-Path'] ==
                    '/containers/nginx/logs')
            assert (response.headers['X-Requested-Query-String'] ==
                    'timestamp=true')
            assert response.headers['X-Socket-Path'] == server_thread.usock
            assert isinstance(response.connection,
                              requests_unixsocket.UnixAdapter)
            assert response.url.lower() == url.lower()
            expected_body = '' if verb == 'head' else 'Hello world!'
            assert response.text == expected_body
83 |
84 |
def test_unix_domain_adapter_connection_error():
    """Every verb raises ConnectionError for a nonexistent socket."""
    session = requests_unixsocket.Session('http+unix://')
    missing_url = 'http+unix://socket_does_not_exist/path/to/page'

    for verb in ('get', 'post', 'head', 'patch', 'put', 'delete', 'options'):
        with pytest.raises(requests.ConnectionError):
            getattr(session, verb)(missing_url)
92 |
93 |
def test_unix_domain_adapter_connection_proxies_error():
    """Specifying a proxy for the http+unix scheme is rejected."""
    session = requests_unixsocket.Session('http+unix://')
    proxy_config = {"http+unix": "http://10.10.1.10:1080"}

    for verb in ('get', 'post', 'head', 'patch', 'put', 'delete', 'options'):
        with pytest.raises(ValueError) as excinfo:
            getattr(session, verb)(
                'http+unix://socket_does_not_exist/path/to/page',
                proxies=proxy_config)
        assert ('UnixAdapter does not support specifying proxies'
                in str(excinfo.value))
104 |
105 |
def test_unix_domain_adapter_monkeypatch():
    """requests.* top-level functions work only while patched."""
    verbs = ('get', 'post', 'head', 'patch', 'put', 'delete', 'options')
    with UnixSocketServerThread() as server_thread:
        with requests_unixsocket.monkeypatch('http+unix://'):
            quoted_sock = requests.compat.quote_plus(server_thread.usock)
            url = 'http+unix://%s/path/to/page' % quoted_sock

            for verb in verbs:
                logger.debug('Calling session.%s(%r) ...', verb, url)
                response = getattr(requests, verb)(url)
                logger.debug(
                    'Received response: %r with text: %r and headers: %r',
                    response, response.text, response.headers)
                assert response.status_code == 200
                assert response.headers['server'] == 'waitress'
                assert (response.headers['X-Transport'] ==
                        'unix domain socket')
                assert response.headers['X-Requested-Path'] == '/path/to/page'
                assert (response.headers['X-Socket-Path'] ==
                        server_thread.usock)
                assert isinstance(response.connection,
                                  requests_unixsocket.UnixAdapter)
                assert response.url.lower() == url.lower()
                expected_body = '' if verb == 'head' else 'Hello world!'
                assert response.text == expected_body

    # Outside the patched context the custom scheme is rejected again.
    for verb in verbs:
        with pytest.raises(requests.exceptions.InvalidSchema):
            getattr(requests, verb)(url)
135 |
--------------------------------------------------------------------------------
/requests_unixsocket/testutils.py:
--------------------------------------------------------------------------------
1 | """
2 | Utilities helpful for writing tests
3 |
4 | Provides a UnixSocketServerThread that creates a running server, listening on a
5 | newly created unix socket.
6 |
7 | Example usage:
8 |
9 | .. code-block:: python
10 |
11 | def test_unix_domain_adapter_monkeypatch():
12 | with UnixSocketServerThread() as usock_thread:
13 | with requests_unixsocket.monkeypatch('http+unix://'):
                urlencoded_usock = quote_plus(usock_thread.usock)
15 | url = 'http+unix://%s/path/to/page' % urlencoded_usock
16 | r = requests.get(url)
17 | """
18 |
19 | import logging
20 | import os
21 | import threading
22 | import time
23 | import uuid
24 | import waitress
25 |
26 |
27 | logger = logging.getLogger(__name__)
28 |
29 |
class KillThread(threading.Thread):
    """Thread that shuts a waitress server down after a short grace period.

    waitress's main loop exits once the server's socket map is empty, so
    clearing ``server._map`` causes ``server.run()`` to return.
    """

    def __init__(self, server, *args, **kwargs):
        """Remember the *server* to shut down.

        :param server: A waitress server object exposing ``_map``.
        """
        super(KillThread, self).__init__(*args, **kwargs)
        self.server = server

    def run(self):
        # BUG FIX: log before sleeping — the message previously appeared
        # only after the sleep it described had already finished.
        logger.debug('Sleeping')
        time.sleep(1)  # grace period for in-flight requests
        # NOTE: relies on waitress's private socket map to stop the loop.
        self.server._map.clear()
39 |
40 |
class WSGIApp:
    """Minimal WSGI app that echoes request metadata in response headers."""

    # Assigned by the server thread once the waitress server exists.
    server = None

    def __call__(self, environ, start_response):
        """Respond 200 with fixed body and echo headers for any request."""
        requested_path = environ['PATH_INFO']
        logger.debug('WSGIApp.__call__: Invoked for %s', requested_path)
        logger.debug('WSGIApp.__call__: environ = %r', environ)
        status_text = '200 OK'
        response_headers = [
            ('X-Transport', 'unix domain socket'),
            ('X-Socket-Path', environ['SERVER_PORT']),
            ('X-Requested-Query-String', environ['QUERY_STRING']),
            ('X-Requested-Path', requested_path)]
        if environ['REQUEST_METHOD'] == 'HEAD':
            body_bytes = b''
        else:
            body_bytes = b'Hello world!'
        start_response(status_text, response_headers)
        logger.debug(
            'WSGIApp.__call__: Responding with '
            'status_text = %r; '
            'response_headers = %r; '
            'body_bytes = %r',
            status_text, response_headers, body_bytes)
        return [body_bytes]
64 |
65 |
class UnixSocketServerThread(threading.Thread):
    """Background thread running a waitress server on a fresh unix socket.

    Use as a context manager; ``usock`` holds the socket file path.
    """

    def __init__(self, *args, **kwargs):
        super(UnixSocketServerThread, self).__init__(*args, **kwargs)
        self.usock = self.get_tempfile_name()
        self.server = None
        # Set once the waitress server object exists and is listening.
        self.server_ready_event = threading.Event()

    def get_tempfile_name(self):
        """Return a unique, short socket path under /tmp.

        I'd rather use tempfile.NamedTemporaryFile but IDNA limits
        the hostname to 63 characters and we'll get a "InvalidURL:
        URL has an invalid label" error if we exceed that.
        """
        args = (os.stat(__file__).st_ino, os.getpid(), uuid.uuid4().hex[-8:])
        return '/tmp/test_requests.%s_%s_%s' % args

    def run(self):
        logger.debug('Call waitress.serve in %r ...', self)
        wsgi_app = WSGIApp()
        server = waitress.create_server(
            wsgi_app,
            unix_socket=self.usock,
            clear_untrusted_proxy_headers=True,
        )
        wsgi_app.server = server
        self.server = server
        self.server_ready_event.set()
        server.run()

    def __enter__(self):
        # BUG FIX: use lazy %-args instead of eager '%' formatting so the
        # repr is only computed when DEBUG logging is enabled (and to
        # match the sibling call below).
        logger.debug('Starting %r ...', self)
        self.start()
        logger.debug('Started %r.', self)
        self.server_ready_event.wait()
        return self

    def __exit__(self, *args):
        self.server_ready_event.wait()
        if self.server:
            KillThread(self.server).start()
        # BUG FIX: waitress does not unlink the socket file, and the
        # unique name meant stale files accumulated in /tmp; remove it
        # best-effort (unlinking does not affect the open descriptor).
        try:
            os.remove(self.usock)
        except OSError:
            pass
104 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py37, py38, py39, py310, flake8
3 |
4 |
5 | [python-cli-options]
6 | byte-warnings = -b
7 | byte-errors = -bb
8 | max-isolation = -E -s -I
9 | some-isolation = -E -s
10 | warnings-to-errors = -Werror
11 |
12 |
13 | [testenv]
14 | commands = py.test {posargs:requests_unixsocket/tests}
15 | dependency_groups =
16 | testing
17 |
18 |
19 | [testenv:cleanup-dists]
20 | description =
    Wipe the dist{/} folder
22 | dependency_groups =
23 | commands_pre =
24 | commands =
25 | {envpython} \
26 | {[python-cli-options]byte-errors} \
27 | {[python-cli-options]max-isolation} \
28 | {[python-cli-options]warnings-to-errors} \
29 | -c \
30 | 'import os, shutil, sys; \
31 | dists_dir = "{toxinidir}{/}dist{/}"; \
32 | shutil.rmtree(dists_dir, ignore_errors=True); \
33 | sys.exit(os.path.exists(dists_dir))'
34 | commands_post =
35 | package = skip
36 |
37 |
38 | [testenv:build-dists]
39 | description =
40 | Build dists with {basepython} and put them into the dist{/} folder
41 | dependency_groups =
42 | building
43 | depends =
44 | cleanup-dists
45 | commands =
46 | {envpython} \
47 | {[python-cli-options]byte-errors} \
48 | {[python-cli-options]max-isolation} \
49 | {[python-cli-options]warnings-to-errors} \
50 | -m build \
51 | {posargs:}
52 | commands_post =
53 | package = skip
54 |
55 |
56 | [testenv:metadata-validation]
57 | description =
58 | Verify that dists under the `dist{/}` dir
59 | have valid metadata
60 | dependency_groups =
61 | upstreaming
62 | depends =
63 | build-dists
64 | commands_pre =
65 | {envpython} \
66 | {[python-cli-options]byte-errors} \
67 | {[python-cli-options]max-isolation} \
68 | {[python-cli-options]warnings-to-errors} \
69 | '-Wdefault{:}git archive did not support describe output\
70 | {:}UserWarning{:}setuptools_scm.git' \
71 | '-Wdefault{:}unprocessed git archival found\
72 | {:}UserWarning{:}setuptools_scm.git' \
73 | -m setuptools_scm \
74 | ls
75 | commands =
76 | {envpython} \
77 | {[python-cli-options]byte-errors} \
78 | {[python-cli-options]max-isolation} \
79 | {[python-cli-options]warnings-to-errors} \
80 | -m twine \
81 | check \
82 | --strict \
83 | dist{/}*
84 | commands_post =
85 | package = skip
86 |
87 | [testenv:flake8]
88 | commands = flake8
89 | deps =
90 | flake8
91 | {[testenv]deps}
92 |
93 | [testenv:venv]
94 | commands = {posargs}
95 |
96 | [testenv:coverage]
97 | commands =
98 | coverage erase
99 | coverage run --source requests_unixsocket -m py.test requests_unixsocket/tests
100 | coverage report --show-missing
101 | coverage html
102 | deps =
103 | coverage
104 | {[testenv]deps}
105 |
106 | [testenv:doctest]
107 | # note this only works under python 3 because of unicode literals
108 | commands =
109 | python -m doctest README.rst
110 |
111 | [testenv:sphinx-doctest]
112 | # note this only works under python 3 because of unicode literals
113 | commands =
114 | mkdir build/sphinx/doctest
115 | sphinx-build -b doctest docs build/sphinx/doctest
116 | deps =
117 | pbr
118 | {[testenv]deps}
119 |
120 | [testenv:docs]
121 | commands = python setup.py build_sphinx
122 |
123 | [flake8]
124 | max_line_length = 79
125 | exclude = .git,.tox,dist,docs,*egg
126 |
--------------------------------------------------------------------------------
/toxfile.py:
--------------------------------------------------------------------------------
1 | """Project-local tox env customizations."""
2 |
3 | import platform
4 | import ssl
5 | from base64 import b64encode
6 | from hashlib import sha256
7 | from logging import getLogger
8 | from os import environ, getenv
9 | from pathlib import Path
10 |
11 | from tox.execute.request import StdinSource
12 | from tox.plugin import impl
13 | from tox.tox_env.api import ToxEnv
14 |
15 |
16 | IS_GITHUB_ACTIONS_RUNTIME = getenv('GITHUB_ACTIONS') == 'true'
17 | FILE_APPEND_MODE = 'a'
18 | UNICODE_ENCODING = 'utf-8'
19 | SYS_PLATFORM = platform.system()
20 | IS_WINDOWS = SYS_PLATFORM == 'Windows'
21 |
22 |
23 | logger = getLogger(__name__)
24 |
25 |
def _log_debug_before_run_commands(msg: str) -> None:
    """Emit *msg* at debug level, tagged with this plugin's hook name."""
    hook_tag = ':tox_before_run_commands'
    logger.debug('%s%s> %s', 'toxfile', hook_tag, msg)  # noqa: WPS323
33 |
34 |
def _log_info_before_run_commands(msg: str) -> None:
    """Emit *msg* at info level, tagged with this plugin's hook name."""
    hook_tag = ':tox_before_run_commands'
    logger.info('%s%s> %s', 'toxfile', hook_tag, msg)  # noqa: WPS323
42 |
43 |
def _log_warning_before_run_commands(msg: str) -> None:
    """Emit *msg* at warning level, tagged with this plugin's hook name."""
    hook_tag = ':tox_before_run_commands'
    logger.warning('%s%s> %s', 'toxfile', hook_tag, msg)  # noqa: WPS323
51 |
52 |
@impl
def tox_before_run_commands(tox_env: ToxEnv) -> None:  # noqa: WPS210, WPS213
    """Display test runtime info when in GitHub Actions CI/CD.

    This also injects ``SOURCE_DATE_EPOCH`` env var into build-dists.

    :param tox_env: A tox environment object.
    """
    if tox_env.name == 'build-dists':
        _log_debug_before_run_commands(
            'Setting the Git HEAD-based epoch for reproducibility in GHA...',
        )
        git_executable = 'git'
        git_log_cmd = (  # noqa: WPS317
            git_executable,
            '-c',
            'core.pager=',  # prevents ANSI escape sequences
            'log',
            '-1',
            '--pretty=%ct',  # noqa: WPS323
        )
        # Temporarily allow invoking `git` from within the tox env.
        tox_env.conf['allowlist_externals'].append(git_executable)
        # Consistency: pass stdin by keyword, as the systeminfo call does.
        git_log_outcome = tox_env.execute(git_log_cmd, stdin=StdinSource.OFF)
        tox_env.conf['allowlist_externals'].pop()
        if git_log_outcome.exit_code:
            _log_warning_before_run_commands(
                f'Failed to look up Git HEAD timestamp. {git_log_outcome!s}',
            )
            return

        git_head_timestamp = git_log_outcome.out.strip()

        _log_info_before_run_commands(
            f'Setting `SOURCE_DATE_EPOCH={git_head_timestamp!s}` environment '
            'variable to facilitate build reproducibility...',
        )
        tox_env.environment_variables['SOURCE_DATE_EPOCH'] = git_head_timestamp

    if tox_env.name not in {'py', 'python'} or not IS_GITHUB_ACTIONS_RUNTIME:
        _log_debug_before_run_commands(
            'Not logging runtime info because this is not a test run on '
            'GitHub Actions platform...',
        )
        return

    _log_info_before_run_commands('INFO Logging runtime details...')

    systeminfo_executable = 'systeminfo'
    systeminfo_cmd = (systeminfo_executable,)
    if IS_WINDOWS:
        tox_env.conf['allowlist_externals'].append(systeminfo_executable)
        tox_env.execute(systeminfo_cmd, stdin=StdinSource.OFF, show=True)
        tox_env.conf['allowlist_externals'].pop()
    else:
        _log_debug_before_run_commands(
            f'Not running {systeminfo_executable!s} because this is '
            'not Windows...',
        )

    _log_info_before_run_commands('Logging platform information...')
    # BUG FIX: the f-strings were concatenated without separators,
    # producing one run-together line; join them with newlines like the
    # OpenSSL block below.
    print(  # noqa: T201, WPS421
        'Current platform information:\n'
        f'{platform.platform()=}\n'
        f'{platform.system()=}\n'
        f'{platform.version()=}\n'
        f'{platform.uname()=}\n'
        f'{platform.release()=}',
    )

    _log_info_before_run_commands('Logging current OpenSSL module...')
    print(  # noqa: T201, WPS421
        'Current OpenSSL module:\n'
        f'{ssl.OPENSSL_VERSION=}\n'
        f'{ssl.OPENSSL_VERSION_INFO=}\n'
        f'{ssl.OPENSSL_VERSION_NUMBER=}',
    )
129 |
130 |
def _log_debug_after_run_commands(msg: str) -> None:
    """Emit *msg* at debug level, tagged with this plugin's hook name."""
    hook_tag = ':tox_after_run_commands'
    logger.debug('%s%s> %s', 'toxfile', hook_tag, msg)  # noqa: WPS323
138 |
139 |
def _compute_sha256sum(file_path: Path) -> str:
    """Return the hex SHA-256 digest of the file at *file_path*."""
    digest = sha256()
    digest.update(file_path.read_bytes())
    return digest.hexdigest()
142 |
143 |
def _produce_sha256sum_line(file_path: Path) -> str:
    """Format one ``sha256sum``-style output line for *file_path*."""
    checksum = _compute_sha256sum(file_path)
    # Two spaces between digest and name, matching `sha256sum` output.
    return f'{checksum!s}  {file_path.name!s}'
147 |
148 |
@impl
def tox_after_run_commands(tox_env: ToxEnv) -> None:
    """Compute combined dists hash post build-dists under GHA.

    Emulates ``sha256sum dist/* | base64 -w0`` and publishes the result
    as a GitHub Actions step output.

    :param tox_env: A tox environment object.
    """
    if tox_env.name == 'build-dists' and IS_GITHUB_ACTIONS_RUNTIME:
        _log_debug_after_run_commands(
            'Computing and storing the base64 representation '
            'of the combined dists SHA-256 hash in GHA...',
        )
        dists_dir_path = Path(__file__).parent / 'dist'
        # BUG FIX: sort the artifacts — ``Path.glob()`` yields entries
        # in filesystem-dependent order, which made the combined hash
        # non-deterministic across runs and hosts.
        emulated_sha256sum_output = '\n'.join(
            _produce_sha256sum_line(artifact_path)
            for artifact_path in sorted(dists_dir_path.glob('*'))
        )
        emulated_base64_w0_output = b64encode(
            emulated_sha256sum_output.encode(),
        ).decode()

        with Path(environ['GITHUB_OUTPUT']).open(
            encoding=UNICODE_ENCODING,
            mode=FILE_APPEND_MODE,
        ) as outputs_file:
            print(  # noqa: T201, WPS421
                'combined-dists-base64-encoded-sha256-hash='
                f'{emulated_base64_w0_output!s}',
                file=outputs_file,
            )
178 |
179 |
def tox_append_version_info() -> str:
    """Produce text to be rendered in ``tox --version``.

    :returns: A string with the plugin details.
    """
    # Broken: https://github.com/tox-dev/tox/issues/3508
    plugin_details = '[toxfile]'
    return plugin_details
186 |
--------------------------------------------------------------------------------